1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
; NOTE(review): "UNPSEC_COS" is a long-standing typo for UNSPEC_COS.
; Harmless while sin/cos are unused (see the note at the head of this
; list), but audit all references before renaming or reusing it.
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
105 ;; UNSPEC_VOLATILE Usage:
108 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
110 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
111 ; instruction epilogue sequence that isn't expanded
112 ; into normal RTL. Used for both normal and sibcall
114 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
115 ; for inlined constants.
116 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
118 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
120 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
122 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
124 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
126 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
128 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
129 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
130 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
131 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
132 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
133 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
134 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
139 ;;---------------------------------------------------------------------------
142 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
143 ; generating ARM code. This is used to control the length of some insn
144 ; patterns that share the same RTL in both ARM and Thumb code.
145 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
147 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
148 ; scheduling decisions for the load unit and the multiplier.
149 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
151 ; IS_XSCALE is set to 'yes' when compiling for XScale.
152 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
154 ;; Operand number of an input operand that is shifted. Zero if the
155 ;; given instruction does not shift one of its input operands.
156 (define_attr "shift" "" (const_int 0))
158 ; Floating Point Unit. If we only have floating point emulation, then there
159 ; is no point in scheduling the floating point insns. (Well, for best
160 ; performance we should try and group them together).
161 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon"
162 (const (symbol_ref "arm_fpu_attr")))
164 ; LENGTH of an instruction (in bytes)
165 (define_attr "length" "" (const_int 4))
167 ; POOL_RANGE is how far away from a constant pool entry that this insn
168 ; can be placed. If the distance is zero, then this insn will never
169 ; reference the pool.
170 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
171 ; before its address.
172 (define_attr "pool_range" "" (const_int 0))
173 (define_attr "neg_pool_range" "" (const_int 0))
175 ; An assembler sequence may clobber the condition codes without us knowing.
176 ; If such an insn references the pool, then we have no way of knowing how,
177 ; so use the most conservative value for pool_range.
178 (define_asm_attributes
179 [(set_attr "conds" "clob")
180 (set_attr "length" "4")
181 (set_attr "pool_range" "250")])
183 ;; The instruction used to implement a particular pattern.  This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
;; Restored: the extraction lost the attribute's opening line, leaving
;; the enumeration and default dangling without a define_attr form.
187 (define_attr "insn"
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
191 ; TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor can run in parallel with other, basic instructions
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
;; Classification of each insn (see the comment block above for the
;; meaning of each value).  Restored: the opening (define_attr "type"
;; and the (if_then_else line were lost in extraction.  Multiply-class
;; values of the "insn" attribute default "type" to "mult"; everything
;; else defaults to "alu".
242 (define_attr "type"
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
244 (if_then_else
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
249 ; Load scheduling, set from the arm_ld_sched variable
250 ; initialized by arm_override_options()
251 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
253 ; condition codes: this one is used by final_prescan_insn to speed up
254 ; conditionalizing instructions. It saves having to scan the rtl to see if
255 ; it uses or alters the condition codes.
257 ; USE means that the condition codes are used by the insn in the process of
258 ; outputting code, this means (at present) that we can't use the insn in
261 ; SET means that the purpose of the insn is to set the condition codes in a
262 ; well defined manner.
264 ; CLOB means that the condition codes are altered in an undefined manner, if
265 ; they are altered at all
267 ; JUMP_CLOB is used when the condition cannot be represented by a single
268 ; instruction (UNEQ and LTGT). These cannot be predicated.
270 ; NOCOND means that the condition codes are neither altered nor affect the
271 ; output of this insn
273 (define_attr "conds" "use,set,clob,jump_clob,nocond"
274 (if_then_else (eq_attr "type" "call")
275 (const_string "clob")
276 (const_string "nocond")))
278 ; Predicable means that the insn can be conditionally executed based on
279 ; an automatically added predicate (additional patterns are generated by
280 ; gen...). We default to 'no' because no Thumb patterns match this rule
281 ; and not all ARM patterns do.
282 (define_attr "predicable" "no,yes" (const_string "no"))
284 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
285 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
286 ; suffer blockages enough to warrant modelling this (and it can adversely
287 ; affect the schedule).
288 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
290 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
291 ; to stall the processor.  Used with model_wbuf above.
; Restored: the (const_string "yes") then-branch was lost in extraction;
; without it the if_then_else is malformed and the attribute could never
; report a conflict for the listed memory-touching types.
292 (define_attr "write_conflict" "no,yes"
293 (if_then_else (eq_attr "type"
294 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
295 (const_string "yes")
296 (const_string "no")))
298 ; Classify the insns into those that take one cycle and those that take more
299 ; than one on the main cpu execution unit.
300 (define_attr "core_cycles" "single,multi"
301 (if_then_else (eq_attr "type"
302 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
303 (const_string "single")
304 (const_string "multi")))
306 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
307 ;; distant label. Only applicable to Thumb code.
308 (define_attr "far_jump" "yes,no" (const_string "no"))
311 ;; The number of machine instructions this pattern expands to.
312 ;; Used for Thumb-2 conditional execution.
313 (define_attr "ce_count" "" (const_int 1))
315 ;;---------------------------------------------------------------------------
318 ; A list of modes that are exactly 64 bits in size. We use this to expand
319 ; some splits that are the same for all modes when operating on ARM
321 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
323 ;;---------------------------------------------------------------------------
326 (include "predicates.md")
327 (include "constraints.md")
329 ;;---------------------------------------------------------------------------
330 ;; Pipeline descriptions
332 ;; Processor type. This is created automatically from arm-cores.def.
333 (include "arm-tune.md")
;; Set to "yes" when tuning for Cortex-R4 or Cortex-R4F; used below to
;; select the cortex-r4 pipeline description and to suppress the generic
;; scheduler.  Restored: the (const (if_then_else and the
;; (const_string "yes") branch were lost in extraction (the four closing
;; parentheses on the final line require both).
335 (define_attr "tune_cortexr4" "yes,no"
336 (const (if_then_else
337 (eq_attr "tune" "cortexr4,cortexr4f")
338 (const_string "yes")
339 (const_string "no"))))
341 ;; True if the generic scheduling description should be used.
;; Restored: the (const (if_then_else and the (const_string "no") branch
;; were lost in extraction.  Cores with their own pipeline description
;; (listed here, plus the Cortex-R4 family) answer "no"; everything else
;; falls back to the generic scheduler.
343 (define_attr "generic_sched" "yes,no"
344 (const (if_then_else
345 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
346 (eq_attr "tune_cortexr4" "yes"))
347 (const_string "no")
348 (const_string "yes"))))
;; True if the generic VFP pipeline description (vfp11.md) should be
;; used: a VFP FPU on a core without its own VFP scheduling model.
;; Restored: the (const (if_then_else and the (const_string "yes")
;; branch were lost in extraction (the trailing parentheses on the final
;; line require both).
350 (define_attr "generic_vfp" "yes,no"
351 (const (if_then_else
352 (and (eq_attr "fpu" "vfp")
353 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
354 (eq_attr "tune_cortexr4" "no"))
355 (const_string "yes")
356 (const_string "no"))))
358 (include "arm-generic.md")
359 (include "arm926ejs.md")
360 (include "arm1020e.md")
361 (include "arm1026ejs.md")
362 (include "arm1136jfs.md")
363 (include "cortex-a8.md")
364 (include "cortex-a9.md")
365 (include "cortex-r4.md")
366 (include "cortex-r4f.md")
370 ;;---------------------------------------------------------------------------
375 ;; Note: For DImode insns, there is normally no reason why operands should
376 ;; not be in the same register, what we don't want is for something being
377 ;; written to partially overlap something that is an input.
378 ;; Cirrus 64bit additions should not be split because we have a native
379 ;; 64bit addition instructions.
381 (define_expand "adddi3"
383 [(set (match_operand:DI 0 "s_register_operand" "")
384 (plus:DI (match_operand:DI 1 "s_register_operand" "")
385 (match_operand:DI 2 "s_register_operand" "")))
386 (clobber (reg:CC CC_REGNUM))])]
389 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
391 if (!cirrus_fp_register (operands[0], DImode))
392 operands[0] = force_reg (DImode, operands[0]);
393 if (!cirrus_fp_register (operands[1], DImode))
394 operands[1] = force_reg (DImode, operands[1]);
395 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
401 if (GET_CODE (operands[1]) != REG)
402 operands[1] = force_reg (SImode, operands[1]);
403 if (GET_CODE (operands[2]) != REG)
404 operands[2] = force_reg (SImode, operands[2]);
409 (define_insn "*thumb1_adddi3"
410 [(set (match_operand:DI 0 "register_operand" "=l")
411 (plus:DI (match_operand:DI 1 "register_operand" "%0")
412 (match_operand:DI 2 "register_operand" "l")))
413 (clobber (reg:CC CC_REGNUM))
416 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
417 [(set_attr "length" "4")]
420 (define_insn_and_split "*arm_adddi3"
421 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
422 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
423 (match_operand:DI 2 "s_register_operand" "r, 0")))
424 (clobber (reg:CC CC_REGNUM))]
425 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
427 "TARGET_32BIT && reload_completed"
428 [(parallel [(set (reg:CC_C CC_REGNUM)
429 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
431 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
432 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
433 (plus:SI (match_dup 4) (match_dup 5))))]
436 operands[3] = gen_highpart (SImode, operands[0]);
437 operands[0] = gen_lowpart (SImode, operands[0]);
438 operands[4] = gen_highpart (SImode, operands[1]);
439 operands[1] = gen_lowpart (SImode, operands[1]);
440 operands[5] = gen_highpart (SImode, operands[2]);
441 operands[2] = gen_lowpart (SImode, operands[2]);
443 [(set_attr "conds" "clob")
444 (set_attr "length" "8")]
447 (define_insn_and_split "*adddi_sesidi_di"
448 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
449 (plus:DI (sign_extend:DI
450 (match_operand:SI 2 "s_register_operand" "r,r"))
451 (match_operand:DI 1 "s_register_operand" "r,0")))
452 (clobber (reg:CC CC_REGNUM))]
453 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
455 "TARGET_32BIT && reload_completed"
456 [(parallel [(set (reg:CC_C CC_REGNUM)
457 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
459 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
460 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
461 (plus:SI (ashiftrt:SI (match_dup 2)
466 operands[3] = gen_highpart (SImode, operands[0]);
467 operands[0] = gen_lowpart (SImode, operands[0]);
468 operands[4] = gen_highpart (SImode, operands[1]);
469 operands[1] = gen_lowpart (SImode, operands[1]);
470 operands[2] = gen_lowpart (SImode, operands[2]);
472 [(set_attr "conds" "clob")
473 (set_attr "length" "8")]
476 (define_insn_and_split "*adddi_zesidi_di"
477 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
478 (plus:DI (zero_extend:DI
479 (match_operand:SI 2 "s_register_operand" "r,r"))
480 (match_operand:DI 1 "s_register_operand" "r,0")))
481 (clobber (reg:CC CC_REGNUM))]
482 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
484 "TARGET_32BIT && reload_completed"
485 [(parallel [(set (reg:CC_C CC_REGNUM)
486 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
488 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
489 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
490 (plus:SI (match_dup 4) (const_int 0))))]
493 operands[3] = gen_highpart (SImode, operands[0]);
494 operands[0] = gen_lowpart (SImode, operands[0]);
495 operands[4] = gen_highpart (SImode, operands[1]);
496 operands[1] = gen_lowpart (SImode, operands[1]);
497 operands[2] = gen_lowpart (SImode, operands[2]);
499 [(set_attr "conds" "clob")
500 (set_attr "length" "8")]
503 (define_expand "addsi3"
504 [(set (match_operand:SI 0 "s_register_operand" "")
505 (plus:SI (match_operand:SI 1 "s_register_operand" "")
506 (match_operand:SI 2 "reg_or_int_operand" "")))]
509 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
511 arm_split_constant (PLUS, SImode, NULL_RTX,
512 INTVAL (operands[2]), operands[0], operands[1],
513 optimize && can_create_pseudo_p ());
519 ; If there is a scratch available, this will be faster than synthesizing the
522 [(match_scratch:SI 3 "r")
523 (set (match_operand:SI 0 "arm_general_register_operand" "")
524 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
525 (match_operand:SI 2 "const_int_operand" "")))]
527 !(const_ok_for_arm (INTVAL (operands[2]))
528 || const_ok_for_arm (-INTVAL (operands[2])))
529 && const_ok_for_arm (~INTVAL (operands[2]))"
530 [(set (match_dup 3) (match_dup 2))
531 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
535 (define_insn_and_split "*arm_addsi3"
536 [(set (match_operand:SI 0 "s_register_operand" "=r, !k,r, !k,r")
537 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k,rk,!k,rk")
538 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,L, L,?n")))]
547 GET_CODE (operands[2]) == CONST_INT
548 && !(const_ok_for_arm (INTVAL (operands[2]))
549 || const_ok_for_arm (-INTVAL (operands[2])))"
550 [(clobber (const_int 0))]
552 arm_split_constant (PLUS, SImode, curr_insn,
553 INTVAL (operands[2]), operands[0],
557 [(set_attr "length" "4,4,4,4,16")
558 (set_attr "predicable" "yes")]
561 ;; Register group 'k' is a single register group containing only the stack
562 ;; register. Trying to reload it will always fail catastrophically,
563 ;; so never allow those alternatives to match if reloading is needed.
565 (define_insn "*thumb1_addsi3"
566 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
567 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
568 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
571 static const char * const asms[] =
573 \"add\\t%0, %0, %2\",
574 \"sub\\t%0, %0, #%n2\",
575 \"add\\t%0, %1, %2\",
576 \"add\\t%0, %0, %2\",
577 \"add\\t%0, %0, %2\",
578 \"add\\t%0, %1, %2\",
581 if ((which_alternative == 2 || which_alternative == 6)
582 && GET_CODE (operands[2]) == CONST_INT
583 && INTVAL (operands[2]) < 0)
584 return \"sub\\t%0, %1, #%n2\";
585 return asms[which_alternative];
587 [(set_attr "length" "2")]
590 ;; Reloading and elimination of the frame pointer can
591 ;; sometimes cause this optimization to be missed.
593 [(set (match_operand:SI 0 "arm_general_register_operand" "")
594 (match_operand:SI 1 "const_int_operand" ""))
596 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
598 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
599 && (INTVAL (operands[1]) & 3) == 0"
600 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
604 ;; ??? Make Thumb-2 variants which prefer low regs
605 (define_insn "*addsi3_compare0"
606 [(set (reg:CC_NOOV CC_REGNUM)
608 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
609 (match_operand:SI 2 "arm_add_operand" "rI,L"))
611 (set (match_operand:SI 0 "s_register_operand" "=r,r")
612 (plus:SI (match_dup 1) (match_dup 2)))]
616 sub%.\\t%0, %1, #%n2"
617 [(set_attr "conds" "set")]
620 (define_insn "*addsi3_compare0_scratch"
621 [(set (reg:CC_NOOV CC_REGNUM)
623 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
624 (match_operand:SI 1 "arm_add_operand" "rI,L"))
630 [(set_attr "conds" "set")]
633 (define_insn "*compare_negsi_si"
634 [(set (reg:CC_Z CC_REGNUM)
636 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
637 (match_operand:SI 1 "s_register_operand" "r")))]
640 [(set_attr "conds" "set")]
643 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
644 ;; addend is a constant.
645 (define_insn "*cmpsi2_addneg"
646 [(set (reg:CC CC_REGNUM)
648 (match_operand:SI 1 "s_register_operand" "r,r")
649 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
650 (set (match_operand:SI 0 "s_register_operand" "=r,r")
651 (plus:SI (match_dup 1)
652 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
653 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
656 add%.\\t%0, %1, #%n2"
657 [(set_attr "conds" "set")]
660 ;; Convert the sequence
662 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
666 ;; bcs dest ((unsigned)rn >= 1)
667 ;; similarly for the beq variant using bcc.
668 ;; This is a common looping idiom (while (n--))
670 [(set (match_operand:SI 0 "arm_general_register_operand" "")
671 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
673 (set (match_operand 2 "cc_register" "")
674 (compare (match_dup 0) (const_int -1)))
676 (if_then_else (match_operator 3 "equality_operator"
677 [(match_dup 2) (const_int 0)])
678 (match_operand 4 "" "")
679 (match_operand 5 "" "")))]
680 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
684 (match_dup 1) (const_int 1)))
685 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
687 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
690 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
691 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
694 operands[2], const0_rtx);"
697 ;; The next four insns work because they compare the result with one of
698 ;; the operands, and we know that the use of the condition code is
699 ;; either GEU or LTU, so we can use the carry flag from the addition
700 ;; instead of doing the compare a second time.
701 (define_insn "*addsi3_compare_op1"
702 [(set (reg:CC_C CC_REGNUM)
704 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
705 (match_operand:SI 2 "arm_add_operand" "rI,L"))
707 (set (match_operand:SI 0 "s_register_operand" "=r,r")
708 (plus:SI (match_dup 1) (match_dup 2)))]
712 sub%.\\t%0, %1, #%n2"
713 [(set_attr "conds" "set")]
716 (define_insn "*addsi3_compare_op2"
717 [(set (reg:CC_C CC_REGNUM)
719 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
720 (match_operand:SI 2 "arm_add_operand" "rI,L"))
722 (set (match_operand:SI 0 "s_register_operand" "=r,r")
723 (plus:SI (match_dup 1) (match_dup 2)))]
727 sub%.\\t%0, %1, #%n2"
728 [(set_attr "conds" "set")]
731 (define_insn "*compare_addsi2_op0"
732 [(set (reg:CC_C CC_REGNUM)
734 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
735 (match_operand:SI 1 "arm_add_operand" "rI,L"))
741 [(set_attr "conds" "set")]
744 (define_insn "*compare_addsi2_op1"
745 [(set (reg:CC_C CC_REGNUM)
747 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
748 (match_operand:SI 1 "arm_add_operand" "rI,L"))
754 [(set_attr "conds" "set")]
757 (define_insn "*addsi3_carryin"
758 [(set (match_operand:SI 0 "s_register_operand" "=r")
759 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
760 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
761 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
764 [(set_attr "conds" "use")]
767 (define_insn "*addsi3_carryin_shift"
768 [(set (match_operand:SI 0 "s_register_operand" "=r")
769 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
771 (match_operator:SI 2 "shift_operator"
772 [(match_operand:SI 3 "s_register_operand" "r")
773 (match_operand:SI 4 "reg_or_int_operand" "rM")])
774 (match_operand:SI 1 "s_register_operand" "r"))))]
776 "adc%?\\t%0, %1, %3%S2"
777 [(set_attr "conds" "use")
778 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
779 (const_string "alu_shift")
780 (const_string "alu_shift_reg")))]
783 (define_insn "*addsi3_carryin_alt1"
784 [(set (match_operand:SI 0 "s_register_operand" "=r")
785 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
786 (match_operand:SI 2 "arm_rhs_operand" "rI"))
787 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
790 [(set_attr "conds" "use")]
793 (define_insn "*addsi3_carryin_alt2"
794 [(set (match_operand:SI 0 "s_register_operand" "=r")
795 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
796 (match_operand:SI 1 "s_register_operand" "r"))
797 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
800 [(set_attr "conds" "use")]
803 (define_insn "*addsi3_carryin_alt3"
804 [(set (match_operand:SI 0 "s_register_operand" "=r")
805 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
806 (match_operand:SI 2 "arm_rhs_operand" "rI"))
807 (match_operand:SI 1 "s_register_operand" "r")))]
810 [(set_attr "conds" "use")]
813 (define_expand "incscc"
814 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
815 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
816 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
817 (match_operand:SI 1 "s_register_operand" "0,?r")))]
822 (define_insn "*arm_incscc"
823 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
824 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
825 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
826 (match_operand:SI 1 "s_register_operand" "0,?r")))]
830 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
831 [(set_attr "conds" "use")
832 (set_attr "length" "4,8")]
835 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
837 [(set (match_operand:SI 0 "s_register_operand" "")
838 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
839 (match_operand:SI 2 "s_register_operand" ""))
841 (clobber (match_operand:SI 3 "s_register_operand" ""))]
843 [(set (match_dup 3) (match_dup 1))
844 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
846 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
849 (define_expand "addsf3"
850 [(set (match_operand:SF 0 "s_register_operand" "")
851 (plus:SF (match_operand:SF 1 "s_register_operand" "")
852 (match_operand:SF 2 "arm_float_add_operand" "")))]
853 "TARGET_32BIT && TARGET_HARD_FLOAT"
856 && !cirrus_fp_register (operands[2], SFmode))
857 operands[2] = force_reg (SFmode, operands[2]);
860 (define_expand "adddf3"
861 [(set (match_operand:DF 0 "s_register_operand" "")
862 (plus:DF (match_operand:DF 1 "s_register_operand" "")
863 (match_operand:DF 2 "arm_float_add_operand" "")))]
864 "TARGET_32BIT && TARGET_HARD_FLOAT"
867 && !cirrus_fp_register (operands[2], DFmode))
868 operands[2] = force_reg (DFmode, operands[2]);
871 (define_expand "subdi3"
873 [(set (match_operand:DI 0 "s_register_operand" "")
874 (minus:DI (match_operand:DI 1 "s_register_operand" "")
875 (match_operand:DI 2 "s_register_operand" "")))
876 (clobber (reg:CC CC_REGNUM))])]
879 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
881 && cirrus_fp_register (operands[0], DImode)
882 && cirrus_fp_register (operands[1], DImode))
884 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
890 if (GET_CODE (operands[1]) != REG)
891 operands[1] = force_reg (SImode, operands[1]);
892 if (GET_CODE (operands[2]) != REG)
893 operands[2] = force_reg (SImode, operands[2]);
;; NOTE(review): this extract is missing interleaved lines (insn conditions,
;; closing parens -- the content numbering has gaps); compare against upstream
;; gcc/config/arm/arm.md before further editing.

;; *arm_subdi3: 64-bit subtraction on ARM.  Lowered to SUBS (sets the borrow)
;; followed by SBC (consumes it).  %Q = low word, %R = high word of the DI
;; register pair.  Clobbers the condition codes ("conds" "clob").
898 (define_insn "*arm_subdi3"
899 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
900 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
901 (match_operand:DI 2 "s_register_operand" "r,0,0")))
902 (clobber (reg:CC CC_REGNUM))]
904 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
905 [(set_attr "conds" "clob")
906 (set_attr "length" "8")]

;; *thumb_subdi3: Thumb form; destination must match operand 1 ("0" constraint),
;; so the SUB/SBC pair operates in place on %Q0/%R0.
909 (define_insn "*thumb_subdi3"
910 [(set (match_operand:DI 0 "register_operand" "=l")
911 (minus:DI (match_operand:DI 1 "register_operand" "0")
912 (match_operand:DI 2 "register_operand" "l")))
913 (clobber (reg:CC CC_REGNUM))]
915 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
916 [(set_attr "length" "4")]

;; *subdi_di_zesidi: DI minus a (zero-extended) SI.  The high word only has to
;; propagate the borrow, hence "sbc %R0, %R1, #0".
919 (define_insn "*subdi_di_zesidi"
920 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
921 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
923 (match_operand:SI 2 "s_register_operand" "r,r"))))
924 (clobber (reg:CC CC_REGNUM))]
926 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
927 [(set_attr "conds" "clob")
928 (set_attr "length" "8")]

;; *subdi_di_sesidi: DI minus a (sign-extended) SI.  The high word subtracts
;; the replicated sign bit, "%2, asr #31".
931 (define_insn "*subdi_di_sesidi"
932 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
933 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
935 (match_operand:SI 2 "s_register_operand" "r,r"))))
936 (clobber (reg:CC CC_REGNUM))]
938 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
939 [(set_attr "conds" "clob")
940 (set_attr "length" "8")]

;; *subdi_zesidi_di: zero-extended SI minus DI; uses the reverse-subtract
;; forms RSBS/RSC so the DI operand can be the subtrahend register pair.
943 (define_insn "*subdi_zesidi_di"
944 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
945 (minus:DI (zero_extend:DI
946 (match_operand:SI 2 "s_register_operand" "r,r"))
947 (match_operand:DI 1 "s_register_operand" "?r,0")))
948 (clobber (reg:CC CC_REGNUM))]
950 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
951 [(set_attr "conds" "clob")
952 (set_attr "length" "8")]

;; *subdi_sesidi_di: sign-extended SI minus DI (RSBS/RSC, high word uses the
;; sign word "%2, asr #31").
955 (define_insn "*subdi_sesidi_di"
956 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
957 (minus:DI (sign_extend:DI
958 (match_operand:SI 2 "s_register_operand" "r,r"))
959 (match_operand:DI 1 "s_register_operand" "?r,0")))
960 (clobber (reg:CC CC_REGNUM))]
962 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
963 [(set_attr "conds" "clob")
964 (set_attr "length" "8")]

;; *subdi_zesidi_zesidi: both operands zero-extended, so the high word is just
;; 0 - borrow; "sbc %R0, %1, %1" computes %1 - %1 - NOT(carry) = -(borrow).
967 (define_insn "*subdi_zesidi_zesidi"
968 [(set (match_operand:DI 0 "s_register_operand" "=r")
969 (minus:DI (zero_extend:DI
970 (match_operand:SI 1 "s_register_operand" "r"))
972 (match_operand:SI 2 "s_register_operand" "r"))))
973 (clobber (reg:CC CC_REGNUM))]
975 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
976 [(set_attr "conds" "clob")
977 (set_attr "length" "8")]
;; NOTE(review): lines are missing from this extract (conditions/closing
;; parens); compare with upstream arm.md before editing.

;; subsi3 expander: when operand 1 is a constant, ARM/Thumb-2 lower
;; "const - reg" via arm_split_constant; Thumb-1 instead forces the constant
;; into a register (the visible else branch).
980 (define_expand "subsi3"
981 [(set (match_operand:SI 0 "s_register_operand" "")
982 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
983 (match_operand:SI 2 "s_register_operand" "")))]
986 if (GET_CODE (operands[1]) == CONST_INT)
990 arm_split_constant (MINUS, SImode, NULL_RTX,
991 INTVAL (operands[1]), operands[0],
992 operands[2], optimize && can_create_pseudo_p ());
995 else /* TARGET_THUMB1 */
996 operands[1] = force_reg (SImode, operands[1]);

;; Thumb-1 register-register subtract (2-byte encoding, low registers only).
1001 (define_insn "*thumb1_subsi3_insn"
1002 [(set (match_operand:SI 0 "register_operand" "=l")
1003 (minus:SI (match_operand:SI 1 "register_operand" "l")
1004 (match_operand:SI 2 "register_operand" "l")))]
1007 [(set_attr "length" "2")]

;; *arm_subsi3_insn: insn-and-split.  A CONST_INT operand 1 that is not a
;; valid ARM immediate is split post-reload via arm_split_constant (hence the
;; worst-case length of 16 for the ?n alternative).
1010 ; ??? Check Thumb-2 split length
1011 (define_insn_and_split "*arm_subsi3_insn"
1012 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1013 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1014 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1021 && GET_CODE (operands[1]) == CONST_INT
1022 && !const_ok_for_arm (INTVAL (operands[1]))"
1023 [(clobber (const_int 0))]
1025 arm_split_constant (MINUS, SImode, curr_insn,
1026 INTVAL (operands[1]), operands[0], operands[2], 0);
1029 [(set_attr "length" "4,4,16")
1030 (set_attr "predicable" "yes")]
;; NOTE(review): the opening line of this pattern (presumably
;; "(define_peephole2") is missing from this extract.  The visible body
;; materialises ~const into a scratch and rewrites const - reg as a
;; register-register subtract when the inverted constant is encodable.
1034 [(match_scratch:SI 3 "r")
1035 (set (match_operand:SI 0 "arm_general_register_operand" "")
1036 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1037 (match_operand:SI 2 "arm_general_register_operand" "")))]
1039 && !const_ok_for_arm (INTVAL (operands[1]))
1040 && const_ok_for_arm (~INTVAL (operands[1]))"
1041 [(set (match_dup 3) (match_dup 1))
1042 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]

;; *subsi3_compare0: subtract that also sets the condition codes (CC_NOOV --
;; overflow-insensitive comparison against zero).
1046 (define_insn "*subsi3_compare0"
1047 [(set (reg:CC_NOOV CC_REGNUM)
1049 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1050 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1052 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1053 (minus:SI (match_dup 1) (match_dup 2)))]
1058 [(set_attr "conds" "set")]

;; decscc expander: decrement-if-condition, consuming an existing CC register
;; value via an arm_comparison_operator.
1061 (define_expand "decscc"
1062 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1063 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1064 (match_operator:SI 2 "arm_comparison_operator"
1065 [(match_operand 3 "cc_register" "") (const_int 0)])))]

;; *arm_decscc: second alternative copies the source first (mov%D2 under the
;; inverse condition, then sub%d2), hence length "*,8"; "conds" "use" -- reads
;; but does not set the flags.
1070 (define_insn "*arm_decscc"
1071 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1072 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1073 (match_operator:SI 2 "arm_comparison_operator"
1074 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1078 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1079 [(set_attr "conds" "use")
1080 (set_attr "length" "*,8")]
;; subsf3 expander: SFmode subtract for hard-float targets.  On Maverick
;; (Cirrus) both operands must live in Cirrus FP registers, so non-conforming
;; operands are forced into registers here.
1083 (define_expand "subsf3"
1084 [(set (match_operand:SF 0 "s_register_operand" "")
1085 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1086 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1087 "TARGET_32BIT && TARGET_HARD_FLOAT"
1089 if (TARGET_MAVERICK)
1091 if (!cirrus_fp_register (operands[1], SFmode))
1092 operands[1] = force_reg (SFmode, operands[1]);
1093 if (!cirrus_fp_register (operands[2], SFmode))
1094 operands[2] = force_reg (SFmode, operands[2]);

;; subdf3 expander: DFmode counterpart of subsf3 (same Maverick handling).
1098 (define_expand "subdf3"
1099 [(set (match_operand:DF 0 "s_register_operand" "")
1100 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1101 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1102 "TARGET_32BIT && TARGET_HARD_FLOAT"
1104 if (TARGET_MAVERICK)
1106 if (!cirrus_fp_register (operands[1], DFmode))
1107 operands[1] = force_reg (DFmode, operands[1]);
1108 if (!cirrus_fp_register (operands[2], DFmode))
1109 operands[2] = force_reg (DFmode, operands[2]);
1114 ;; Multiplication insns

;; mulsi3 expander: note the deliberate operand order (op2 * op1) so the
;; matching insns can place the commutative "%" marker on operand 1.
1116 (define_expand "mulsi3"
1117 [(set (match_operand:SI 0 "s_register_operand" "")
1118 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1119 (match_operand:SI 1 "s_register_operand" "")))]

;; Pre-v6 MUL: destination may not equal the first source operand, enforced
;; by the early-clobber "&r" / matching "0" alternatives described below.
1124 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1125 (define_insn "*arm_mulsi3"
1126 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1127 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1128 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1129 "TARGET_32BIT && !arm_arch6"
1130 "mul%?\\t%0, %2, %1"
1131 [(set_attr "insn" "mul")
1132 (set_attr "predicable" "yes")]

;; ARMv6+ MUL: the overlap restriction is lifted, so no early-clobber and a
;; single plain-register alternative.
1135 (define_insn "*arm_mulsi3_v6"
1136 [(set (match_operand:SI 0 "s_register_operand" "=r")
1137 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1138 (match_operand:SI 2 "s_register_operand" "r")))]
1139 "TARGET_32BIT && arm_arch6"
1140 "mul%?\\t%0, %1, %2"
1141 [(set_attr "insn" "mul")
1142 (set_attr "predicable" "yes")]
1145 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1146 ; 1 and 2 are the same, because reload will make operand 0 match
1147 ; operand 1 without realizing that this conflicts with operand 2. We fix
1148 ; this by adding another alternative to match this case, and then `reload'
1149 ; it ourselves. This alternative must come first.
;; *thumb_mulsi3: pre-v6 Thumb multiply.  The first two alternatives "reload"
;; operand 1 into the destination with an explicit MOV before the MUL (see
;; which_alternative test below); the third matches operand 1 == operand 0.
1150 (define_insn "*thumb_mulsi3"
1151 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1152 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1153 (match_operand:SI 2 "register_operand" "l,l,l")))]
1154 "TARGET_THUMB1 && !arm_arch6"
1156 if (which_alternative < 2)
1157 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1159 return \"mul\\t%0, %2\";
1161 [(set_attr "length" "4,4,2")
1162 (set_attr "insn" "mul")]

;; *thumb_mulsi3_v6: v6 Thumb multiply, single 2-byte MUL; alternatives tie
;; the destination to one of the sources instead of emitting a MOV.
1165 (define_insn "*thumb_mulsi3_v6"
1166 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1167 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1168 (match_operand:SI 2 "register_operand" "l,0,0")))]
1169 "TARGET_THUMB1 && arm_arch6"
1174 [(set_attr "length" "2")
1175 (set_attr "insn" "mul")]
;; NOTE(review): the (const_int 0) line of each CC_NOOV compare appears to be
;; missing from this extract.

;; *mulsi3_compare0: pre-v6 MUL that also sets the flags ("mul%." = MULS);
;; keeps the pre-v6 dst/src1 overlap restriction via "&r"/"0".
1178 (define_insn "*mulsi3_compare0"
1179 [(set (reg:CC_NOOV CC_REGNUM)
1180 (compare:CC_NOOV (mult:SI
1181 (match_operand:SI 2 "s_register_operand" "r,r")
1182 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1184 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1185 (mult:SI (match_dup 2) (match_dup 1)))]
1186 "TARGET_ARM && !arm_arch6"
1187 "mul%.\\t%0, %2, %1"
1188 [(set_attr "conds" "set")
1189 (set_attr "insn" "muls")]

;; v6 variant: only used when optimizing for size (MULS restricts ARMv6
;; scheduling, so it is not the default at -O2).
1192 (define_insn "*mulsi3_compare0_v6"
1193 [(set (reg:CC_NOOV CC_REGNUM)
1194 (compare:CC_NOOV (mult:SI
1195 (match_operand:SI 2 "s_register_operand" "r")
1196 (match_operand:SI 1 "s_register_operand" "r"))
1198 (set (match_operand:SI 0 "s_register_operand" "=r")
1199 (mult:SI (match_dup 2) (match_dup 1)))]
1200 "TARGET_ARM && arm_arch6 && optimize_size"
1201 "mul%.\\t%0, %2, %1"
1202 [(set_attr "conds" "set")
1203 (set_attr "insn" "muls")]

;; Flag-setting multiply where only the flags are wanted: the product goes
;; into a scratch register.
1206 (define_insn "*mulsi_compare0_scratch"
1207 [(set (reg:CC_NOOV CC_REGNUM)
1208 (compare:CC_NOOV (mult:SI
1209 (match_operand:SI 2 "s_register_operand" "r,r")
1210 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1212 (clobber (match_scratch:SI 0 "=&r,&r"))]
1213 "TARGET_ARM && !arm_arch6"
1214 "mul%.\\t%0, %2, %1"
1215 [(set_attr "conds" "set")
1216 (set_attr "insn" "muls")]

;; v6 scratch variant (size-optimization only, as above).
1219 (define_insn "*mulsi_compare0_scratch_v6"
1220 [(set (reg:CC_NOOV CC_REGNUM)
1221 (compare:CC_NOOV (mult:SI
1222 (match_operand:SI 2 "s_register_operand" "r")
1223 (match_operand:SI 1 "s_register_operand" "r"))
1225 (clobber (match_scratch:SI 0 "=r"))]
1226 "TARGET_ARM && arm_arch6 && optimize_size"
1227 "mul%.\\t%0, %2, %1"
1228 [(set_attr "conds" "set")
1229 (set_attr "insn" "muls")]
1232 ;; Unnamed templates to match MLA instruction.

;; Pre-v6 MLA (multiply-accumulate: %0 = %2 * %1 + %3).  Early-clobber "&r"
;; destination with alternatives tying sources to the destination, mirroring
;; the pre-v6 MUL overlap restriction.
1234 (define_insn "*mulsi3addsi"
1235 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1237 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1238 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1239 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1240 "TARGET_32BIT && !arm_arch6"
1241 "mla%?\\t%0, %2, %1, %3"
1242 [(set_attr "insn" "mla")
1243 (set_attr "predicable" "yes")]

;; ARMv6+ MLA: no overlap restriction, single alternative.
1246 (define_insn "*mulsi3addsi_v6"
1247 [(set (match_operand:SI 0 "s_register_operand" "=r")
1249 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1250 (match_operand:SI 1 "s_register_operand" "r"))
1251 (match_operand:SI 3 "s_register_operand" "r")))]
1252 "TARGET_32BIT && arm_arch6"
1253 "mla%?\\t%0, %2, %1, %3"
1254 [(set_attr "insn" "mla")
1255 (set_attr "predicable" "yes")]
;; *mulsi3addsi_compare0: pre-v6 MLA that also sets the condition codes
;; (CC_NOOV compare of the accumulate result against zero; "mla%." = MLAS).
;; Early-clobber "&r" destination per the pre-v6 multiply overlap rule.
;; FIX(review): insn condition corrected to "TARGET_ARM && !arm_arch6" -- it
;; previously read "arm_arch6", but this is the non-v6 pattern: the _v6
;; variant requires "arm_arch6 && optimize_size" and the matching
;; *mulsi3addsi_compare0_scratch already uses "!arm_arch6"; upstream arm.md
;; agrees.  Lines 1260-1261/1265/1268, absent from the extract, are restored
;; from the canonical pattern.
1258 (define_insn "*mulsi3addsi_compare0"
1259 [(set (reg:CC_NOOV CC_REGNUM)
1260 (compare:CC_NOOV
1261 (plus:SI (mult:SI
1262 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1263 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1264 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1265 (const_int 0)))
1266 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1267 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1268 (match_dup 3)))]
1269 "TARGET_ARM && !arm_arch6"
1270 "mla%.\\t%0, %2, %1, %3"
1271 [(set_attr "conds" "set")
1272 (set_attr "insn" "mlas")]
;; NOTE(review): several interior lines (compare/plus headers, (const_int 0),
;; closing parens) are missing from this extract.

;; v6 MLAS: flag-setting multiply-accumulate, size-optimization only.
1275 (define_insn "*mulsi3addsi_compare0_v6"
1276 [(set (reg:CC_NOOV CC_REGNUM)
1279 (match_operand:SI 2 "s_register_operand" "r")
1280 (match_operand:SI 1 "s_register_operand" "r"))
1281 (match_operand:SI 3 "s_register_operand" "r"))
1283 (set (match_operand:SI 0 "s_register_operand" "=r")
1284 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1286 "TARGET_ARM && arm_arch6 && optimize_size"
1287 "mla%.\\t%0, %2, %1, %3"
1288 [(set_attr "conds" "set")
1289 (set_attr "insn" "mlas")]

;; Pre-v6 MLAS where only the flags are needed; result goes to a scratch.
1292 (define_insn "*mulsi3addsi_compare0_scratch"
1293 [(set (reg:CC_NOOV CC_REGNUM)
1296 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1297 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1298 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1300 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1301 "TARGET_ARM && !arm_arch6"
1302 "mla%.\\t%0, %2, %1, %3"
1303 [(set_attr "conds" "set")
1304 (set_attr "insn" "mlas")]

;; v6 scratch variant (size-optimization only).
1307 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1308 [(set (reg:CC_NOOV CC_REGNUM)
1311 (match_operand:SI 2 "s_register_operand" "r")
1312 (match_operand:SI 1 "s_register_operand" "r"))
1313 (match_operand:SI 3 "s_register_operand" "r"))
1315 (clobber (match_scratch:SI 0 "=r"))]
1316 "TARGET_ARM && arm_arch6 && optimize_size"
1317 "mla%.\\t%0, %2, %1, %3"
1318 [(set_attr "conds" "set")
1319 (set_attr "insn" "mlas")]

;; *mulsi3subsi: MLS (multiply-subtract, %0 = %3 - %2 * %1), Thumb-2/v6T2
;; only.  The "insn" attribute reuses the "mla" class.
1322 (define_insn "*mulsi3subsi"
1323 [(set (match_operand:SI 0 "s_register_operand" "=r")
1325 (match_operand:SI 3 "s_register_operand" "r")
1326 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1327 (match_operand:SI 1 "s_register_operand" "r"))))]
1328 "TARGET_32BIT && arm_arch_thumb2"
1329 "mls%?\\t%0, %2, %1, %3"
1330 [(set_attr "insn" "mla")
1331 (set_attr "predicable" "yes")]
1334 ;; Unnamed template to match long long multiply-accumulate (smlal)

;; Pre-v6 SMLAL: 32x32->64 signed multiply accumulated into the DI pair
;; %Q0/%R0; the accumulator (operand 1) must already be in the destination
;; ("0" constraint), and "&r" keeps the inputs out of the output pair.
1336 (define_insn "*mulsidi3adddi"
1337 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1340 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1341 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1342 (match_operand:DI 1 "s_register_operand" "0")))]
1343 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1344 "smlal%?\\t%Q0, %R0, %3, %2"
1345 [(set_attr "insn" "smlal")
1346 (set_attr "predicable" "yes")]

;; v6 SMLAL: overlap restriction lifted, so no early-clobber.
1349 (define_insn "*mulsidi3adddi_v6"
1350 [(set (match_operand:DI 0 "s_register_operand" "=r")
1353 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1354 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1355 (match_operand:DI 1 "s_register_operand" "0")))]
1356 "TARGET_32BIT && arm_arch6"
1357 "smlal%?\\t%Q0, %R0, %3, %2"
1358 [(set_attr "insn" "smlal")
1359 (set_attr "predicable" "yes")]
1362 ;; 32x32->64 widening multiply.
1363 ;; As with mulsi3, the only difference between the v3-5 and v6+
1364 ;; versions of these patterns is the requirement that the output not
1365 ;; overlap the inputs, but that still means we have to have a named
1366 ;; expander and two different starred insns.

;; mulsidi3: named expander for signed 32x32->64 multiply (SMULL).
1368 (define_expand "mulsidi3"
1369 [(set (match_operand:DI 0 "s_register_operand" "")
1371 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1372 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1373 "TARGET_32BIT && arm_arch3m"

;; Pre-v6 SMULL: "&r" keeps the result pair disjoint from the inputs.
1377 (define_insn "*mulsidi3_nov6"
1378 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1380 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1381 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1382 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1383 "smull%?\\t%Q0, %R0, %1, %2"
1384 [(set_attr "insn" "smull")
1385 (set_attr "predicable" "yes")]

;; v6 SMULL: output may overlap the inputs.
1388 (define_insn "*mulsidi3_v6"
1389 [(set (match_operand:DI 0 "s_register_operand" "=r")
1391 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1392 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1393 "TARGET_32BIT && arm_arch6"
1394 "smull%?\\t%Q0, %R0, %1, %2"
1395 [(set_attr "insn" "smull")
1396 (set_attr "predicable" "yes")]

;; umulsidi3: named expander for unsigned 32x32->64 multiply (UMULL).
1399 (define_expand "umulsidi3"
1400 [(set (match_operand:DI 0 "s_register_operand" "")
1402 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1403 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1404 "TARGET_32BIT && arm_arch3m"

;; Pre-v6 UMULL (early-clobbered result pair).
1408 (define_insn "*umulsidi3_nov6"
1409 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1411 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1412 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1413 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1414 "umull%?\\t%Q0, %R0, %1, %2"
1415 [(set_attr "insn" "umull")
1416 (set_attr "predicable" "yes")]

;; v6 UMULL.
1419 (define_insn "*umulsidi3_v6"
1420 [(set (match_operand:DI 0 "s_register_operand" "=r")
1422 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1423 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1424 "TARGET_32BIT && arm_arch6"
1425 "umull%?\\t%Q0, %R0, %1, %2"
1426 [(set_attr "insn" "umull")
1427 (set_attr "predicable" "yes")]
1430 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)

;; Pre-v6 UMLAL: unsigned 32x32->64 multiply accumulated into %Q0/%R0; the
;; accumulator must be the destination pair ("0"), inputs early-clobbered out.
1432 (define_insn "*umulsidi3adddi"
1433 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1436 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1437 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1438 (match_operand:DI 1 "s_register_operand" "0")))]
1439 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1440 "umlal%?\\t%Q0, %R0, %3, %2"
1441 [(set_attr "insn" "umlal")
1442 (set_attr "predicable" "yes")]

;; v6 UMLAL: no overlap restriction.
1445 (define_insn "*umulsidi3adddi_v6"
1446 [(set (match_operand:DI 0 "s_register_operand" "=r")
1449 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1450 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1451 (match_operand:DI 1 "s_register_operand" "0")))]
1452 "TARGET_32BIT && arm_arch6"
1453 "umlal%?\\t%Q0, %R0, %3, %2"
1454 [(set_attr "insn" "umlal")
1455 (set_attr "predicable" "yes")]
;; smulsi3_highpart: high 32 bits of a signed 32x32->64 multiply.  The low
;; half of the SMULL result is unwanted, so it lands in a scratch (operand 3).
1458 (define_expand "smulsi3_highpart"
1460 [(set (match_operand:SI 0 "s_register_operand" "")
1464 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1465 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1467 (clobber (match_scratch:SI 3 ""))])]
1468 "TARGET_32BIT && arm_arch3m"

;; Pre-v6: SMULL with the low word into the scratch %3, high word into %0.
1472 (define_insn "*smulsi3_highpart_nov6"
1473 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1477 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1478 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1480 (clobber (match_scratch:SI 3 "=&r,&r"))]
1481 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1482 "smull%?\\t%3, %0, %2, %1"
1483 [(set_attr "insn" "smull")
1484 (set_attr "predicable" "yes")]

;; v6 variant: no early-clobbers needed.
1487 (define_insn "*smulsi3_highpart_v6"
1488 [(set (match_operand:SI 0 "s_register_operand" "=r")
1492 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1493 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1495 (clobber (match_scratch:SI 3 "=r"))]
1496 "TARGET_32BIT && arm_arch6"
1497 "smull%?\\t%3, %0, %2, %1"
1498 [(set_attr "insn" "smull")
1499 (set_attr "predicable" "yes")]

;; umulsi3_highpart: unsigned counterpart, via UMULL.
1502 (define_expand "umulsi3_highpart"
1504 [(set (match_operand:SI 0 "s_register_operand" "")
1508 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1509 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1511 (clobber (match_scratch:SI 3 ""))])]
1512 "TARGET_32BIT && arm_arch3m"

;; Pre-v6 UMULL highpart (low word to scratch).
1516 (define_insn "*umulsi3_highpart_nov6"
1517 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1521 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1522 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1524 (clobber (match_scratch:SI 3 "=&r,&r"))]
1525 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1526 "umull%?\\t%3, %0, %2, %1"
1527 [(set_attr "insn" "umull")
1528 (set_attr "predicable" "yes")]

;; v6 UMULL highpart.
1531 (define_insn "*umulsi3_highpart_v6"
1532 [(set (match_operand:SI 0 "s_register_operand" "=r")
1536 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1537 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1539 (clobber (match_scratch:SI 3 "=r"))]
1540 "TARGET_32BIT && arm_arch6"
1541 "umull%?\\t%3, %0, %2, %1"
1542 [(set_attr "insn" "umull")
1543 (set_attr "predicable" "yes")]
;; DSP 16x16(+acc) multiplies (ARMv5E "TARGET_DSP_MULTIPLY" extensions).
;; Naming: smul<x><y> -- x/y select the bottom (b) or top (t) halfword of
;; each source.  NOTE(review): several interior lines (second sign_extend,
;; ashiftrt shift counts, closing parens) are missing from this extract.

;; mulhisi3: bottom x bottom -> SMULBB.
1546 (define_insn "mulhisi3"
1547 [(set (match_operand:SI 0 "s_register_operand" "=r")
1548 (mult:SI (sign_extend:SI
1549 (match_operand:HI 1 "s_register_operand" "%r"))
1551 (match_operand:HI 2 "s_register_operand" "r"))))]
1552 "TARGET_DSP_MULTIPLY"
1553 "smulbb%?\\t%0, %1, %2"
1554 [(set_attr "insn" "smulxy")
1555 (set_attr "predicable" "yes")]

;; top(op1, via arithmetic shift) x bottom(op2) -> SMULTB.
1558 (define_insn "*mulhisi3tb"
1559 [(set (match_operand:SI 0 "s_register_operand" "=r")
1560 (mult:SI (ashiftrt:SI
1561 (match_operand:SI 1 "s_register_operand" "r")
1564 (match_operand:HI 2 "s_register_operand" "r"))))]
1565 "TARGET_DSP_MULTIPLY"
1566 "smultb%?\\t%0, %1, %2"
1567 [(set_attr "insn" "smulxy")
1568 (set_attr "predicable" "yes")]

;; bottom(op1) x top(op2) -> SMULBT.
1571 (define_insn "*mulhisi3bt"
1572 [(set (match_operand:SI 0 "s_register_operand" "=r")
1573 (mult:SI (sign_extend:SI
1574 (match_operand:HI 1 "s_register_operand" "r"))
1576 (match_operand:SI 2 "s_register_operand" "r")
1578 "TARGET_DSP_MULTIPLY"
1579 "smulbt%?\\t%0, %1, %2"
1580 [(set_attr "insn" "smulxy")
1581 (set_attr "predicable" "yes")]

;; top x top -> SMULTT.
1584 (define_insn "*mulhisi3tt"
1585 [(set (match_operand:SI 0 "s_register_operand" "=r")
1586 (mult:SI (ashiftrt:SI
1587 (match_operand:SI 1 "s_register_operand" "r")
1590 (match_operand:SI 2 "s_register_operand" "r")
1592 "TARGET_DSP_MULTIPLY"
1593 "smultt%?\\t%0, %1, %2"
1594 [(set_attr "insn" "smulxy")
1595 (set_attr "predicable" "yes")]

;; 16x16 + 32 accumulate -> SMLABB (%0 = %2 * %3 + %1).
1598 (define_insn "*mulhisi3addsi"
1599 [(set (match_operand:SI 0 "s_register_operand" "=r")
1600 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1601 (mult:SI (sign_extend:SI
1602 (match_operand:HI 2 "s_register_operand" "%r"))
1604 (match_operand:HI 3 "s_register_operand" "r")))))]
1605 "TARGET_DSP_MULTIPLY"
1606 "smlabb%?\\t%0, %2, %3, %1"
1607 [(set_attr "insn" "smlaxy")
1608 (set_attr "predicable" "yes")]

;; 16x16 + 64 accumulate -> SMLALBB into the %Q0/%R0 pair (accumulator must
;; already be in the destination, "0" constraint).
1611 (define_insn "*mulhidi3adddi"
1612 [(set (match_operand:DI 0 "s_register_operand" "=r")
1614 (match_operand:DI 1 "s_register_operand" "0")
1615 (mult:DI (sign_extend:DI
1616 (match_operand:HI 2 "s_register_operand" "%r"))
1618 (match_operand:HI 3 "s_register_operand" "r")))))]
1619 "TARGET_DSP_MULTIPLY"
1620 "smlalbb%?\\t%Q0, %R0, %2, %3"
1621 [(set_attr "insn" "smlalxy")
1622 (set_attr "predicable" "yes")])
;; mulsf3 expander: SFmode multiply for hard-float targets; visible fragment
;; forces operand 2 into a register for Maverick (truncated guard above).
1624 (define_expand "mulsf3"
1625 [(set (match_operand:SF 0 "s_register_operand" "")
1626 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1627 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1628 "TARGET_32BIT && TARGET_HARD_FLOAT"
1631 && !cirrus_fp_register (operands[2], SFmode))
1632 operands[2] = force_reg (SFmode, operands[2]);

;; muldf3 expander: DFmode counterpart of mulsf3.
1635 (define_expand "muldf3"
1636 [(set (match_operand:DF 0 "s_register_operand" "")
1637 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1638 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1639 "TARGET_32BIT && TARGET_HARD_FLOAT"
1642 && !cirrus_fp_register (operands[2], DFmode))
1643 operands[2] = force_reg (DFmode, operands[2]);

;; Division: only FPA and VFP provide hardware divide (not Maverick).
1648 (define_expand "divsf3"
1649 [(set (match_operand:SF 0 "s_register_operand" "")
1650 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1651 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1652 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"

1655 (define_expand "divdf3"
1656 [(set (match_operand:DF 0 "s_register_operand" "")
1657 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1658 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1659 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"

;; Modulo: FPA only.
1664 (define_expand "modsf3"
1665 [(set (match_operand:SF 0 "s_register_operand" "")
1666 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1667 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1668 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"

1671 (define_expand "moddf3"
1672 [(set (match_operand:DF 0 "s_register_operand" "")
1673 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1674 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1675 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1678 ;; Boolean and,ior,xor insns

1680 ;; Split up double word logical operations

1682 ;; Split up simple DImode logical operations. Simply perform the logical
1683 ;; operation on the upper and lower halves of the registers.
;; NOTE(review): the "(define_split" header lines of the four splits below
;; appear to be missing from this extract.
1685 [(set (match_operand:DI 0 "s_register_operand" "")
1686 (match_operator:DI 6 "logical_binary_operator"
1687 [(match_operand:DI 1 "s_register_operand" "")
1688 (match_operand:DI 2 "s_register_operand" "")]))]
1689 "TARGET_32BIT && reload_completed
1690 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1691 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1692 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1695 operands[3] = gen_highpart (SImode, operands[0]);
1696 operands[0] = gen_lowpart (SImode, operands[0]);
1697 operands[4] = gen_highpart (SImode, operands[1]);
1698 operands[1] = gen_lowpart (SImode, operands[1]);
1699 operands[5] = gen_highpart (SImode, operands[2]);
1700 operands[2] = gen_lowpart (SImode, operands[2]);

;; Split: logical op of a DI with a sign-extended SI.  The high half uses the
;; replicated sign bit (ashiftrt by 31) as the second operand.
1705 [(set (match_operand:DI 0 "s_register_operand" "")
1706 (match_operator:DI 6 "logical_binary_operator"
1707 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1708 (match_operand:DI 1 "s_register_operand" "")]))]
1709 "TARGET_32BIT && reload_completed"
1710 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1711 (set (match_dup 3) (match_op_dup:SI 6
1712 [(ashiftrt:SI (match_dup 2) (const_int 31))
1716 operands[3] = gen_highpart (SImode, operands[0]);
1717 operands[0] = gen_lowpart (SImode, operands[0]);
1718 operands[4] = gen_highpart (SImode, operands[1]);
1719 operands[1] = gen_lowpart (SImode, operands[1]);
1720 operands[5] = gen_highpart (SImode, operands[2]);
1721 operands[2] = gen_lowpart (SImode, operands[2]);

;; Split: IOR of a DI with a zero-extended SI.
1725 ;; The zero extend of operand 2 means we can just copy the high part of
1726 ;; operand1 into operand0.
1728 [(set (match_operand:DI 0 "s_register_operand" "")
1730 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1731 (match_operand:DI 1 "s_register_operand" "")))]
1732 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1733 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1734 (set (match_dup 3) (match_dup 4))]
1737 operands[4] = gen_highpart (SImode, operands[1]);
1738 operands[3] = gen_highpart (SImode, operands[0]);
1739 operands[0] = gen_lowpart (SImode, operands[0]);
1740 operands[1] = gen_lowpart (SImode, operands[1]);

;; Split: XOR of a DI with a zero-extended SI (same high-word copy trick).
1744 ;; The zero extend of operand 2 means we can just copy the high part of
1745 ;; operand1 into operand0.
1747 [(set (match_operand:DI 0 "s_register_operand" "")
1749 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1750 (match_operand:DI 1 "s_register_operand" "")))]
1751 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1752 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1753 (set (match_dup 3) (match_dup 4))]
1756 operands[4] = gen_highpart (SImode, operands[1]);
1757 operands[3] = gen_highpart (SImode, operands[0]);
1758 operands[0] = gen_lowpart (SImode, operands[0]);
1759 operands[1] = gen_lowpart (SImode, operands[1]);
;; anddi3: DImode AND, done as two SImode ANDs (length 8); excluded for
;; iWMMXt, which presumably has its own DI patterns -- TODO confirm.
1763 (define_insn "anddi3"
1764 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1765 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1766 (match_operand:DI 2 "s_register_operand" "r,r")))]
1767 "TARGET_32BIT && ! TARGET_IWMMXT"
1769 [(set_attr "length" "8")]

;; *anddi_zesidi_di: AND with a zero-extended SI.  Splits after reload into a
;; low-word AND plus clearing the high word (the zero-extension guarantees it
;; is zero).
1772 (define_insn_and_split "*anddi_zesidi_di"
1773 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1774 (and:DI (zero_extend:DI
1775 (match_operand:SI 2 "s_register_operand" "r,r"))
1776 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1779 "TARGET_32BIT && reload_completed"
1780 ; The zero extend of operand 2 clears the high word of the output
1782 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1783 (set (match_dup 3) (const_int 0))]
1786 operands[3] = gen_highpart (SImode, operands[0]);
1787 operands[0] = gen_lowpart (SImode, operands[0]);
1788 operands[1] = gen_lowpart (SImode, operands[1]);
1790 [(set_attr "length" "8")]

;; *anddi_sesdi_di: AND with a sign-extended SI (high word ANDed with the
;; sign bits); emitted as a two-insn sequence, length 8.
1793 (define_insn "*anddi_sesdi_di"
1794 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1795 (and:DI (sign_extend:DI
1796 (match_operand:SI 2 "s_register_operand" "r,r"))
1797 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1800 [(set_attr "length" "8")]
;; andsi3 expander.  ARM/Thumb-2: constant masks go through
;; arm_split_constant.  Thumb-1: tries cheaper equivalents first -- BIC with
;; the inverted constant when ~c < 256, a zero_extract (extzv) when the mask
;; is a low-bit run (1<<i)-1, or a shift-left/shift-right pair when the
;; complement is such a run; otherwise forces the constant into a register.
1803 (define_expand "andsi3"
1804 [(set (match_operand:SI 0 "s_register_operand" "")
1805 (and:SI (match_operand:SI 1 "s_register_operand" "")
1806 (match_operand:SI 2 "reg_or_int_operand" "")))]
1811 if (GET_CODE (operands[2]) == CONST_INT)
1813 arm_split_constant (AND, SImode, NULL_RTX,
1814 INTVAL (operands[2]), operands[0],
1815 operands[1], optimize && can_create_pseudo_p ());
1820 else /* TARGET_THUMB1 */
1822 if (GET_CODE (operands[2]) != CONST_INT)
1823 operands[2] = force_reg (SImode, operands[2]);
1828 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1830 operands[2] = force_reg (SImode,
1831 GEN_INT (~INTVAL (operands[2])));
1833 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1838 for (i = 9; i <= 31; i++)
1840 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1842 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1846 else if ((((HOST_WIDE_INT) 1) << i) - 1
1847 == ~INTVAL (operands[2]))
1849 rtx shift = GEN_INT (i);
1850 rtx reg = gen_reg_rtx (SImode);
1852 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1853 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1859 operands[2] = force_reg (SImode, operands[2]);

;; *arm_andsi3_insn: AND / BIC-with-inverted-immediate (#%B2) / split path
;; for constants encodable neither way (worst case length 16).
1865 ; ??? Check split length for Thumb-2
1866 (define_insn_and_split "*arm_andsi3_insn"
1867 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1868 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1869 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1873 bic%?\\t%0, %1, #%B2
1876 && GET_CODE (operands[2]) == CONST_INT
1877 && !(const_ok_for_arm (INTVAL (operands[2]))
1878 || const_ok_for_arm (~INTVAL (operands[2])))"
1879 [(clobber (const_int 0))]
1881 arm_split_constant (AND, SImode, curr_insn,
1882 INTVAL (operands[2]), operands[0], operands[1], 0);
1885 [(set_attr "length" "4,4,16")
1886 (set_attr "predicable" "yes")]

;; Thumb-1 AND: two-address (operand 1 tied to destination), 2 bytes.
1889 (define_insn "*thumb1_andsi3_insn"
1890 [(set (match_operand:SI 0 "register_operand" "=l")
1891 (and:SI (match_operand:SI 1 "register_operand" "%0")
1892 (match_operand:SI 2 "register_operand" "l")))]
1895 [(set_attr "length" "2")]

;; *andsi3_compare0: flag-setting AND -- ANDS, or BICS with the inverted
;; immediate (#%B2) for "K" constants.
1898 (define_insn "*andsi3_compare0"
1899 [(set (reg:CC_NOOV CC_REGNUM)
1901 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1902 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1904 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1905 (and:SI (match_dup 1) (match_dup 2)))]
1909 bic%.\\t%0, %1, #%B2"
1910 [(set_attr "conds" "set")]

;; Scratch variant: only the flags are wanted ("=X" lets the first
;; alternative discard the result entirely, e.g. via TST).
1913 (define_insn "*andsi3_compare0_scratch"
1914 [(set (reg:CC_NOOV CC_REGNUM)
1916 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1917 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1919 (clobber (match_scratch:SI 2 "=X,r"))]
1923 bic%.\\t%2, %0, #%B1"
1924 [(set_attr "conds" "set")]
;; Bitfield-test patterns.  The INTVAL guards restrict position/width so the
;; mask ((1 << width) - 1) << pos built below stays a valid ARM immediate.
;; NOTE(review): interior lines (conditions, (const_int 0) terms, closing
;; parens) are missing from this extract.

;; Test a bitfield against zero: emitted as TST with the computed mask.
1927 (define_insn "*zeroextractsi_compare0_scratch"
1928 [(set (reg:CC_NOOV CC_REGNUM)
1929 (compare:CC_NOOV (zero_extract:SI
1930 (match_operand:SI 0 "s_register_operand" "r")
1931 (match_operand 1 "const_int_operand" "n")
1932 (match_operand 2 "const_int_operand" "n"))
1935 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1936 && INTVAL (operands[1]) > 0
1937 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1938 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1940 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1941 << INTVAL (operands[2]));
1942 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1945 [(set_attr "conds" "set")]

;; (bitfield != 0) as an SI value: split into a flag-setting AND with the
;; mask plus a conditional rewrite of the result to 1 when non-zero.
1948 (define_insn_and_split "*ne_zeroextractsi"
1949 [(set (match_operand:SI 0 "s_register_operand" "=r")
1950 (ne:SI (zero_extract:SI
1951 (match_operand:SI 1 "s_register_operand" "r")
1952 (match_operand:SI 2 "const_int_operand" "n")
1953 (match_operand:SI 3 "const_int_operand" "n"))
1955 (clobber (reg:CC CC_REGNUM))]
1957 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1958 && INTVAL (operands[2]) > 0
1959 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1960 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1963 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1964 && INTVAL (operands[2]) > 0
1965 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1966 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1967 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1968 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1970 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1972 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1973 (match_dup 0) (const_int 1)))]
1975 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1976 << INTVAL (operands[3]));
1978 [(set_attr "conds" "clob")
1979 (set (attr "length")
1980 (if_then_else (eq_attr "is_thumb" "yes")

;; Variant for a field that reaches bit 31: implemented with a flag-setting
;; left shift (the split rewrites operand 2 as 32 - width).
1985 (define_insn_and_split "*ne_zeroextractsi_shifted"
1986 [(set (match_operand:SI 0 "s_register_operand" "=r")
1987 (ne:SI (zero_extract:SI
1988 (match_operand:SI 1 "s_register_operand" "r")
1989 (match_operand:SI 2 "const_int_operand" "n")
1992 (clobber (reg:CC CC_REGNUM))]
1996 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1997 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1999 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2001 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2002 (match_dup 0) (const_int 1)))]
2004 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2006 [(set_attr "conds" "clob")
2007 (set_attr "length" "8")]

;; if_then_else on (bitfield != 0): same ANDS-with-mask technique; operand 4
;; must not overlap the destination (reg_overlap_mentioned_p guard).
2010 (define_insn_and_split "*ite_ne_zeroextractsi"
2011 [(set (match_operand:SI 0 "s_register_operand" "=r")
2012 (if_then_else:SI (ne (zero_extract:SI
2013 (match_operand:SI 1 "s_register_operand" "r")
2014 (match_operand:SI 2 "const_int_operand" "n")
2015 (match_operand:SI 3 "const_int_operand" "n"))
2017 (match_operand:SI 4 "arm_not_operand" "rIK")
2019 (clobber (reg:CC CC_REGNUM))]
2021 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2022 && INTVAL (operands[2]) > 0
2023 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2024 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2025 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2028 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2029 && INTVAL (operands[2]) > 0
2030 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2031 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2032 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2033 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2034 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2036 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2038 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2039 (match_dup 0) (match_dup 4)))]
2041 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2042 << INTVAL (operands[3]));
2044 [(set_attr "conds" "clob")
2045 (set_attr "length" "8")]

;; Shifted variant of the above (field reaching bit 31, tested via ASHIFT).
2048 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2049 [(set (match_operand:SI 0 "s_register_operand" "=r")
2050 (if_then_else:SI (ne (zero_extract:SI
2051 (match_operand:SI 1 "s_register_operand" "r")
2052 (match_operand:SI 2 "const_int_operand" "n")
2055 (match_operand:SI 3 "arm_not_operand" "rIK")
2057 (clobber (reg:CC CC_REGNUM))]
2058 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2060 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2061 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2062 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2064 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2066 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2067 (match_dup 0) (match_dup 3)))]
2069 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2071 [(set_attr "conds" "clob")
2072 (set_attr "length" "8")]
2076 [(set (match_operand:SI 0 "s_register_operand" "")
2077 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2078 (match_operand:SI 2 "const_int_operand" "")
2079 (match_operand:SI 3 "const_int_operand" "")))
2080 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2082 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2083 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2085 HOST_WIDE_INT temp = INTVAL (operands[2]);
2087 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2088 operands[3] = GEN_INT (32 - temp);
2092 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
2094 [(set (match_operand:SI 0 "s_register_operand" "")
2095 (match_operator:SI 1 "shiftable_operator"
2096 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2097 (match_operand:SI 3 "const_int_operand" "")
2098 (match_operand:SI 4 "const_int_operand" ""))
2099 (match_operand:SI 5 "s_register_operand" "")]))
2100 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2102 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2105 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2108 HOST_WIDE_INT temp = INTVAL (operands[3]);
2110 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2111 operands[4] = GEN_INT (32 - temp);
2116 [(set (match_operand:SI 0 "s_register_operand" "")
2117 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2118 (match_operand:SI 2 "const_int_operand" "")
2119 (match_operand:SI 3 "const_int_operand" "")))]
2121 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2122 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2124 HOST_WIDE_INT temp = INTVAL (operands[2]);
2126 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2127 operands[3] = GEN_INT (32 - temp);
2132 [(set (match_operand:SI 0 "s_register_operand" "")
2133 (match_operator:SI 1 "shiftable_operator"
2134 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2135 (match_operand:SI 3 "const_int_operand" "")
2136 (match_operand:SI 4 "const_int_operand" ""))
2137 (match_operand:SI 5 "s_register_operand" "")]))
2138 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2140 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2143 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2146 HOST_WIDE_INT temp = INTVAL (operands[3]);
2148 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2149 operands[4] = GEN_INT (32 - temp);
2153 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2154 ;;; represented by the bitfield, then this will produce incorrect results.
2155 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2156 ;;; which have a real bit-field insert instruction, the truncation happens
2157 ;;; in the bit-field insert instruction itself. Since arm does not have a
2158 ;;; bit-field insert instruction, we would have to emit code here to truncate
2159 ;;; the value before we insert. This loses some of the advantage of having
2160 ;;; this insv pattern, so this pattern needs to be reevaluated.
2162 (define_expand "insv"
2163 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2164 (match_operand:SI 1 "general_operand" "")
2165 (match_operand:SI 2 "general_operand" ""))
2166 (match_operand:SI 3 "reg_or_int_operand" ""))]
2167 "TARGET_ARM || arm_arch_thumb2"
2170 int start_bit = INTVAL (operands[2]);
2171 int width = INTVAL (operands[1]);
2172 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2173 rtx target, subtarget;
2175 if (arm_arch_thumb2)
2177 bool use_bfi = TRUE;
2179 if (GET_CODE (operands[3]) == CONST_INT)
2181 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2185 emit_insn (gen_insv_zero (operands[0], operands[1],
2190 /* See if the set can be done with a single orr instruction. */
2191 if (val == mask && const_ok_for_arm (val << start_bit))
2197 if (GET_CODE (operands[3]) != REG)
2198 operands[3] = force_reg (SImode, operands[3]);
2200 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2206 target = operands[0];
2207 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2208 subreg as the final target. */
2209 if (GET_CODE (target) == SUBREG)
2211 subtarget = gen_reg_rtx (SImode);
2212 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2213 < GET_MODE_SIZE (SImode))
2214 target = SUBREG_REG (target);
2219 if (GET_CODE (operands[3]) == CONST_INT)
2221 /* Since we are inserting a known constant, we may be able to
2222 reduce the number of bits that we have to clear so that
2223 the mask becomes simple. */
2224 /* ??? This code does not check to see if the new mask is actually
2225 simpler. It may not be. */
2226 rtx op1 = gen_reg_rtx (SImode);
2227 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2228 start of this pattern. */
2229 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2230 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2232 emit_insn (gen_andsi3 (op1, operands[0],
2233 gen_int_mode (~mask2, SImode)));
2234 emit_insn (gen_iorsi3 (subtarget, op1,
2235 gen_int_mode (op3_value << start_bit, SImode)));
2237 else if (start_bit == 0
2238 && !(const_ok_for_arm (mask)
2239 || const_ok_for_arm (~mask)))
2241 /* A Trick, since we are setting the bottom bits in the word,
2242 we can shift operand[3] up, operand[0] down, OR them together
2243 and rotate the result back again. This takes 3 insns, and
2244 the third might be mergeable into another op. */
2245 /* The shift up copes with the possibility that operand[3] is
2246 wider than the bitfield. */
2247 rtx op0 = gen_reg_rtx (SImode);
2248 rtx op1 = gen_reg_rtx (SImode);
2250 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2251 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2252 emit_insn (gen_iorsi3 (op1, op1, op0));
2253 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2255 else if ((width + start_bit == 32)
2256 && !(const_ok_for_arm (mask)
2257 || const_ok_for_arm (~mask)))
2259 /* Similar trick, but slightly less efficient. */
2261 rtx op0 = gen_reg_rtx (SImode);
2262 rtx op1 = gen_reg_rtx (SImode);
2264 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2265 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2266 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2267 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2271 rtx op0 = gen_int_mode (mask, SImode);
2272 rtx op1 = gen_reg_rtx (SImode);
2273 rtx op2 = gen_reg_rtx (SImode);
2275 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2277 rtx tmp = gen_reg_rtx (SImode);
2279 emit_insn (gen_movsi (tmp, op0));
2283 /* Mask out any bits in operand[3] that are not needed. */
2284 emit_insn (gen_andsi3 (op1, operands[3], op0));
2286 if (GET_CODE (op0) == CONST_INT
2287 && (const_ok_for_arm (mask << start_bit)
2288 || const_ok_for_arm (~(mask << start_bit))))
2290 op0 = gen_int_mode (~(mask << start_bit), SImode);
2291 emit_insn (gen_andsi3 (op2, operands[0], op0));
2295 if (GET_CODE (op0) == CONST_INT)
2297 rtx tmp = gen_reg_rtx (SImode);
2299 emit_insn (gen_movsi (tmp, op0));
2304 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2306 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2310 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2312 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2315 if (subtarget != target)
2317 /* If TARGET is still a SUBREG, then it must be wider than a word,
2318 so we must be careful only to set the subword we were asked to. */
2319 if (GET_CODE (target) == SUBREG)
2320 emit_move_insn (target, subtarget);
2322 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2329 (define_insn "insv_zero"
2330 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2331 (match_operand:SI 1 "const_int_operand" "M")
2332 (match_operand:SI 2 "const_int_operand" "M"))
2336 [(set_attr "length" "4")
2337 (set_attr "predicable" "yes")]
2340 (define_insn "insv_t2"
2341 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2342 (match_operand:SI 1 "const_int_operand" "M")
2343 (match_operand:SI 2 "const_int_operand" "M"))
2344 (match_operand:SI 3 "s_register_operand" "r"))]
2346 "bfi%?\t%0, %3, %2, %1"
2347 [(set_attr "length" "4")
2348 (set_attr "predicable" "yes")]
2351 ; constants for op 2 will never be given to these patterns.
2352 (define_insn_and_split "*anddi_notdi_di"
2353 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2354 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2355 (match_operand:DI 2 "s_register_operand" "0,r")))]
2358 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2359 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2360 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2363 operands[3] = gen_highpart (SImode, operands[0]);
2364 operands[0] = gen_lowpart (SImode, operands[0]);
2365 operands[4] = gen_highpart (SImode, operands[1]);
2366 operands[1] = gen_lowpart (SImode, operands[1]);
2367 operands[5] = gen_highpart (SImode, operands[2]);
2368 operands[2] = gen_lowpart (SImode, operands[2]);
2370 [(set_attr "length" "8")
2371 (set_attr "predicable" "yes")]
2374 (define_insn_and_split "*anddi_notzesidi_di"
2375 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2376 (and:DI (not:DI (zero_extend:DI
2377 (match_operand:SI 2 "s_register_operand" "r,r")))
2378 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2381 bic%?\\t%Q0, %Q1, %2
2383 ; (not (zero_extend ...)) allows us to just copy the high word from
2384 ; operand1 to operand0.
2387 && operands[0] != operands[1]"
2388 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2389 (set (match_dup 3) (match_dup 4))]
2392 operands[3] = gen_highpart (SImode, operands[0]);
2393 operands[0] = gen_lowpart (SImode, operands[0]);
2394 operands[4] = gen_highpart (SImode, operands[1]);
2395 operands[1] = gen_lowpart (SImode, operands[1]);
2397 [(set_attr "length" "4,8")
2398 (set_attr "predicable" "yes")]
2401 (define_insn_and_split "*anddi_notsesidi_di"
2402 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2403 (and:DI (not:DI (sign_extend:DI
2404 (match_operand:SI 2 "s_register_operand" "r,r")))
2405 (match_operand:DI 1 "s_register_operand" "0,r")))]
2408 "TARGET_32BIT && reload_completed"
2409 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2410 (set (match_dup 3) (and:SI (not:SI
2411 (ashiftrt:SI (match_dup 2) (const_int 31)))
2415 operands[3] = gen_highpart (SImode, operands[0]);
2416 operands[0] = gen_lowpart (SImode, operands[0]);
2417 operands[4] = gen_highpart (SImode, operands[1]);
2418 operands[1] = gen_lowpart (SImode, operands[1]);
2420 [(set_attr "length" "8")
2421 (set_attr "predicable" "yes")]
2424 (define_insn "andsi_notsi_si"
2425 [(set (match_operand:SI 0 "s_register_operand" "=r")
2426 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2427 (match_operand:SI 1 "s_register_operand" "r")))]
2429 "bic%?\\t%0, %1, %2"
2430 [(set_attr "predicable" "yes")]
2433 (define_insn "bicsi3"
2434 [(set (match_operand:SI 0 "register_operand" "=l")
2435 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2436 (match_operand:SI 2 "register_operand" "0")))]
2439 [(set_attr "length" "2")]
2442 (define_insn "andsi_not_shiftsi_si"
2443 [(set (match_operand:SI 0 "s_register_operand" "=r")
2444 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2445 [(match_operand:SI 2 "s_register_operand" "r")
2446 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2447 (match_operand:SI 1 "s_register_operand" "r")))]
2449 "bic%?\\t%0, %1, %2%S4"
2450 [(set_attr "predicable" "yes")
2451 (set_attr "shift" "2")
2452 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2453 (const_string "alu_shift")
2454 (const_string "alu_shift_reg")))]
2457 (define_insn "*andsi_notsi_si_compare0"
2458 [(set (reg:CC_NOOV CC_REGNUM)
2460 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2461 (match_operand:SI 1 "s_register_operand" "r"))
2463 (set (match_operand:SI 0 "s_register_operand" "=r")
2464 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2466 "bic%.\\t%0, %1, %2"
2467 [(set_attr "conds" "set")]
2470 (define_insn "*andsi_notsi_si_compare0_scratch"
2471 [(set (reg:CC_NOOV CC_REGNUM)
2473 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2474 (match_operand:SI 1 "s_register_operand" "r"))
2476 (clobber (match_scratch:SI 0 "=r"))]
2478 "bic%.\\t%0, %1, %2"
2479 [(set_attr "conds" "set")]
2482 (define_insn "iordi3"
2483 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2484 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2485 (match_operand:DI 2 "s_register_operand" "r,r")))]
2486 "TARGET_32BIT && ! TARGET_IWMMXT"
2488 [(set_attr "length" "8")
2489 (set_attr "predicable" "yes")]
2492 (define_insn "*iordi_zesidi_di"
2493 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2494 (ior:DI (zero_extend:DI
2495 (match_operand:SI 2 "s_register_operand" "r,r"))
2496 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2499 orr%?\\t%Q0, %Q1, %2
2501 [(set_attr "length" "4,8")
2502 (set_attr "predicable" "yes")]
2505 (define_insn "*iordi_sesidi_di"
2506 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2507 (ior:DI (sign_extend:DI
2508 (match_operand:SI 2 "s_register_operand" "r,r"))
2509 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2512 [(set_attr "length" "8")
2513 (set_attr "predicable" "yes")]
2516 (define_expand "iorsi3"
2517 [(set (match_operand:SI 0 "s_register_operand" "")
2518 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2519 (match_operand:SI 2 "reg_or_int_operand" "")))]
2522 if (GET_CODE (operands[2]) == CONST_INT)
2526 arm_split_constant (IOR, SImode, NULL_RTX,
2527 INTVAL (operands[2]), operands[0], operands[1],
2528 optimize && can_create_pseudo_p ());
2531 else /* TARGET_THUMB1 */
2532 operands [2] = force_reg (SImode, operands [2]);
2537 (define_insn_and_split "*arm_iorsi3"
2538 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2539 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2540 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2546 && GET_CODE (operands[2]) == CONST_INT
2547 && !const_ok_for_arm (INTVAL (operands[2]))"
2548 [(clobber (const_int 0))]
2550 arm_split_constant (IOR, SImode, curr_insn,
2551 INTVAL (operands[2]), operands[0], operands[1], 0);
2554 [(set_attr "length" "4,16")
2555 (set_attr "predicable" "yes")]
2558 (define_insn "*thumb1_iorsi3"
2559 [(set (match_operand:SI 0 "register_operand" "=l")
2560 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2561 (match_operand:SI 2 "register_operand" "l")))]
2564 [(set_attr "length" "2")]
2568 [(match_scratch:SI 3 "r")
2569 (set (match_operand:SI 0 "arm_general_register_operand" "")
2570 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2571 (match_operand:SI 2 "const_int_operand" "")))]
2573 && !const_ok_for_arm (INTVAL (operands[2]))
2574 && const_ok_for_arm (~INTVAL (operands[2]))"
2575 [(set (match_dup 3) (match_dup 2))
2576 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2580 (define_insn "*iorsi3_compare0"
2581 [(set (reg:CC_NOOV CC_REGNUM)
2582 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2583 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2585 (set (match_operand:SI 0 "s_register_operand" "=r")
2586 (ior:SI (match_dup 1) (match_dup 2)))]
2588 "orr%.\\t%0, %1, %2"
2589 [(set_attr "conds" "set")]
2592 (define_insn "*iorsi3_compare0_scratch"
2593 [(set (reg:CC_NOOV CC_REGNUM)
2594 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2595 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2597 (clobber (match_scratch:SI 0 "=r"))]
2599 "orr%.\\t%0, %1, %2"
2600 [(set_attr "conds" "set")]
2603 (define_insn "xordi3"
2604 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2605 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2606 (match_operand:DI 2 "s_register_operand" "r,r")))]
2607 "TARGET_32BIT && !TARGET_IWMMXT"
2609 [(set_attr "length" "8")
2610 (set_attr "predicable" "yes")]
2613 (define_insn "*xordi_zesidi_di"
2614 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2615 (xor:DI (zero_extend:DI
2616 (match_operand:SI 2 "s_register_operand" "r,r"))
2617 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2620 eor%?\\t%Q0, %Q1, %2
2622 [(set_attr "length" "4,8")
2623 (set_attr "predicable" "yes")]
2626 (define_insn "*xordi_sesidi_di"
2627 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2628 (xor:DI (sign_extend:DI
2629 (match_operand:SI 2 "s_register_operand" "r,r"))
2630 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2633 [(set_attr "length" "8")
2634 (set_attr "predicable" "yes")]
2637 (define_expand "xorsi3"
2638 [(set (match_operand:SI 0 "s_register_operand" "")
2639 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2640 (match_operand:SI 2 "arm_rhs_operand" "")))]
2643 if (GET_CODE (operands[2]) == CONST_INT)
2644 operands[2] = force_reg (SImode, operands[2]);
2648 (define_insn "*arm_xorsi3"
2649 [(set (match_operand:SI 0 "s_register_operand" "=r")
2650 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2651 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2653 "eor%?\\t%0, %1, %2"
2654 [(set_attr "predicable" "yes")]
2657 (define_insn "*thumb1_xorsi3"
2658 [(set (match_operand:SI 0 "register_operand" "=l")
2659 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2660 (match_operand:SI 2 "register_operand" "l")))]
2663 [(set_attr "length" "2")]
2666 (define_insn "*xorsi3_compare0"
2667 [(set (reg:CC_NOOV CC_REGNUM)
2668 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2669 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2671 (set (match_operand:SI 0 "s_register_operand" "=r")
2672 (xor:SI (match_dup 1) (match_dup 2)))]
2674 "eor%.\\t%0, %1, %2"
2675 [(set_attr "conds" "set")]
2678 (define_insn "*xorsi3_compare0_scratch"
2679 [(set (reg:CC_NOOV CC_REGNUM)
2680 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2681 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2685 [(set_attr "conds" "set")]
2688 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2689 ; (NOT D) we can sometimes merge the final NOT into one of the following
2693 [(set (match_operand:SI 0 "s_register_operand" "")
2694 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2695 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2696 (match_operand:SI 3 "arm_rhs_operand" "")))
2697 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2699 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2700 (not:SI (match_dup 3))))
2701 (set (match_dup 0) (not:SI (match_dup 4)))]
2705 (define_insn "*andsi_iorsi3_notsi"
2706 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2707 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2708 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2709 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2711 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2712 [(set_attr "length" "8")
2713 (set_attr "ce_count" "2")
2714 (set_attr "predicable" "yes")]
2717 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2718 ; insns are available?
2720 [(set (match_operand:SI 0 "s_register_operand" "")
2721 (match_operator:SI 1 "logical_binary_operator"
2722 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2723 (match_operand:SI 3 "const_int_operand" "")
2724 (match_operand:SI 4 "const_int_operand" ""))
2725 (match_operator:SI 9 "logical_binary_operator"
2726 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2727 (match_operand:SI 6 "const_int_operand" ""))
2728 (match_operand:SI 7 "s_register_operand" "")])]))
2729 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2731 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2732 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2735 [(ashift:SI (match_dup 2) (match_dup 4))
2739 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2742 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2746 [(set (match_operand:SI 0 "s_register_operand" "")
2747 (match_operator:SI 1 "logical_binary_operator"
2748 [(match_operator:SI 9 "logical_binary_operator"
2749 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2750 (match_operand:SI 6 "const_int_operand" ""))
2751 (match_operand:SI 7 "s_register_operand" "")])
2752 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2753 (match_operand:SI 3 "const_int_operand" "")
2754 (match_operand:SI 4 "const_int_operand" ""))]))
2755 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2757 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2758 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2761 [(ashift:SI (match_dup 2) (match_dup 4))
2765 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2768 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2772 [(set (match_operand:SI 0 "s_register_operand" "")
2773 (match_operator:SI 1 "logical_binary_operator"
2774 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2775 (match_operand:SI 3 "const_int_operand" "")
2776 (match_operand:SI 4 "const_int_operand" ""))
2777 (match_operator:SI 9 "logical_binary_operator"
2778 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2779 (match_operand:SI 6 "const_int_operand" ""))
2780 (match_operand:SI 7 "s_register_operand" "")])]))
2781 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2783 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2784 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2787 [(ashift:SI (match_dup 2) (match_dup 4))
2791 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2794 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2798 [(set (match_operand:SI 0 "s_register_operand" "")
2799 (match_operator:SI 1 "logical_binary_operator"
2800 [(match_operator:SI 9 "logical_binary_operator"
2801 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2802 (match_operand:SI 6 "const_int_operand" ""))
2803 (match_operand:SI 7 "s_register_operand" "")])
2804 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2805 (match_operand:SI 3 "const_int_operand" "")
2806 (match_operand:SI 4 "const_int_operand" ""))]))
2807 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2809 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2810 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2813 [(ashift:SI (match_dup 2) (match_dup 4))
2817 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2820 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2824 ;; Minimum and maximum insns
2826 (define_expand "smaxsi3"
2828 (set (match_operand:SI 0 "s_register_operand" "")
2829 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2830 (match_operand:SI 2 "arm_rhs_operand" "")))
2831 (clobber (reg:CC CC_REGNUM))])]
2834 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2836 /* No need for a clobber of the condition code register here. */
2837 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2838 gen_rtx_SMAX (SImode, operands[1],
2844 (define_insn "*smax_0"
2845 [(set (match_operand:SI 0 "s_register_operand" "=r")
2846 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2849 "bic%?\\t%0, %1, %1, asr #31"
2850 [(set_attr "predicable" "yes")]
2853 (define_insn "*smax_m1"
2854 [(set (match_operand:SI 0 "s_register_operand" "=r")
2855 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2858 "orr%?\\t%0, %1, %1, asr #31"
2859 [(set_attr "predicable" "yes")]
2862 (define_insn "*arm_smax_insn"
2863 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2864 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2865 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2866 (clobber (reg:CC CC_REGNUM))]
2869 cmp\\t%1, %2\;movlt\\t%0, %2
2870 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2871 [(set_attr "conds" "clob")
2872 (set_attr "length" "8,12")]
2875 (define_expand "sminsi3"
2877 (set (match_operand:SI 0 "s_register_operand" "")
2878 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2879 (match_operand:SI 2 "arm_rhs_operand" "")))
2880 (clobber (reg:CC CC_REGNUM))])]
2883 if (operands[2] == const0_rtx)
2885 /* No need for a clobber of the condition code register here. */
2886 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2887 gen_rtx_SMIN (SImode, operands[1],
2893 (define_insn "*smin_0"
2894 [(set (match_operand:SI 0 "s_register_operand" "=r")
2895 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2898 "and%?\\t%0, %1, %1, asr #31"
2899 [(set_attr "predicable" "yes")]
2902 (define_insn "*arm_smin_insn"
2903 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2904 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2905 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2906 (clobber (reg:CC CC_REGNUM))]
2909 cmp\\t%1, %2\;movge\\t%0, %2
2910 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2911 [(set_attr "conds" "clob")
2912 (set_attr "length" "8,12")]
2915 (define_expand "umaxsi3"
2917 (set (match_operand:SI 0 "s_register_operand" "")
2918 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2919 (match_operand:SI 2 "arm_rhs_operand" "")))
2920 (clobber (reg:CC CC_REGNUM))])]
2925 (define_insn "*arm_umaxsi3"
2926 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2927 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2928 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2929 (clobber (reg:CC CC_REGNUM))]
2932 cmp\\t%1, %2\;movcc\\t%0, %2
2933 cmp\\t%1, %2\;movcs\\t%0, %1
2934 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2935 [(set_attr "conds" "clob")
2936 (set_attr "length" "8,8,12")]
2939 (define_expand "uminsi3"
2941 (set (match_operand:SI 0 "s_register_operand" "")
2942 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2943 (match_operand:SI 2 "arm_rhs_operand" "")))
2944 (clobber (reg:CC CC_REGNUM))])]
2949 (define_insn "*arm_uminsi3"
2950 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2951 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2952 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2953 (clobber (reg:CC CC_REGNUM))]
2956 cmp\\t%1, %2\;movcs\\t%0, %2
2957 cmp\\t%1, %2\;movcc\\t%0, %1
2958 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2959 [(set_attr "conds" "clob")
2960 (set_attr "length" "8,8,12")]
2963 (define_insn "*store_minmaxsi"
2964 [(set (match_operand:SI 0 "memory_operand" "=m")
2965 (match_operator:SI 3 "minmax_operator"
2966 [(match_operand:SI 1 "s_register_operand" "r")
2967 (match_operand:SI 2 "s_register_operand" "r")]))
2968 (clobber (reg:CC CC_REGNUM))]
2971 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2972 operands[1], operands[2]);
2973 output_asm_insn (\"cmp\\t%1, %2\", operands);
2975 output_asm_insn (\"ite\t%d3\", operands);
2976 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2977 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2980 [(set_attr "conds" "clob")
2981 (set (attr "length")
2982 (if_then_else (eq_attr "is_thumb" "yes")
2985 (set_attr "type" "store1")]
2988 ; Reject the frame pointer in operand[1], since reloading this after
2989 ; it has been eliminated can cause carnage.
2990 (define_insn "*minmax_arithsi"
2991 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2992 (match_operator:SI 4 "shiftable_operator"
2993 [(match_operator:SI 5 "minmax_operator"
2994 [(match_operand:SI 2 "s_register_operand" "r,r")
2995 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2996 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2997 (clobber (reg:CC CC_REGNUM))]
2998 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3001 enum rtx_code code = GET_CODE (operands[4]);
3004 if (which_alternative != 0 || operands[3] != const0_rtx
3005 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3010 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3011 operands[2], operands[3]);
3012 output_asm_insn (\"cmp\\t%2, %3\", operands);
3016 output_asm_insn (\"ite\\t%d5\", operands);
3018 output_asm_insn (\"it\\t%d5\", operands);
3020 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3022 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3025 [(set_attr "conds" "clob")
3026 (set (attr "length")
3027 (if_then_else (eq_attr "is_thumb" "yes")
3033 ;; Shift and rotation insns
3035 (define_expand "ashldi3"
3036 [(set (match_operand:DI 0 "s_register_operand" "")
3037 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3038 (match_operand:SI 2 "reg_or_int_operand" "")))]
3041 if (GET_CODE (operands[2]) == CONST_INT)
3043 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3045 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3048 /* Ideally we shouldn't fail here if we could know that operands[1]
3049 ends up already living in an iwmmxt register. Otherwise it's
3050 cheaper to have the alternate code being generated than moving
3051 values to iwmmxt regs and back. */
3054 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3059 (define_insn "arm_ashldi3_1bit"
3060 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3061 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3063 (clobber (reg:CC CC_REGNUM))]
3065 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3066 [(set_attr "conds" "clob")
3067 (set_attr "length" "8")]
3070 (define_expand "ashlsi3"
3071 [(set (match_operand:SI 0 "s_register_operand" "")
3072 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3073 (match_operand:SI 2 "arm_rhs_operand" "")))]
3076 if (GET_CODE (operands[2]) == CONST_INT
3077 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3079 emit_insn (gen_movsi (operands[0], const0_rtx));
3085 (define_insn "*thumb1_ashlsi3"
3086 [(set (match_operand:SI 0 "register_operand" "=l,l")
3087 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3088 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3091 [(set_attr "length" "2")]
3094 (define_expand "ashrdi3"
3095 [(set (match_operand:DI 0 "s_register_operand" "")
3096 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3097 (match_operand:SI 2 "reg_or_int_operand" "")))]
3100 if (GET_CODE (operands[2]) == CONST_INT)
3102 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3104 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3107 /* Ideally we shouldn't fail here if we could know that operands[1]
3108 ends up already living in an iwmmxt register. Otherwise it's
3109 cheaper to have the alternate code being generated than moving
3110 values to iwmmxt regs and back. */
3113 else if (!TARGET_REALLY_IWMMXT)
3118 (define_insn "arm_ashrdi3_1bit"
3119 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3120 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3122 (clobber (reg:CC CC_REGNUM))]
3124 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3125 [(set_attr "conds" "clob")
3126 (set_attr "length" "8")]
3129 (define_expand "ashrsi3"
3130 [(set (match_operand:SI 0 "s_register_operand" "")
3131 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3132 (match_operand:SI 2 "arm_rhs_operand" "")))]
3135 if (GET_CODE (operands[2]) == CONST_INT
3136 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3137 operands[2] = GEN_INT (31);
3141 (define_insn "*thumb1_ashrsi3"
3142 [(set (match_operand:SI 0 "register_operand" "=l,l")
3143 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3144 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3147 [(set_attr "length" "2")]
3150 (define_expand "lshrdi3"
3151 [(set (match_operand:DI 0 "s_register_operand" "")
3152 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3153 (match_operand:SI 2 "reg_or_int_operand" "")))]
3156 if (GET_CODE (operands[2]) == CONST_INT)
3158 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3160 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3163 /* Ideally we shouldn't fail here if we could know that operands[1]
3164 ends up already living in an iwmmxt register. Otherwise it's
3165 cheaper to have the alternate code being generated than moving
3166 values to iwmmxt regs and back. */
3169 else if (!TARGET_REALLY_IWMMXT)
3174 (define_insn "arm_lshrdi3_1bit"
3175 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3176 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3178 (clobber (reg:CC CC_REGNUM))]
3180 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3181 [(set_attr "conds" "clob")
3182 (set_attr "length" "8")]
3185 (define_expand "lshrsi3"
3186 [(set (match_operand:SI 0 "s_register_operand" "")
3187 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3188 (match_operand:SI 2 "arm_rhs_operand" "")))]
3191 if (GET_CODE (operands[2]) == CONST_INT
3192 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3194 emit_insn (gen_movsi (operands[0], const0_rtx));
3200 (define_insn "*thumb1_lshrsi3"
3201 [(set (match_operand:SI 0 "register_operand" "=l,l")
3202 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3203 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3206 [(set_attr "length" "2")]
3209 (define_expand "rotlsi3"
3210 [(set (match_operand:SI 0 "s_register_operand" "")
3211 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3212 (match_operand:SI 2 "reg_or_int_operand" "")))]
3215 if (GET_CODE (operands[2]) == CONST_INT)
3216 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3219 rtx reg = gen_reg_rtx (SImode);
3220 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3226 (define_expand "rotrsi3"
3227 [(set (match_operand:SI 0 "s_register_operand" "")
3228 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3229 (match_operand:SI 2 "arm_rhs_operand" "")))]
3234 if (GET_CODE (operands[2]) == CONST_INT
3235 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3236 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3238 else /* TARGET_THUMB1 */
3240 if (GET_CODE (operands [2]) == CONST_INT)
3241 operands [2] = force_reg (SImode, operands[2]);
3246 (define_insn "*thumb1_rotrsi3"
3247 [(set (match_operand:SI 0 "register_operand" "=l")
3248 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3249 (match_operand:SI 2 "register_operand" "l")))]
3252 [(set_attr "length" "2")]
3255 (define_insn "*arm_shiftsi3"
3256 [(set (match_operand:SI 0 "s_register_operand" "=r")
3257 (match_operator:SI 3 "shift_operator"
3258 [(match_operand:SI 1 "s_register_operand" "r")
3259 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3261 "* return arm_output_shift(operands, 0);"
3262 [(set_attr "predicable" "yes")
3263 (set_attr "shift" "1")
3264 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3265 (const_string "alu_shift")
3266 (const_string "alu_shift_reg")))]
3269 (define_insn "*shiftsi3_compare0"
3270 [(set (reg:CC_NOOV CC_REGNUM)
3271 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3272 [(match_operand:SI 1 "s_register_operand" "r")
3273 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3275 (set (match_operand:SI 0 "s_register_operand" "=r")
3276 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3278 "* return arm_output_shift(operands, 1);"
3279 [(set_attr "conds" "set")
3280 (set_attr "shift" "1")
3281 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3282 (const_string "alu_shift")
3283 (const_string "alu_shift_reg")))]
3286 (define_insn "*shiftsi3_compare0_scratch"
3287 [(set (reg:CC_NOOV CC_REGNUM)
3288 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3289 [(match_operand:SI 1 "s_register_operand" "r")
3290 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3292 (clobber (match_scratch:SI 0 "=r"))]
3294 "* return arm_output_shift(operands, 1);"
3295 [(set_attr "conds" "set")
3296 (set_attr "shift" "1")]
3299 (define_insn "*arm_notsi_shiftsi"
3300 [(set (match_operand:SI 0 "s_register_operand" "=r")
3301 (not:SI (match_operator:SI 3 "shift_operator"
3302 [(match_operand:SI 1 "s_register_operand" "r")
3303 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3306 [(set_attr "predicable" "yes")
3307 (set_attr "shift" "1")
3308 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3309 (const_string "alu_shift")
3310 (const_string "alu_shift_reg")))]
3313 (define_insn "*arm_notsi_shiftsi_compare0"
3314 [(set (reg:CC_NOOV CC_REGNUM)
3315 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3316 [(match_operand:SI 1 "s_register_operand" "r")
3317 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3319 (set (match_operand:SI 0 "s_register_operand" "=r")
3320 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3323 [(set_attr "conds" "set")
3324 (set_attr "shift" "1")
3325 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3326 (const_string "alu_shift")
3327 (const_string "alu_shift_reg")))]
3330 (define_insn "*arm_not_shiftsi_compare0_scratch"
3331 [(set (reg:CC_NOOV CC_REGNUM)
3332 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3333 [(match_operand:SI 1 "s_register_operand" "r")
3334 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3336 (clobber (match_scratch:SI 0 "=r"))]
3339 [(set_attr "conds" "set")
3340 (set_attr "shift" "1")
3341 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3342 (const_string "alu_shift")
3343 (const_string "alu_shift_reg")))]
3346 ;; We don't really have extzv, but defining this using shifts helps
3347 ;; to reduce register pressure later on.
3349 (define_expand "extzv"
3351 (ashift:SI (match_operand:SI 1 "register_operand" "")
3352 (match_operand:SI 2 "const_int_operand" "")))
3353 (set (match_operand:SI 0 "register_operand" "")
3354 (lshiftrt:SI (match_dup 4)
3355 (match_operand:SI 3 "const_int_operand" "")))]
3356 "TARGET_THUMB1 || arm_arch_thumb2"
3359 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3360 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3362 if (arm_arch_thumb2)
3364 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3369 operands[3] = GEN_INT (rshift);
3373 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3377 operands[2] = GEN_INT (lshift);
3378 operands[4] = gen_reg_rtx (SImode);
3383 [(set (match_operand:SI 0 "s_register_operand" "=r")
3384 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3385 (match_operand:SI 2 "const_int_operand" "M")
3386 (match_operand:SI 3 "const_int_operand" "M")))]
3388 "sbfx%?\t%0, %1, %3, %2"
3389 [(set_attr "length" "4")
3390 (set_attr "predicable" "yes")]
3393 (define_insn "extzv_t2"
3394 [(set (match_operand:SI 0 "s_register_operand" "=r")
3395 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3396 (match_operand:SI 2 "const_int_operand" "M")
3397 (match_operand:SI 3 "const_int_operand" "M")))]
3399 "ubfx%?\t%0, %1, %3, %2"
3400 [(set_attr "length" "4")
3401 (set_attr "predicable" "yes")]
3405 ;; Unary arithmetic insns
3407 (define_expand "negdi2"
3409 [(set (match_operand:DI 0 "s_register_operand" "")
3410 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3411 (clobber (reg:CC CC_REGNUM))])]
3416 if (GET_CODE (operands[1]) != REG)
3417 operands[1] = force_reg (SImode, operands[1]);
3422 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3423 ;; The second alternative is to allow the common case of a *full* overlap.
3424 (define_insn "*arm_negdi2"
3425 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3426 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3427 (clobber (reg:CC CC_REGNUM))]
3429 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3430 [(set_attr "conds" "clob")
3431 (set_attr "length" "8")]
3434 (define_insn "*thumb1_negdi2"
3435 [(set (match_operand:DI 0 "register_operand" "=&l")
3436 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3437 (clobber (reg:CC CC_REGNUM))]
3439 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3440 [(set_attr "length" "6")]
3443 (define_expand "negsi2"
3444 [(set (match_operand:SI 0 "s_register_operand" "")
3445 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3450 (define_insn "*arm_negsi2"
3451 [(set (match_operand:SI 0 "s_register_operand" "=r")
3452 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3454 "rsb%?\\t%0, %1, #0"
3455 [(set_attr "predicable" "yes")]
3458 (define_insn "*thumb1_negsi2"
3459 [(set (match_operand:SI 0 "register_operand" "=l")
3460 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3463 [(set_attr "length" "2")]
3466 (define_expand "negsf2"
3467 [(set (match_operand:SF 0 "s_register_operand" "")
3468 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3469 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3473 (define_expand "negdf2"
3474 [(set (match_operand:DF 0 "s_register_operand" "")
3475 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3476 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3479 ;; abssi2 doesn't really clobber the condition codes if a different register
3480 ;; is being set. To keep things simple, assume during rtl manipulations that
3481 ;; it does, but tell the final scan operator the truth. Similarly for
3484 (define_expand "abssi2"
3486 [(set (match_operand:SI 0 "s_register_operand" "")
3487 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3488 (clobber (match_dup 2))])]
3492 operands[2] = gen_rtx_SCRATCH (SImode);
3494 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3497 (define_insn "*arm_abssi2"
3498 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3499 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3500 (clobber (reg:CC CC_REGNUM))]
3503 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3504 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3505 [(set_attr "conds" "clob,*")
3506 (set_attr "shift" "1")
3507 ;; predicable can't be set based on the variant, so left as no
3508 (set_attr "length" "8")]
3511 (define_insn_and_split "*thumb1_abssi2"
3512 [(set (match_operand:SI 0 "s_register_operand" "=l")
3513 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3514 (clobber (match_scratch:SI 2 "=&l"))]
3517 "TARGET_THUMB1 && reload_completed"
3518 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3519 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3520 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3522 [(set_attr "length" "6")]
3525 (define_insn "*arm_neg_abssi2"
3526 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3527 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3528 (clobber (reg:CC CC_REGNUM))]
3531 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3532 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3533 [(set_attr "conds" "clob,*")
3534 (set_attr "shift" "1")
3535 ;; predicable can't be set based on the variant, so left as no
3536 (set_attr "length" "8")]
3539 (define_insn_and_split "*thumb1_neg_abssi2"
3540 [(set (match_operand:SI 0 "s_register_operand" "=l")
3541 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3542 (clobber (match_scratch:SI 2 "=&l"))]
3545 "TARGET_THUMB1 && reload_completed"
3546 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3547 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3548 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3550 [(set_attr "length" "6")]
3553 (define_expand "abssf2"
3554 [(set (match_operand:SF 0 "s_register_operand" "")
3555 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3556 "TARGET_32BIT && TARGET_HARD_FLOAT"
3559 (define_expand "absdf2"
3560 [(set (match_operand:DF 0 "s_register_operand" "")
3561 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3562 "TARGET_32BIT && TARGET_HARD_FLOAT"
3565 (define_expand "sqrtsf2"
3566 [(set (match_operand:SF 0 "s_register_operand" "")
3567 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3568 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3571 (define_expand "sqrtdf2"
3572 [(set (match_operand:DF 0 "s_register_operand" "")
3573 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3574 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; DImode one's complement.  After reload this is split into two
;; independent SImode NOTs, one on the low word and one on the high word
;; (the prepare-statements rewrite operands 0/1 to their low parts and
;; stash the high parts in operands 2/3).  The "&r,?r / &r,0" alternatives
;; presumably handle the no-overlap and full-overlap register cases —
;; NOTE(review): the alternative-selection lines are elided here; confirm
;; against the full pattern.
3577 (define_insn_and_split "one_cmpldi2"
3578 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3579 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3582 "TARGET_32BIT && reload_completed"
;; Split pattern: low word first, then high word.
3583 [(set (match_dup 0) (not:SI (match_dup 1)))
3584 (set (match_dup 2) (not:SI (match_dup 3)))]
;; Extract the high parts before the low parts overwrite operands 0/1.
3587 operands[2] = gen_highpart (SImode, operands[0]);
3588 operands[0] = gen_lowpart (SImode, operands[0]);
3589 operands[3] = gen_highpart (SImode, operands[1]);
3590 operands[1] = gen_lowpart (SImode, operands[1]);
;; Two 4-byte MVNs before splitting.
3592 [(set_attr "length" "8")
3593 (set_attr "predicable" "yes")]
3596 (define_expand "one_cmplsi2"
3597 [(set (match_operand:SI 0 "s_register_operand" "")
3598 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3603 (define_insn "*arm_one_cmplsi2"
3604 [(set (match_operand:SI 0 "s_register_operand" "=r")
3605 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3608 [(set_attr "predicable" "yes")]
3611 (define_insn "*thumb1_one_cmplsi2"
3612 [(set (match_operand:SI 0 "register_operand" "=l")
3613 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3616 [(set_attr "length" "2")]
3619 (define_insn "*notsi_compare0"
3620 [(set (reg:CC_NOOV CC_REGNUM)
3621 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3623 (set (match_operand:SI 0 "s_register_operand" "=r")
3624 (not:SI (match_dup 1)))]
3627 [(set_attr "conds" "set")]
3630 (define_insn "*notsi_compare0_scratch"
3631 [(set (reg:CC_NOOV CC_REGNUM)
3632 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3634 (clobber (match_scratch:SI 0 "=r"))]
3637 [(set_attr "conds" "set")]
3640 ;; Fixed <--> Floating conversion insns
3642 (define_expand "floatsisf2"
3643 [(set (match_operand:SF 0 "s_register_operand" "")
3644 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3645 "TARGET_32BIT && TARGET_HARD_FLOAT"
3647 if (TARGET_MAVERICK)
3649 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3654 (define_expand "floatsidf2"
3655 [(set (match_operand:DF 0 "s_register_operand" "")
3656 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3657 "TARGET_32BIT && TARGET_HARD_FLOAT"
3659 if (TARGET_MAVERICK)
3661 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; Truncating SF -> signed SI conversion.
;; For Maverick (Cirrus FP) the operation is done by gen_cirrus_truncsfsi2,
;; which needs both operands in Cirrus-compatible registers; force each
;; operand into a register when cirrus_fp_register rejects it.
3666 (define_expand "fix_truncsfsi2"
3667 [(set (match_operand:SI 0 "s_register_operand" "")
3668 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3669 "TARGET_32BIT && TARGET_HARD_FLOAT"
3671 if (TARGET_MAVERICK)
3673 if (!cirrus_fp_register (operands[0], SImode))
3674 operands[0] = force_reg (SImode, operands[0]);
3675 if (!cirrus_fp_register (operands[1], SFmode))
;; Bug fix: reload operand 1 from itself, not from operand 0.  The old
;; code (force_reg (SFmode, operands[0])) copy-pasted the destination,
;; replacing the FP source value with the SImode result register.
3676 operands[1] = force_reg (SFmode, operands[1]);
3677 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; Truncating DF -> signed SI conversion.
;; As for fix_truncsfsi2: on Maverick, route through gen_cirrus_truncdfsi2
;; and make sure the DFmode source is in a Cirrus-compatible register.
3682 (define_expand "fix_truncdfsi2"
3683 [(set (match_operand:SI 0 "s_register_operand" "")
3684 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3685 "TARGET_32BIT && TARGET_HARD_FLOAT"
3687 if (TARGET_MAVERICK)
3689 if (!cirrus_fp_register (operands[1], DFmode))
;; Bug fix: reload operand 1 from itself, not from operand 0 — the old
;; code clobbered the DFmode source with the SImode destination register.
3690 operands[1] = force_reg (DFmode, operands[1]);
3691 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3698 (define_expand "truncdfsf2"
3699 [(set (match_operand:SF 0 "s_register_operand" "")
3701 (match_operand:DF 1 "s_register_operand" "")))]
3702 "TARGET_32BIT && TARGET_HARD_FLOAT"
3706 ;; Zero and sign extension instructions.
3708 (define_expand "zero_extendsidi2"
3709 [(set (match_operand:DI 0 "s_register_operand" "")
3710 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3715 (define_insn "*arm_zero_extendsidi2"
3716 [(set (match_operand:DI 0 "s_register_operand" "=r")
3717 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3720 if (REGNO (operands[1])
3721 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3722 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3723 return \"mov%?\\t%R0, #0\";
3725 [(set_attr "length" "8")
3726 (set_attr "predicable" "yes")]
3729 (define_expand "zero_extendqidi2"
3730 [(set (match_operand:DI 0 "s_register_operand" "")
3731 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3736 (define_insn "*arm_zero_extendqidi2"
3737 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3738 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3741 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3742 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3743 [(set_attr "length" "8")
3744 (set_attr "predicable" "yes")
3745 (set_attr "type" "*,load_byte")
3746 (set_attr "pool_range" "*,4092")
3747 (set_attr "neg_pool_range" "*,4084")]
3750 (define_expand "extendsidi2"
3751 [(set (match_operand:DI 0 "s_register_operand" "")
3752 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3757 (define_insn "*arm_extendsidi2"
3758 [(set (match_operand:DI 0 "s_register_operand" "=r")
3759 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3762 if (REGNO (operands[1])
3763 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3764 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3765 return \"mov%?\\t%R0, %Q0, asr #31\";
3767 [(set_attr "length" "8")
3768 (set_attr "shift" "1")
3769 (set_attr "predicable" "yes")]
3772 (define_expand "zero_extendhisi2"
3774 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3776 (set (match_operand:SI 0 "s_register_operand" "")
3777 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3781 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3783 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3784 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3788 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3790 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3794 if (!s_register_operand (operands[1], HImode))
3795 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3799 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3800 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3804 operands[1] = gen_lowpart (SImode, operands[1]);
3805 operands[2] = gen_reg_rtx (SImode);
3809 (define_insn "*thumb1_zero_extendhisi2"
3810 [(set (match_operand:SI 0 "register_operand" "=l")
3811 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3812 "TARGET_THUMB1 && !arm_arch6"
3814 rtx mem = XEXP (operands[1], 0);
3816 if (GET_CODE (mem) == CONST)
3817 mem = XEXP (mem, 0);
3819 if (GET_CODE (mem) == LABEL_REF)
3820 return \"ldr\\t%0, %1\";
3822 if (GET_CODE (mem) == PLUS)
3824 rtx a = XEXP (mem, 0);
3825 rtx b = XEXP (mem, 1);
3827 /* This can happen due to bugs in reload. */
3828 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3831 ops[0] = operands[0];
3834 output_asm_insn (\"mov %0, %1\", ops);
3836 XEXP (mem, 0) = operands[0];
3839 else if ( GET_CODE (a) == LABEL_REF
3840 && GET_CODE (b) == CONST_INT)
3841 return \"ldr\\t%0, %1\";
3844 return \"ldrh\\t%0, %1\";
3846 [(set_attr "length" "4")
3847 (set_attr "type" "load_byte")
3848 (set_attr "pool_range" "60")]
3851 (define_insn "*thumb1_zero_extendhisi2_v6"
3852 [(set (match_operand:SI 0 "register_operand" "=l,l")
3853 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3854 "TARGET_THUMB1 && arm_arch6"
3858 if (which_alternative == 0)
3859 return \"uxth\\t%0, %1\";
3861 mem = XEXP (operands[1], 0);
3863 if (GET_CODE (mem) == CONST)
3864 mem = XEXP (mem, 0);
3866 if (GET_CODE (mem) == LABEL_REF)
3867 return \"ldr\\t%0, %1\";
3869 if (GET_CODE (mem) == PLUS)
3871 rtx a = XEXP (mem, 0);
3872 rtx b = XEXP (mem, 1);
3874 /* This can happen due to bugs in reload. */
3875 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3878 ops[0] = operands[0];
3881 output_asm_insn (\"mov %0, %1\", ops);
3883 XEXP (mem, 0) = operands[0];
3886 else if ( GET_CODE (a) == LABEL_REF
3887 && GET_CODE (b) == CONST_INT)
3888 return \"ldr\\t%0, %1\";
3891 return \"ldrh\\t%0, %1\";
3893 [(set_attr "length" "2,4")
3894 (set_attr "type" "alu_shift,load_byte")
3895 (set_attr "pool_range" "*,60")]
3898 (define_insn "*arm_zero_extendhisi2"
3899 [(set (match_operand:SI 0 "s_register_operand" "=r")
3900 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3901 "TARGET_ARM && arm_arch4 && !arm_arch6"
3903 [(set_attr "type" "load_byte")
3904 (set_attr "predicable" "yes")
3905 (set_attr "pool_range" "256")
3906 (set_attr "neg_pool_range" "244")]
3909 (define_insn "*arm_zero_extendhisi2_v6"
3910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3911 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3912 "TARGET_ARM && arm_arch6"
3916 [(set_attr "type" "alu_shift,load_byte")
3917 (set_attr "predicable" "yes")
3918 (set_attr "pool_range" "*,256")
3919 (set_attr "neg_pool_range" "*,244")]
3922 (define_insn "*arm_zero_extendhisi2addsi"
3923 [(set (match_operand:SI 0 "s_register_operand" "=r")
3924 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3925 (match_operand:SI 2 "s_register_operand" "r")))]
3927 "uxtah%?\\t%0, %2, %1"
3928 [(set_attr "type" "alu_shift")
3929 (set_attr "predicable" "yes")]
3932 (define_expand "zero_extendqisi2"
3933 [(set (match_operand:SI 0 "s_register_operand" "")
3934 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3937 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3941 emit_insn (gen_andsi3 (operands[0],
3942 gen_lowpart (SImode, operands[1]),
3945 else /* TARGET_THUMB */
3947 rtx temp = gen_reg_rtx (SImode);
3950 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3951 operands[1] = gen_lowpart (SImode, operands[1]);
3954 ops[1] = operands[1];
3955 ops[2] = GEN_INT (24);
3957 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3958 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3960 ops[0] = operands[0];
3962 ops[2] = GEN_INT (24);
3964 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3965 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3972 (define_insn "*thumb1_zero_extendqisi2"
3973 [(set (match_operand:SI 0 "register_operand" "=l")
3974 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3975 "TARGET_THUMB1 && !arm_arch6"
3977 [(set_attr "length" "2")
3978 (set_attr "type" "load_byte")
3979 (set_attr "pool_range" "32")]
3982 (define_insn "*thumb1_zero_extendqisi2_v6"
3983 [(set (match_operand:SI 0 "register_operand" "=l,l")
3984 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3985 "TARGET_THUMB1 && arm_arch6"
3989 [(set_attr "length" "2,2")
3990 (set_attr "type" "alu_shift,load_byte")
3991 (set_attr "pool_range" "*,32")]
3994 (define_insn "*arm_zero_extendqisi2"
3995 [(set (match_operand:SI 0 "s_register_operand" "=r")
3996 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3997 "TARGET_ARM && !arm_arch6"
3998 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3999 [(set_attr "type" "load_byte")
4000 (set_attr "predicable" "yes")
4001 (set_attr "pool_range" "4096")
4002 (set_attr "neg_pool_range" "4084")]
4005 (define_insn "*arm_zero_extendqisi2_v6"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4007 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4008 "TARGET_ARM && arm_arch6"
4011 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4012 [(set_attr "type" "alu_shift,load_byte")
4013 (set_attr "predicable" "yes")
4014 (set_attr "pool_range" "*,4096")
4015 (set_attr "neg_pool_range" "*,4084")]
4018 (define_insn "*arm_zero_extendqisi2addsi"
4019 [(set (match_operand:SI 0 "s_register_operand" "=r")
4020 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4021 (match_operand:SI 2 "s_register_operand" "r")))]
4023 "uxtab%?\\t%0, %2, %1"
4024 [(set_attr "predicable" "yes")
4025 (set_attr "insn" "xtab")
4026 (set_attr "type" "alu_shift")]
4030 [(set (match_operand:SI 0 "s_register_operand" "")
4031 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4032 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4033 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4034 [(set (match_dup 2) (match_dup 1))
4035 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4040 [(set (match_operand:SI 0 "s_register_operand" "")
4041 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4042 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4043 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4044 [(set (match_dup 2) (match_dup 1))
4045 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4049 (define_insn "*compareqi_eq0"
4050 [(set (reg:CC_Z CC_REGNUM)
4051 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4055 [(set_attr "conds" "set")]
4058 (define_expand "extendhisi2"
4060 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4062 (set (match_operand:SI 0 "s_register_operand" "")
4063 (ashiftrt:SI (match_dup 2)
4068 if (GET_CODE (operands[1]) == MEM)
4072 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4077 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4078 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4083 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4085 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4089 if (!s_register_operand (operands[1], HImode))
4090 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4095 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4097 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4098 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4103 operands[1] = gen_lowpart (SImode, operands[1]);
4104 operands[2] = gen_reg_rtx (SImode);
4108 (define_insn "thumb1_extendhisi2"
4109 [(set (match_operand:SI 0 "register_operand" "=l")
4110 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4111 (clobber (match_scratch:SI 2 "=&l"))]
4112 "TARGET_THUMB1 && !arm_arch6"
4116 rtx mem = XEXP (operands[1], 0);
4118 /* This code used to try to use 'V', and fix the address only if it was
4119 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4120 range of QImode offsets, and offsettable_address_p does a QImode
4123 if (GET_CODE (mem) == CONST)
4124 mem = XEXP (mem, 0);
4126 if (GET_CODE (mem) == LABEL_REF)
4127 return \"ldr\\t%0, %1\";
4129 if (GET_CODE (mem) == PLUS)
4131 rtx a = XEXP (mem, 0);
4132 rtx b = XEXP (mem, 1);
4134 if (GET_CODE (a) == LABEL_REF
4135 && GET_CODE (b) == CONST_INT)
4136 return \"ldr\\t%0, %1\";
4138 if (GET_CODE (b) == REG)
4139 return \"ldrsh\\t%0, %1\";
4147 ops[2] = const0_rtx;
4150 gcc_assert (GET_CODE (ops[1]) == REG);
4152 ops[0] = operands[0];
4153 ops[3] = operands[2];
4154 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4157 [(set_attr "length" "4")
4158 (set_attr "type" "load_byte")
4159 (set_attr "pool_range" "1020")]
4162 ;; We used to have an early-clobber on the scratch register here.
4163 ;; However, there's a bug somewhere in reload which means that this
4164 ;; can be partially ignored during spill allocation if the memory
4165 ;; address also needs reloading; this causes us to die later on when
4166 ;; we try to verify the operands. Fortunately, we don't really need
4167 ;; the early-clobber: we can always use operand 0 if operand 2
4168 ;; overlaps the address.
4169 (define_insn "*thumb1_extendhisi2_insn_v6"
4170 [(set (match_operand:SI 0 "register_operand" "=l,l")
4171 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4172 (clobber (match_scratch:SI 2 "=X,l"))]
4173 "TARGET_THUMB1 && arm_arch6"
4179 if (which_alternative == 0)
4180 return \"sxth\\t%0, %1\";
4182 mem = XEXP (operands[1], 0);
4184 /* This code used to try to use 'V', and fix the address only if it was
4185 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4186 range of QImode offsets, and offsettable_address_p does a QImode
4189 if (GET_CODE (mem) == CONST)
4190 mem = XEXP (mem, 0);
4192 if (GET_CODE (mem) == LABEL_REF)
4193 return \"ldr\\t%0, %1\";
4195 if (GET_CODE (mem) == PLUS)
4197 rtx a = XEXP (mem, 0);
4198 rtx b = XEXP (mem, 1);
4200 if (GET_CODE (a) == LABEL_REF
4201 && GET_CODE (b) == CONST_INT)
4202 return \"ldr\\t%0, %1\";
4204 if (GET_CODE (b) == REG)
4205 return \"ldrsh\\t%0, %1\";
4213 ops[2] = const0_rtx;
4216 gcc_assert (GET_CODE (ops[1]) == REG);
4218 ops[0] = operands[0];
4219 if (reg_mentioned_p (operands[2], ops[1]))
4222 ops[3] = operands[2];
4223 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4226 [(set_attr "length" "2,4")
4227 (set_attr "type" "alu_shift,load_byte")
4228 (set_attr "pool_range" "*,1020")]
4231 ;; This pattern will only be used when ldsh is not available
4232 (define_expand "extendhisi2_mem"
4233 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4235 (zero_extend:SI (match_dup 7)))
4236 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4237 (set (match_operand:SI 0 "" "")
4238 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4243 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4245 mem1 = change_address (operands[1], QImode, addr);
4246 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4247 operands[0] = gen_lowpart (SImode, operands[0]);
4249 operands[2] = gen_reg_rtx (SImode);
4250 operands[3] = gen_reg_rtx (SImode);
4251 operands[6] = gen_reg_rtx (SImode);
4254 if (BYTES_BIG_ENDIAN)
4256 operands[4] = operands[2];
4257 operands[5] = operands[3];
4261 operands[4] = operands[3];
4262 operands[5] = operands[2];
4267 (define_insn "*arm_extendhisi2"
4268 [(set (match_operand:SI 0 "s_register_operand" "=r")
4269 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4270 "TARGET_ARM && arm_arch4 && !arm_arch6"
4271 "ldr%(sh%)\\t%0, %1"
4272 [(set_attr "type" "load_byte")
4273 (set_attr "predicable" "yes")
4274 (set_attr "pool_range" "256")
4275 (set_attr "neg_pool_range" "244")]
4278 ;; ??? Check Thumb-2 pool range
4279 (define_insn "*arm_extendhisi2_v6"
4280 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4281 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4282 "TARGET_32BIT && arm_arch6"
4286 [(set_attr "type" "alu_shift,load_byte")
4287 (set_attr "predicable" "yes")
4288 (set_attr "pool_range" "*,256")
4289 (set_attr "neg_pool_range" "*,244")]
4292 (define_insn "*arm_extendhisi2addsi"
4293 [(set (match_operand:SI 0 "s_register_operand" "=r")
4294 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4295 (match_operand:SI 2 "s_register_operand" "r")))]
4297 "sxtah%?\\t%0, %2, %1"
;; QImode -> HImode sign extension: done with a left/right shift pair in
;; SImode, except that an ARMv4+ memory source can use a real sign-extending
;; byte load (see the C preparation code).
4300 (define_expand "extendqihi2"
4302 (ashift:SI (match_operand:QI 1 "general_operand" "")
4304 (set (match_operand:HI 0 "s_register_operand" "")
4305 (ashiftrt:SI (match_dup 2)
4310 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4312 emit_insn (gen_rtx_SET (VOIDmode,
4314 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4317 if (!s_register_operand (operands[1], QImode))
4318 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4319 operands[0] = gen_lowpart (SImode, operands[0]);
4320 operands[1] = gen_lowpart (SImode, operands[1]);
4321 operands[2] = gen_reg_rtx (SImode);
;; ldrsb into an HImode destination; 'Uq' limits the address to the
;; forms ldrsb accepts.
4325 (define_insn "*arm_extendqihi_insn"
4326 [(set (match_operand:HI 0 "s_register_operand" "=r")
4327 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4328 "TARGET_ARM && arm_arch4"
4329 "ldr%(sb%)\\t%0, %1"
4330 [(set_attr "type" "load_byte")
4331 (set_attr "predicable" "yes")
4332 (set_attr "pool_range" "256")
4333 (set_attr "neg_pool_range" "244")]
;; QImode -> SImode sign extension; same shift-pair strategy with a direct
;; sign-extending load when available (Thumb, or ARM with ARMv4+).
4336 (define_expand "extendqisi2"
4338 (ashift:SI (match_operand:QI 1 "general_operand" "")
4340 (set (match_operand:SI 0 "s_register_operand" "")
4341 (ashiftrt:SI (match_dup 2)
4346 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4348 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4349 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4353 if (!s_register_operand (operands[1], QImode))
4354 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4358 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4359 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4363 operands[1] = gen_lowpart (SImode, operands[1]);
4364 operands[2] = gen_reg_rtx (SImode);
;; ARMv4 (but not v6): ldrsb form.
4368 (define_insn "*arm_extendqisi"
4369 [(set (match_operand:SI 0 "s_register_operand" "=r")
4370 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4371 "TARGET_ARM && arm_arch4 && !arm_arch6"
4372 "ldr%(sb%)\\t%0, %1"
4373 [(set_attr "type" "load_byte")
4374 (set_attr "predicable" "yes")
4375 (set_attr "pool_range" "256")
4376 (set_attr "neg_pool_range" "244")]
;; ARMv6 variant with an extra register-source alternative ("r,Uq"); its
;; output template is not shown in this extract (orig. lines 4383-4385).
4379 (define_insn "*arm_extendqisi_v6"
4380 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4381 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4382 "TARGET_ARM && arm_arch6"
4386 [(set_attr "type" "alu_shift,load_byte")
4387 (set_attr "predicable" "yes")
4388 (set_attr "pool_range" "*,256")
4389 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-byte-and-add, matched to the sxtab instruction.
4392 (define_insn "*arm_extendqisi2addsi"
4393 [(set (match_operand:SI 0 "s_register_operand" "=r")
4394 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4395 (match_operand:SI 2 "s_register_operand" "r")))]
4397 "sxtab%?\\t%0, %2, %1"
4398 [(set_attr "type" "alu_shift")
4399 (set_attr "insn" "xtab")
4400 (set_attr "predicable" "yes")]
;; Thumb-1 without ARMv6 sxtb: sign-extend a byte from memory.
;; Alternative 0 ('V') is an address ldrsb accepts directly.  Other
;; addresses are decomposed by the C code below: the code builds a
;; reg+reg form for ldrsb where it can, and when the destination
;; register must double as the address it falls back to a plain ldrb
;; followed by lsl #24 / asr #24 to perform the sign extension.
;; NOTE(review): this relies on Thumb-1 ldrsb taking only a reg+reg
;; address form -- confirm against the architecture manual.
4403 (define_insn "*thumb1_extendqisi2"
4404 [(set (match_operand:SI 0 "register_operand" "=l,l")
4405 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4406 "TARGET_THUMB1 && !arm_arch6"
4410 rtx mem = XEXP (operands[1], 0);
4412 if (GET_CODE (mem) == CONST)
4413 mem = XEXP (mem, 0);
4415 if (GET_CODE (mem) == LABEL_REF)
4416 return \"ldr\\t%0, %1\";
4418 if (GET_CODE (mem) == PLUS
4419 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4420 return \"ldr\\t%0, %1\";
4422 if (which_alternative == 0)
4423 return \"ldrsb\\t%0, %1\";
4425 ops[0] = operands[0];
4427 if (GET_CODE (mem) == PLUS)
4429 rtx a = XEXP (mem, 0);
4430 rtx b = XEXP (mem, 1);
4435 if (GET_CODE (a) == REG)
4437 if (GET_CODE (b) == REG)
4438 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4439 else if (REGNO (a) == REGNO (ops[0]))
4441 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4442 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4443 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4446 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4450 gcc_assert (GET_CODE (b) == REG);
4451 if (REGNO (b) == REGNO (ops[0]))
4453 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4454 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4455 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4458 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4461 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4463 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4464 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4465 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4470 ops[2] = const0_rtx;
4472 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4476 [(set_attr "length" "2,6")
4477 (set_attr "type" "load_byte,load_byte")
4478 (set_attr "pool_range" "32,32")]
;; Thumb-1 with ARMv6: sign-extend a byte.  Alternative 0 ('l') is a
;; plain register and uses sxtb.  The memory alternatives mirror
;; *thumb1_extendqisi2 above, except that a single sxtb replaces the
;; lsl #24 / asr #24 pair after a plain ldrb.
;; FIX(review): the second which_alternative test below previously read
;; "== 0", which is unreachable here -- alternative 0 already returned
;; sxtb at the top of the template.  In the pre-v6 pattern the matching
;; test selects the 'V' (direct-ldrsb) alternative; with the extra 'l'
;; alternative prepended that is alternative 1 in this pattern, so the
;; test is corrected to "== 1".  Without this, the 'V' alternative fell
;; through into the generic address-decomposition code.
4481 (define_insn "*thumb1_extendqisi2_v6"
4482 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4483 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4484 "TARGET_THUMB1 && arm_arch6"
4490 if (which_alternative == 0)
4491 return \"sxtb\\t%0, %1\";
4493 mem = XEXP (operands[1], 0);
4495 if (GET_CODE (mem) == CONST)
4496 mem = XEXP (mem, 0);
4498 if (GET_CODE (mem) == LABEL_REF)
4499 return \"ldr\\t%0, %1\";
4501 if (GET_CODE (mem) == PLUS
4502 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4503 return \"ldr\\t%0, %1\";
4505 if (which_alternative == 1)
4506 return \"ldrsb\\t%0, %1\";
4508 ops[0] = operands[0];
4510 if (GET_CODE (mem) == PLUS)
4512 rtx a = XEXP (mem, 0);
4513 rtx b = XEXP (mem, 1);
4518 if (GET_CODE (a) == REG)
4520 if (GET_CODE (b) == REG)
4521 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4522 else if (REGNO (a) == REGNO (ops[0]))
4524 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4525 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4528 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4532 gcc_assert (GET_CODE (b) == REG);
4533 if (REGNO (b) == REGNO (ops[0]))
4535 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4536 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4539 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4542 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4544 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4545 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4550 ops[2] = const0_rtx;
4552 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4556 [(set_attr "length" "2,2,4")
4557 (set_attr "type" "alu_shift,load_byte,load_byte")
4558 (set_attr "pool_range" "*,32,32")]
;; SFmode -> DFmode float extension; only for hard-float 32-bit targets.
4561 (define_expand "extendsfdf2"
4562 [(set (match_operand:DF 0 "s_register_operand" "")
4563 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4564 "TARGET_32BIT && TARGET_HARD_FLOAT"
4568 ;; Move insns (including loads and stores)
4570 ;; XXX Just some ideas about movti.
4571 ;; I don't think these are a good idea on the arm, there just aren't enough
4573 ;;(define_expand "loadti"
4574 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4575 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4578 ;;(define_expand "storeti"
4579 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4580 ;; (match_operand:TI 1 "s_register_operand" ""))]
4583 ;;(define_expand "movti"
4584 ;; [(set (match_operand:TI 0 "general_operand" "")
4585 ;; (match_operand:TI 1 "general_operand" ""))]
4591 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4592 ;; operands[1] = copy_to_reg (operands[1]);
4593 ;; if (GET_CODE (operands[0]) == MEM)
4594 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4595 ;; else if (GET_CODE (operands[1]) == MEM)
4596 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4600 ;; emit_insn (insn);
4604 ;; Recognize garbage generated above.
4607 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4608 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4612 ;; register mem = (which_alternative < 3);
4613 ;; register const char *template;
4615 ;; operands[mem] = XEXP (operands[mem], 0);
4616 ;; switch (which_alternative)
4618 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4619 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4620 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4621 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4622 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4623 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4625 ;; output_asm_insn (template, operands);
;; DImode moves.  Before reload, force the source into a register when the
;; destination is not one (no mem-to-mem or mem-from-const moves).
4629 (define_expand "movdi"
4630 [(set (match_operand:DI 0 "general_operand" "")
4631 (match_operand:DI 1 "general_operand" ""))]
4634 if (can_create_pseudo_p ())
4636 if (GET_CODE (operands[0]) != REG)
4637 operands[1] = force_reg (DImode, operands[1]);
;; Core-register DImode move (no Maverick/VFP); constant alternatives
;; Da/Db/Dc are split by cost, and memory forms use output_move_double.
4642 (define_insn "*arm_movdi"
4643 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4644 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4646 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4648 && ( register_operand (operands[0], DImode)
4649 || register_operand (operands[1], DImode))"
4651 switch (which_alternative)
4658 return output_move_double (operands);
4661 [(set_attr "length" "8,12,16,8,8")
4662 (set_attr "type" "*,*,*,load2,store2")
4663 (set_attr "pool_range" "*,*,*,1020,*")
4664 (set_attr "neg_pool_range" "*,*,*,1008,*")]
;; Split a 64-bit constant move into two SImode constant builds (via
;; arm_split_constant) when inlining it is cheap enough per the cost test.
4668 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4669 (match_operand:ANY64 1 "const_double_operand" ""))]
4672 && (arm_const_double_inline_cost (operands[1])
4673 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4676 arm_split_constant (SET, SImode, curr_insn,
4677 INTVAL (gen_lowpart (SImode, operands[1])),
4678 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4679 arm_split_constant (SET, SImode, curr_insn,
4680 INTVAL (gen_highpart_mode (SImode,
4681 GET_MODE (operands[0]),
4683 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4688 ; If optimizing for size, or if we have load delay slots, then
4689 ; we want to split the constant into two separate operations.
4690 ; In both cases this may split a trivial part into a single data op
4691 ; leaving a single complex constant to load. We can also get longer
4692 ; offsets in a LDR which means we get better chances of sharing the pool
4693 ; entries. Finally, we can normally do a better job of scheduling
4694 ; LDR instructions than we can with LDM.
4695 ; This pattern will only match if the one above did not.
;; Fallback split: two independent SImode sets (low word, then high word).
4697 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4698 (match_operand:ANY64 1 "const_double_operand" ""))]
4699 "TARGET_ARM && reload_completed
4700 && arm_const_double_by_parts (operands[1])"
4701 [(set (match_dup 0) (match_dup 1))
4702 (set (match_dup 2) (match_dup 3))]
4704 operands[2] = gen_highpart (SImode, operands[0]);
4705 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4707 operands[0] = gen_lowpart (SImode, operands[0]);
4708 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit register-to-register move into two SImode moves after
;; reload; the word order is swapped when the low destination word would
;; clobber the source's high word (partial overlap).
4713 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4714 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4715 "TARGET_EITHER && reload_completed"
4716 [(set (match_dup 0) (match_dup 1))
4717 (set (match_dup 2) (match_dup 3))]
4719 operands[2] = gen_highpart (SImode, operands[0]);
4720 operands[3] = gen_highpart (SImode, operands[1]);
4721 operands[0] = gen_lowpart (SImode, operands[0]);
4722 operands[1] = gen_lowpart (SImode, operands[1]);
4724 /* Handle a partial overlap. */
4725 if (rtx_equal_p (operands[0], operands[3]))
4727 rtx tmp0 = operands[0];
4728 rtx tmp1 = operands[1];
4730 operands[0] = operands[2];
4731 operands[1] = operands[3];
4738 ;; We can't actually do base+index doubleword loads if the index and
4739 ;; destination overlap. Split here so that we at least have chance to
;; Compute base+index into the destination's low register first, then do a
;; register-indirect DImode load from it.
4742 [(set (match_operand:DI 0 "s_register_operand" "")
4743 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4744 (match_operand:SI 2 "s_register_operand" ""))))]
4746 && reg_overlap_mentioned_p (operands[0], operands[1])
4747 && reg_overlap_mentioned_p (operands[0], operands[2])"
4749 (plus:SI (match_dup 1)
4752 (mem:DI (match_dup 4)))]
4754 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4758 ;;; ??? This should have alternatives for constants.
4759 ;;; ??? This was originally identical to the movdf_insn pattern.
4760 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4761 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move.  Register pairs are moved with add/mov pairs,
;; ordered to survive overlap; ldmia/stmia handle the '>' (auto-inc)
;; alternatives; constants build the low word then derive the high word.
4762 (define_insn "*thumb1_movdi_insn"
4763 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4764 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4766 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4767 && ( register_operand (operands[0], DImode)
4768 || register_operand (operands[1], DImode))"
4771 switch (which_alternative)
4775 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4776 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4777 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4779 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4781 operands[1] = GEN_INT (- INTVAL (operands[1]));
4782 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4784 return \"ldmia\\t%1, {%0, %H0}\";
4786 return \"stmia\\t%0, {%1, %H1}\";
4788 return thumb_load_double_from_address (operands);
4790 operands[2] = gen_rtx_MEM (SImode,
4791 plus_constant (XEXP (operands[0], 0), 4));
4792 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4795 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4796 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4797 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4800 [(set_attr "length" "4,4,6,2,2,6,4,4")
4801 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4802 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode moves.  The expander handles, in order: forcing mem destinations
;; to take a register source; splitting immediates that no single
;; mov/mvn can encode (arm_split_constant); movw/movt symbol pairs;
;; Thumb-1 legalization; section-anchor offsets; TLS references; and PIC
;; address legitimization.
4805 (define_expand "movsi"
4806 [(set (match_operand:SI 0 "general_operand" "")
4807 (match_operand:SI 1 "general_operand" ""))]
4811 rtx base, offset, tmp;
4815 /* Everything except mem = const or mem = mem can be done easily. */
4816 if (GET_CODE (operands[0]) == MEM)
4817 operands[1] = force_reg (SImode, operands[1]);
4818 if (arm_general_register_operand (operands[0], SImode)
4819 && GET_CODE (operands[1]) == CONST_INT
4820 && !(const_ok_for_arm (INTVAL (operands[1]))
4821 || const_ok_for_arm (~INTVAL (operands[1]))))
4823 arm_split_constant (SET, SImode, NULL_RTX,
4824 INTVAL (operands[1]), operands[0], NULL_RTX,
4825 optimize && can_create_pseudo_p ());
4829 if (TARGET_USE_MOVT && !target_word_relocations
4830 && GET_CODE (operands[1]) == SYMBOL_REF
4831 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4833 arm_emit_movpair (operands[0], operands[1]);
4837 else /* TARGET_THUMB1... */
4839 if (can_create_pseudo_p ())
4841 if (GET_CODE (operands[0]) != REG)
4842 operands[1] = force_reg (SImode, operands[1]);
4846 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4848 split_const (operands[1], &base, &offset);
4849 if (GET_CODE (base) == SYMBOL_REF
4850 && !offset_within_block_p (base, INTVAL (offset)))
4852 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4853 emit_move_insn (tmp, base);
4854 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4859 /* Recognize the case where operand[1] is a reference to thread-local
4860 data and load its address to a register. */
4861 if (arm_tls_referenced_p (operands[1]))
4863 rtx tmp = operands[1];
4866 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4868 addend = XEXP (XEXP (tmp, 0), 1);
4869 tmp = XEXP (XEXP (tmp, 0), 0);
4872 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4873 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4875 tmp = legitimize_tls_address (tmp,
4876 !can_create_pseudo_p () ? operands[0] : 0);
4879 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4880 tmp = force_operand (tmp, operands[0]);
4885 && (CONSTANT_P (operands[1])
4886 || symbol_mentioned_p (operands[1])
4887 || label_mentioned_p (operands[1])))
4888 operands[1] = legitimize_pic_address (operands[1], SImode,
4889 (!can_create_pseudo_p ()
4899 (define_insn "*arm_movt"
4900 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4901 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
4902 (match_operand:SI 2 "general_operand" "i")))]
4904 "movt%?\t%0, #:upper16:%c2"
4905 [(set_attr "predicable" "yes")
4906 (set_attr "length" "4")]
4909 (define_insn "*arm_movw"
4910 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4911 (high:SI (match_operand:SI 1 "general_operand" "i")))]
4913 "movw%?\t%0, #:lower16:%c1"
4914 [(set_attr "predicable" "yes")
4915 (set_attr "length" "4")]
4918 (define_insn "*arm_movsi_insn"
4919 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4920 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4921 "TARGET_ARM && ! TARGET_IWMMXT
4922 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4923 && ( register_operand (operands[0], SImode)
4924 || register_operand (operands[1], SImode))"
4932 [(set_attr "type" "*,*,*,*,load1,store1")
4933 (set_attr "predicable" "yes")
4934 (set_attr "pool_range" "*,*,*,*,4096,*")
4935 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
4939 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4940 (match_operand:SI 1 "const_int_operand" ""))]
4942 && (!(const_ok_for_arm (INTVAL (operands[1]))
4943 || const_ok_for_arm (~INTVAL (operands[1]))))"
4944 [(clobber (const_int 0))]
4946 arm_split_constant (SET, SImode, NULL_RTX,
4947 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move; output templates not shown in this extract.
4952 (define_insn "*thumb1_movsi_insn"
4953 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
4954 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
4956 && ( register_operand (operands[0], SImode)
4957 || register_operand (operands[1], SImode))"
4968 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4969 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4970 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
;; Thumb-1: load a 'J' constant (negatable immediate) as mov + neg.
4974 [(set (match_operand:SI 0 "register_operand" "")
4975 (match_operand:SI 1 "const_int_operand" ""))]
4976 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4977 [(set (match_dup 0) (match_dup 1))
4978 (set (match_dup 0) (neg:SI (match_dup 0)))]
4979 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
;; Thumb-1: load a 'K' constant (byte shifted into place) as mov + lsl;
;; the loop finds the shift i such that value == (value >> i) << i with
;; (value >> i) fitting in 8 bits.
4983 [(set (match_operand:SI 0 "register_operand" "")
4984 (match_operand:SI 1 "const_int_operand" ""))]
4985 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4986 [(set (match_dup 0) (match_dup 1))
4987 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4990 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4991 unsigned HOST_WIDE_INT mask = 0xff;
4994 for (i = 0; i < 25; i++)
4995 if ((val & (mask << i)) == val)
4998 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5002 operands[1] = GEN_INT (val >> i);
5003 operands[2] = GEN_INT (i);
5007 ;; When generating pic, we need to load the symbol offset into a register.
5008 ;; So that the optimizer does not confuse this with a normal symbol load
5009 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5010 ;; since that is the only type of relocation we can use.
5012 ;; The rather odd constraints on the following are to force reload to leave
5013 ;; the insn alone, and to force the minipool generation pass to then move
5014 ;; the GOT symbol to memory.
5016 (define_insn "pic_load_addr_arm"
5017 [(set (match_operand:SI 0 "s_register_operand" "=r")
5018 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5019 "TARGET_ARM && flag_pic"
5021 [(set_attr "type" "load1")
5022 (set (attr "pool_range") (const_int 4096))
5023 (set (attr "neg_pool_range") (const_int 4084))]
;; Thumb-1 counterpart; low registers only, smaller pool range.
5026 (define_insn "pic_load_addr_thumb1"
5027 [(set (match_operand:SI 0 "s_register_operand" "=l")
5028 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5029 "TARGET_THUMB1 && flag_pic"
5031 [(set_attr "type" "load1")
5032 (set (attr "pool_range") (const_int 1024))]
;; Add pc to a PIC offset (Thumb flavor: pc reads as . + 4).  Emits the
;; LPIC local label referenced by the corresponding constant-pool entry.
5035 (define_insn "pic_add_dot_plus_four"
5036 [(set (match_operand:SI 0 "register_operand" "=r")
5037 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5039 (match_operand 2 "" "")]
5043 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5044 INTVAL (operands[2]));
5045 return \"add\\t%0, %|pc\";
5047 [(set_attr "length" "2")]
;; ARM flavor: pc reads as . + 8.
5050 (define_insn "pic_add_dot_plus_eight"
5051 [(set (match_operand:SI 0 "register_operand" "=r")
5052 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5054 (match_operand 2 "" "")]
5058 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5059 INTVAL (operands[2]));
5060 return \"add%?\\t%0, %|pc, %1\";
5062 [(set_attr "predicable" "yes")]
;; Combined form: pc-relative load for TLS, ldr %0, [pc, %1].
5065 (define_insn "tls_load_dot_plus_eight"
5066 [(set (match_operand:SI 0 "register_operand" "+r")
5067 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5069 (match_operand 2 "" "")]
5073 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5074 INTVAL (operands[2]));
5075 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5077 [(set_attr "predicable" "yes")]
5080 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5081 ;; followed by a load. These sequences can be crunched down to
5082 ;; tls_load_dot_plus_eight by a peephole.
;; Requires the intermediate address register to be dead after the load.
5085 [(set (match_operand:SI 0 "register_operand" "")
5086 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5088 (match_operand 1 "" "")]
5090 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5091 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5093 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load through the PIC base with an UNSPEC_PIC_OFFSET.
5100 (define_insn "pic_offset_arm"
5101 [(set (match_operand:SI 0 "register_operand" "=r")
5102 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5103 (unspec:SI [(match_operand:SI 2 "" "X")]
5104 UNSPEC_PIC_OFFSET))))]
5105 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5106 "ldr%?\\t%0, [%1,%2]"
5107 [(set_attr "type" "load1")]
;; Reload the PIC register after a longjmp, using r3 as scratch.
5110 (define_expand "builtin_setjmp_receiver"
5111 [(label_ref (match_operand 0 "" ""))]
5115 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5117 if (arm_pic_register != INVALID_REGNUM)
5118 arm_load_pic_register (1UL << 3);
5122 ;; If copying one reg to another we can set the condition codes according to
5123 ;; its value. Such a move is common after a return from subroutine and the
5124 ;; result is being tested against zero.
;; Combined move + compare-with-zero; sets CC as a side effect.
5126 (define_insn "*movsi_compare0"
5127 [(set (reg:CC CC_REGNUM)
5128 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5130 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5136 [(set_attr "conds" "set")]
5139 ;; Subroutine to store a half word from a register into memory.
5140 ;; Operand 0 is the source register (HImode)
5141 ;; Operand 1 is the destination address in a register (SImode)
5143 ;; In both this routine and the next, we must be careful not to spill
5144 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5145 ;; can generate unrecognizable rtl.
;; Little-endian order: low byte first, then high byte at offset 1.
5147 (define_expand "storehi"
5148 [;; store the low byte
5149 (set (match_operand 1 "" "") (match_dup 3))
5150 ;; extract the high byte
5152 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5153 ;; store the high byte
5154 (set (match_dup 4) (match_dup 5))]
5158 rtx op1 = operands[1];
5159 rtx addr = XEXP (op1, 0);
5160 enum rtx_code code = GET_CODE (addr);
5162 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5164 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5166 operands[4] = adjust_address (op1, QImode, 1);
5167 operands[1] = adjust_address (operands[1], QImode, 0);
5168 operands[3] = gen_lowpart (QImode, operands[0]);
5169 operands[0] = gen_lowpart (SImode, operands[0]);
5170 operands[2] = gen_reg_rtx (SImode);
5171 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian order: high byte stored at offset 0 via the shifted copy.
5175 (define_expand "storehi_bigend"
5176 [(set (match_dup 4) (match_dup 3))
5178 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5179 (set (match_operand 1 "" "") (match_dup 5))]
5183 rtx op1 = operands[1];
5184 rtx addr = XEXP (op1, 0);
5185 enum rtx_code code = GET_CODE (addr);
5187 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5189 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5191 operands[4] = adjust_address (op1, QImode, 1);
5192 operands[1] = adjust_address (operands[1], QImode, 0);
5193 operands[3] = gen_lowpart (QImode, operands[0]);
5194 operands[0] = gen_lowpart (SImode, operands[0]);
5195 operands[2] = gen_reg_rtx (SImode);
5196 operands[5] = gen_lowpart (QImode, operands[2]);
5200 ;; Subroutine to store a half word integer constant into memory.
;; Stores the constant as two byte stores, reusing one register when both
;; bytes of the constant are equal; byte order follows BYTES_BIG_ENDIAN.
5201 (define_expand "storeinthi"
5202 [(set (match_operand 0 "" "")
5203 (match_operand 1 "" ""))
5204 (set (match_dup 3) (match_dup 2))]
5208 HOST_WIDE_INT value = INTVAL (operands[1]);
5209 rtx addr = XEXP (operands[0], 0);
5210 rtx op0 = operands[0];
5211 enum rtx_code code = GET_CODE (addr);
5213 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5215 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5217 operands[1] = gen_reg_rtx (SImode);
5218 if (BYTES_BIG_ENDIAN)
5220 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5221 if ((value & 255) == ((value >> 8) & 255))
5222 operands[2] = operands[1];
5225 operands[2] = gen_reg_rtx (SImode);
5226 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5231 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5232 if ((value & 255) == ((value >> 8) & 255))
5233 operands[2] = operands[1];
5236 operands[2] = gen_reg_rtx (SImode);
5237 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5241 operands[3] = adjust_address (op0, QImode, 1);
5242 operands[0] = adjust_address (operands[0], QImode, 0);
5243 operands[2] = gen_lowpart (QImode, operands[2]);
5244 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single strh store, for targets (ARMv4+) that have it; just forces the
;; source into a register.
5248 (define_expand "storehi_single_op"
5249 [(set (match_operand:HI 0 "memory_operand" "")
5250 (match_operand:HI 1 "general_operand" ""))]
5251 "TARGET_32BIT && arm_arch4"
5253 if (!s_register_operand (operands[1], HImode))
5254 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; HImode moves.  Branches by target: ARM (pre-v6 needs the byte-wise
;; storehi/movhi_bytes fallbacks and the aligned-SImode-load trick),
;; Thumb-2, and Thumb-1; reload-time constant handling at the end of
;; each arm.
5258 (define_expand "movhi"
5259 [(set (match_operand:HI 0 "general_operand" "")
5260 (match_operand:HI 1 "general_operand" ""))]
5265 if (can_create_pseudo_p ())
5267 if (GET_CODE (operands[0]) == MEM)
5271 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5274 if (GET_CODE (operands[1]) == CONST_INT
5275 emit_insn (gen_storeinthi (operands[0], operands[1]));
5278 if (GET_CODE (operands[1]) == MEM)
5279 operands[1] = force_reg (HImode, operands[1]);
5280 if (BYTES_BIG_ENDIAN)
5281 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5283 emit_insn (gen_storehi (operands[1], operands[0]));
5287 /* Sign extend a constant, and keep it in an SImode reg. */
5288 else if (GET_CODE (operands[1]) == CONST_INT)
5290 rtx reg = gen_reg_rtx (SImode);
5291 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5293 /* If the constant is already valid, leave it alone. */
5294 if (!const_ok_for_arm (val))
5296 /* If setting all the top bits will make the constant
5297 loadable in a single instruction, then set them.
5298 Otherwise, sign extend the number. */
5300 if (const_ok_for_arm (~(val | ~0xffff)))
5302 else if (val & 0x8000)
5306 emit_insn (gen_movsi (reg, GEN_INT (val)));
5307 operands[1] = gen_lowpart (HImode, reg);
5309 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5310 && GET_CODE (operands[1]) == MEM)
5312 rtx reg = gen_reg_rtx (SImode);
5314 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5315 operands[1] = gen_lowpart (HImode, reg);
5317 else if (!arm_arch4)
5319 if (GET_CODE (operands[1]) == MEM)
5322 rtx offset = const0_rtx;
5323 rtx reg = gen_reg_rtx (SImode);
5325 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5326 || (GET_CODE (base) == PLUS
5327 && (GET_CODE (offset = XEXP (base, 1))
5329 && ((INTVAL(offset) & 1) != 1)
5330 && GET_CODE (base = XEXP (base, 0)) == REG))
5331 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5335 new_rtx = widen_memory_access (operands[1], SImode,
5336 ((INTVAL (offset) & ~3)
5337 - INTVAL (offset)));
5338 emit_insn (gen_movsi (reg, new_rtx));
5339 if (((INTVAL (offset) & 2) != 0)
5340 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5342 rtx reg2 = gen_reg_rtx (SImode);
5344 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5349 emit_insn (gen_movhi_bytes (reg, operands[1]));
5351 operands[1] = gen_lowpart (HImode, reg);
5355 /* Handle loading a large integer during reload. */
5356 else if (GET_CODE (operands[1]) == CONST_INT
5357 && !const_ok_for_arm (INTVAL (operands[1]))
5358 && !const_ok_for_arm (~INTVAL (operands[1])))
5360 /* Writing a constant to memory needs a scratch, which should
5361 be handled with SECONDARY_RELOADs. */
5362 gcc_assert (GET_CODE (operands[0]) == REG);
5364 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5365 emit_insn (gen_movsi (operands[0], operands[1]));
5369 else if (TARGET_THUMB2)
5371 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5372 if (can_create_pseudo_p ())
5374 if (GET_CODE (operands[0]) != REG)
5375 operands[1] = force_reg (HImode, operands[1]);
5376 /* Zero extend a constant, and keep it in an SImode reg. */
5377 else if (GET_CODE (operands[1]) == CONST_INT)
5379 rtx reg = gen_reg_rtx (SImode);
5380 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5382 emit_insn (gen_movsi (reg, GEN_INT (val)));
5383 operands[1] = gen_lowpart (HImode, reg);
5387 else /* TARGET_THUMB1 */
5389 if (can_create_pseudo_p ())
5391 if (GET_CODE (operands[1]) == CONST_INT)
5393 rtx reg = gen_reg_rtx (SImode);
5395 emit_insn (gen_movsi (reg, operands[1]));
5396 operands[1] = gen_lowpart (HImode, reg);
5399 /* ??? We shouldn't really get invalid addresses here, but this can
5400 happen if we are passed a SP (never OK for HImode/QImode) or
5401 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5402 HImode/QImode) relative address. */
5403 /* ??? This should perhaps be fixed elsewhere, for instance, in
5404 fixup_stack_1, by checking for other kinds of invalid addresses,
5405 e.g. a bare reference to a virtual register. This may confuse the
5406 alpha though, which must handle this case differently. */
5407 if (GET_CODE (operands[0]) == MEM
5408 && !memory_address_p (GET_MODE (operands[0]),
5409 XEXP (operands[0], 0)))
5411 = replace_equiv_address (operands[0],
5412 copy_to_reg (XEXP (operands[0], 0)));
5414 if (GET_CODE (operands[1]) == MEM
5415 && !memory_address_p (GET_MODE (operands[1]),
5416 XEXP (operands[1], 0)))
5418 = replace_equiv_address (operands[1],
5419 copy_to_reg (XEXP (operands[1], 0)));
5421 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5423 rtx reg = gen_reg_rtx (SImode);
5425 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5426 operands[1] = gen_lowpart (HImode, reg);
5429 if (GET_CODE (operands[0]) == MEM)
5430 operands[1] = force_reg (HImode, operands[1]);
5432 else if (GET_CODE (operands[1]) == CONST_INT
5433 && !satisfies_constraint_I (operands[1]))
5435 /* Handle loading a large integer during reload. */
5437 /* Writing a constant to memory needs a scratch, which should
5438 be handled with SECONDARY_RELOADs. */
5439 gcc_assert (GET_CODE (operands[0]) == REG);
5441 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5442 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move.  All alternatives except the load return from the
;; switch; the load alternative falls through to the code below, which
;; first rewrites an SP-based index (ldrh cannot use SP) by copying SP
;; into the destination register, then emits ldrh.
5449 (define_insn "*thumb1_movhi_insn"
5450 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5451 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5453 && ( register_operand (operands[0], HImode)
5454 || register_operand (operands[1], HImode))"
5456 switch (which_alternative)
5458 case 0: return \"add %0, %1, #0\";
5459 case 2: return \"strh %1, %0\";
5460 case 3: return \"mov %0, %1\";
5461 case 4: return \"mov %0, %1\";
5462 case 5: return \"mov %0, %1\";
5463 default: gcc_unreachable ();
5465 /* The stack pointer can end up being taken as an index register.
5466 Catch this case here and deal with it. */
5467 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5468 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5469 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5472 ops[0] = operands[0];
5473 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5475 output_asm_insn (\"mov %0, %1\", ops);
5477 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5480 return \"ldrh %0, %1\";
5482 [(set_attr "length" "2,4,2,2,2,2")
5483 (set_attr "type" "*,load1,store1,*,*,*")]
;; Expand an HImode load as two QImode loads plus shift/or, for targets
;; without a halfword load.  operands[4]/[5] select which byte goes in
;; the high half depending on BYTES_BIG_ENDIAN.
;; NOTE(review): elided lines in this chunk; text kept byte-identical.
5487 (define_expand "movhi_bytes"
5488 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5490 (zero_extend:SI (match_dup 6)))
5491 (set (match_operand:SI 0 "" "")
5492 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5497 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5499 mem1 = change_address (operands[1], QImode, addr);
5500 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5501 operands[0] = gen_lowpart (SImode, operands[0]);
5503 operands[2] = gen_reg_rtx (SImode);
5504 operands[3] = gen_reg_rtx (SImode);
5507 if (BYTES_BIG_ENDIAN)
5509 operands[4] = operands[2];
5510 operands[5] = operands[3];
5514 operands[4] = operands[3];
5515 operands[5] = operands[2];
;; Big-endian HImode load expanded as a rotated SImode load plus an
;; arithmetic shift right by 16 to extract the halfword.
5520 (define_expand "movhi_bigend"
5522 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5525 (ashiftrt:SI (match_dup 2) (const_int 16)))
5526 (set (match_operand:HI 0 "s_register_operand" "")
5530 operands[2] = gen_reg_rtx (SImode);
5531 operands[3] = gen_reg_rtx (SImode);
5532 operands[4] = gen_lowpart (HImode, operands[3]);
5536 ;; Pattern to recognize insn generated default case above
;; ARM HImode move for architectures with ldrh/strh (ARMv4+).  The
;; condition rejects constants that fit neither mov (I) nor mvn (~I).
5537 (define_insn "*movhi_insn_arch4"
5538 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5539 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5542 && (GET_CODE (operands[1]) != CONST_INT
5543 || const_ok_for_arm (INTVAL (operands[1]))
5544 || const_ok_for_arm (~INTVAL (operands[1])))"
5546 mov%?\\t%0, %1\\t%@ movhi
5547 mvn%?\\t%0, #%B1\\t%@ movhi
5548 str%(h%)\\t%1, %0\\t%@ movhi
5549 ldr%(h%)\\t%0, %1\\t%@ movhi"
5550 [(set_attr "type" "*,*,store1,load1")
5551 (set_attr "predicable" "yes")
5552 (set_attr "pool_range" "*,*,*,256")
5553 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register/immediate HImode move (no memory alternatives); immediate
;; via mov for 'I' constants or mvn for 'K' (inverted) constants.
5556 (define_insn "*movhi_bytes"
5557 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5558 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5561 mov%?\\t%0, %1\\t%@ movhi
5562 mvn%?\\t%0, #%B1\\t%@ movhi"
5563 [(set_attr "predicable" "yes")]
;; Thumb HImode store with a DImode scratch clobber.  Only the simple
;; case (strict address, lo source register) is expanded here; other
;; cases are unhandled (see the XXX below).
5566 (define_expand "thumb_movhi_clobber"
5567 [(set (match_operand:HI 0 "memory_operand" "")
5568 (match_operand:HI 1 "register_operand" ""))
5569 (clobber (match_operand:DI 2 "register_operand" ""))]
5572 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5573 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5575 emit_insn (gen_movhi (operands[0], operands[1]));
5578 /* XXX Fixme, need to handle other cases here as well. */
5583 ;; We use a DImode scratch because we may occasionally need an additional
5584 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5585 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
;; Secondary-reload output pattern: store an HImode value through an
;; address reload cannot handle directly, using the DImode scratch.
5586 (define_expand "reload_outhi"
5587 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5588 (match_operand:HI 1 "s_register_operand" "r")
5589 (match_operand:DI 2 "s_register_operand" "=&l")])]
5592 arm_reload_out_hi (operands);
5594 thumb_reload_out_hi (operands);
;; Secondary-reload input pattern, symmetric to reload_outhi.
;; NOTE(review): the Thumb path calls thumb_reload_out_hi here rather
;; than a thumb_reload_in_hi -- confirm against arm.c that the helper
;; is intentionally shared for both directions.
5599 (define_expand "reload_inhi"
5600 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5601 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5602 (match_operand:DI 2 "s_register_operand" "=&r")])]
5606 arm_reload_in_hi (operands);
5608 thumb_reload_out_hi (operands);
;; QImode move expander.  Before reload: force constants through an
;; SImode pseudo, legitimize invalid MEM addresses, widen loads to
;; zero_extendqisi2 when optimizing, and force mem=mem / mem=const into
;; a register.  During reload (Thumb): load large constants via SImode.
5612 (define_expand "movqi"
5613 [(set (match_operand:QI 0 "general_operand" "")
5614 (match_operand:QI 1 "general_operand" ""))]
5617 /* Everything except mem = const or mem = mem can be done easily */
5619 if (can_create_pseudo_p ())
5621 if (GET_CODE (operands[1]) == CONST_INT)
5623 rtx reg = gen_reg_rtx (SImode);
5625 emit_insn (gen_movsi (reg, operands[1]));
5626 operands[1] = gen_lowpart (QImode, reg);
5631 /* ??? We shouldn't really get invalid addresses here, but this can
5632 happen if we are passed a SP (never OK for HImode/QImode) or
5633 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5634 HImode/QImode) relative address. */
5635 /* ??? This should perhaps be fixed elsewhere, for instance, in
5636 fixup_stack_1, by checking for other kinds of invalid addresses,
5637 e.g. a bare reference to a virtual register. This may confuse the
5638 alpha though, which must handle this case differently. */
5639 if (GET_CODE (operands[0]) == MEM
5640 && !memory_address_p (GET_MODE (operands[0]),
5641 XEXP (operands[0], 0)))
5643 = replace_equiv_address (operands[0],
5644 copy_to_reg (XEXP (operands[0], 0)));
5645 if (GET_CODE (operands[1]) == MEM
5646 && !memory_address_p (GET_MODE (operands[1]),
5647 XEXP (operands[1], 0)))
5649 = replace_equiv_address (operands[1],
5650 copy_to_reg (XEXP (operands[1], 0)));
5653 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5655 rtx reg = gen_reg_rtx (SImode);
5657 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5658 operands[1] = gen_lowpart (QImode, reg);
5661 if (GET_CODE (operands[0]) == MEM)
5662 operands[1] = force_reg (QImode, operands[1]);
5664 else if (TARGET_THUMB
5665 && GET_CODE (operands[1]) == CONST_INT
5666 && !satisfies_constraint_I (operands[1]))
5668 /* Handle loading a large integer during reload. */
5670 /* Writing a constant to memory needs a scratch, which should
5671 be handled with SECONDARY_RELOADs. */
5672 gcc_assert (GET_CODE (operands[0]) == REG);
5674 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5675 emit_insn (gen_movsi (operands[0], operands[1]));
;; ARM-state QImode move: reg/imm moves plus ldrb/strb alternatives.
;; (Output templates for the alternatives are on lines elided from this
;; chunk.)
5682 (define_insn "*arm_movqi_insn"
5683 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5684 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5686 && ( register_operand (operands[0], QImode)
5687 || register_operand (operands[1], QImode))"
5693 [(set_attr "type" "*,*,load1,store1")
5694 (set_attr "predicable" "yes")]
;; Thumb-1 QImode move; same alternative layout as *thumb1_movhi_insn.
5697 (define_insn "*thumb1_movqi_insn"
5698 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5699 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5701 && ( register_operand (operands[0], QImode)
5702 || register_operand (operands[1], QImode))"
5710 [(set_attr "length" "2")
5711 (set_attr "type" "*,load1,store1,*,*,*")
5712 (set_attr "pool_range" "*,32,*,*,*,*")]
;; SFmode move expander: force stores from memory/constant sources into
;; a register; on Thumb-1 also force non-register destinations.
5715 (define_expand "movsf"
5716 [(set (match_operand:SF 0 "general_operand" "")
5717 (match_operand:SF 1 "general_operand" ""))]
5722 if (GET_CODE (operands[0]) == MEM)
5723 operands[1] = force_reg (SFmode, operands[1]);
5725 else /* TARGET_THUMB1 */
5727 if (can_create_pseudo_p ())
5729 if (GET_CODE (operands[0]) != REG)
5730 operands[1] = force_reg (SFmode, operands[1]);
5736 ;; Transform a floating-point move of a constant into a core register into
5737 ;; an SImode operation.
5739 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5740 (match_operand:SF 1 "immediate_operand" ""))]
5743 && GET_CODE (operands[1]) == CONST_DOUBLE"
5744 [(set (match_dup 2) (match_dup 3))]
5746 operands[2] = gen_lowpart (SImode, operands[0]);
5747 operands[3] = gen_lowpart (SImode, operands[1]);
5748 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move carried out in core registers (mov/ldr/str).
5753 (define_insn "*arm_movsf_soft_insn"
5754 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5755 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5757 && TARGET_SOFT_FLOAT
5758 && (GET_CODE (operands[0]) != MEM
5759 || register_operand (operands[1], SFmode))"
5762 ldr%?\\t%0, %1\\t%@ float
5763 str%?\\t%1, %0\\t%@ float"
5764 [(set_attr "length" "4,4,4")
5765 (set_attr "predicable" "yes")
5766 (set_attr "type" "*,load1,store1")
5767 (set_attr "pool_range" "*,4096,*")
5768 (set_attr "neg_pool_range" "*,4084,*")]
5771 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode move in core registers.
5772 (define_insn "*thumb1_movsf_insn"
5773 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5774 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5776 && ( register_operand (operands[0], SFmode)
5777 || register_operand (operands[1], SFmode))"
5786 [(set_attr "length" "2")
5787 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5788 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
;; DFmode move expander; same store-legitimization strategy as movsf.
5791 (define_expand "movdf"
5792 [(set (match_operand:DF 0 "general_operand" "")
5793 (match_operand:DF 1 "general_operand" ""))]
5798 if (GET_CODE (operands[0]) == MEM)
5799 operands[1] = force_reg (DFmode, operands[1]);
5801 else /* TARGET_THUMB */
5803 if (can_create_pseudo_p ())
5805 if (GET_CODE (operands[0]) != REG)
5806 operands[1] = force_reg (DFmode, operands[1]);
5812 ;; Reloading a df mode value stored in integer regs to memory can require a
;; Secondary-reload output for DFmode in core registers: dispatch on the
;; address form (auto-inc/dec handled via movdi or explicit address
;; arithmetic into the SImode scratch operand 2).
5814 (define_expand "reload_outdf"
5815 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5816 (match_operand:DF 1 "s_register_operand" "r")
5817 (match_operand:SI 2 "s_register_operand" "=&r")]
5821 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5824 operands[2] = XEXP (operands[0], 0);
5825 else if (code == POST_INC || code == PRE_DEC)
5827 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5828 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5829 emit_insn (gen_movdi (operands[0], operands[1]));
5832 else if (code == PRE_INC)
5834 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5836 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5839 else if (code == POST_DEC)
5840 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5842 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5843 XEXP (XEXP (operands[0], 0), 1)));
5845 emit_insn (gen_rtx_SET (VOIDmode,
5846 replace_equiv_address (operands[0], operands[2]),
5849 if (code == POST_DEC)
5850 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move in core register pairs; constant splitting
;; and double-word moves handled by output_move_double.
5856 (define_insn "*movdf_soft_insn"
5857 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5858 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5859 "TARGET_ARM && TARGET_SOFT_FLOAT
5860 && ( register_operand (operands[0], DFmode)
5861 || register_operand (operands[1], DFmode))"
5863 switch (which_alternative)
5870 return output_move_double (operands);
5873 [(set_attr "length" "8,12,16,8,8")
5874 (set_attr "type" "*,*,*,load2,store2")
5875 (set_attr "pool_range" "1020")
5876 (set_attr "neg_pool_range" "1008")]
5879 ;;; ??? This should have alternatives for constants.
5880 ;;; ??? This was originally identical to the movdi_insn pattern.
5881 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5882 ;;; thumb_reorg with a memory reference.
;; Thumb DFmode move; register-pair copy order depends on overlap, and
;; the store alternative emits two str instructions by hand.
5883 (define_insn "*thumb_movdf_insn"
5884 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5885 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5887 && ( register_operand (operands[0], DFmode)
5888 || register_operand (operands[1], DFmode))"
5890 switch (which_alternative)
5894 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5895 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5896 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5898 return \"ldmia\\t%1, {%0, %H0}\";
5900 return \"stmia\\t%0, {%1, %H1}\";
5902 return thumb_load_double_from_address (operands);
5904 operands[2] = gen_rtx_MEM (SImode,
5905 plus_constant (XEXP (operands[0], 0), 4));
5906 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5909 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5910 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5911 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5914 [(set_attr "length" "4,2,2,6,4,4")
5915 (set_attr "type" "*,load2,store2,load2,store2,*")
5916 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (FPA extended) move expander; FPA only.
5919 (define_expand "movxf"
5920 [(set (match_operand:XF 0 "general_operand" "")
5921 (match_operand:XF 1 "general_operand" ""))]
5922 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5924 if (GET_CODE (operands[0]) == MEM)
5925 operands[1] = force_reg (XFmode, operands[1]);
5931 ;; load- and store-multiple insns
5932 ;; The arm can load/store any set of registers, provided that they are in
5933 ;; ascending order; but that is beyond GCC so stick with what it knows.
;; Expander: validate a 2..14 consecutive-register load starting below
;; LAST_ARM_REGNUM, then build the parallel via arm_gen_load_multiple.
5935 (define_expand "load_multiple"
5936 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5937 (match_operand:SI 1 "" ""))
5938 (use (match_operand:SI 2 "" ""))])]
5941 HOST_WIDE_INT offset = 0;
5943 /* Support only fixed point registers. */
5944 if (GET_CODE (operands[2]) != CONST_INT
5945 || INTVAL (operands[2]) > 14
5946 || INTVAL (operands[2]) < 2
5947 || GET_CODE (operands[1]) != MEM
5948 || GET_CODE (operands[0]) != REG
5949 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5950 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5954 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5955 force_reg (SImode, XEXP (operands[1], 0)),
5956 TRUE, FALSE, operands[1], &offset);
5959 ;; Load multiple with write-back
5961 (define_insn "*ldmsi_postinc4"
5962 [(match_parallel 0 "load_multiple_operation"
5963 [(set (match_operand:SI 1 "s_register_operand" "=r")
5964 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5966 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5967 (mem:SI (match_dup 2)))
5968 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5969 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5970 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5971 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5972 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5973 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5974 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5975 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5976 [(set_attr "type" "load4")
5977 (set_attr "predicable" "yes")]
;; Thumb-1 variant of the 4-register post-increment load multiple.
5980 (define_insn "*ldmsi_postinc4_thumb1"
5981 [(match_parallel 0 "load_multiple_operation"
5982 [(set (match_operand:SI 1 "s_register_operand" "=l")
5983 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5985 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5986 (mem:SI (match_dup 2)))
5987 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5988 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5989 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5990 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5991 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5992 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5993 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5994 "ldmia\\t%1!, {%3, %4, %5, %6}"
5995 [(set_attr "type" "load4")]
;; 3-register post-increment load multiple.
5998 (define_insn "*ldmsi_postinc3"
5999 [(match_parallel 0 "load_multiple_operation"
6000 [(set (match_operand:SI 1 "s_register_operand" "=r")
6001 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6003 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6004 (mem:SI (match_dup 2)))
6005 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6006 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6007 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6008 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6009 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6010 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6011 [(set_attr "type" "load3")
6012 (set_attr "predicable" "yes")]
;; 2-register post-increment load multiple.
6015 (define_insn "*ldmsi_postinc2"
6016 [(match_parallel 0 "load_multiple_operation"
6017 [(set (match_operand:SI 1 "s_register_operand" "=r")
6018 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6020 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6021 (mem:SI (match_dup 2)))
6022 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6023 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6024 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6025 "ldm%(ia%)\\t%1!, {%3, %4}"
6026 [(set_attr "type" "load2")
6027 (set_attr "predicable" "yes")]
6030 ;; Ordinary load multiple
6032 (define_insn "*ldmsi4"
6033 [(match_parallel 0 "load_multiple_operation"
6034 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6035 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6036 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6037 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6038 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6039 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6040 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6041 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6042 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6043 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6044 [(set_attr "type" "load4")
6045 (set_attr "predicable" "yes")]
;; 3-register load multiple without write-back.
6048 (define_insn "*ldmsi3"
6049 [(match_parallel 0 "load_multiple_operation"
6050 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6051 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6052 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6053 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6054 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6055 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6056 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6057 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6058 [(set_attr "type" "load3")
6059 (set_attr "predicable" "yes")]
;; 2-register load multiple without write-back.
6062 (define_insn "*ldmsi2"
6063 [(match_parallel 0 "load_multiple_operation"
6064 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6065 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6066 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6067 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6068 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6069 "ldm%(ia%)\\t%1, {%2, %3}"
6070 [(set_attr "type" "load2")
6071 (set_attr "predicable" "yes")]
;; Expander: mirror of load_multiple for stores; validates the register
;; range then builds the parallel via arm_gen_store_multiple.
6074 (define_expand "store_multiple"
6075 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6076 (match_operand:SI 1 "" ""))
6077 (use (match_operand:SI 2 "" ""))])]
6080 HOST_WIDE_INT offset = 0;
6082 /* Support only fixed point registers. */
6083 if (GET_CODE (operands[2]) != CONST_INT
6084 || INTVAL (operands[2]) > 14
6085 || INTVAL (operands[2]) < 2
6086 || GET_CODE (operands[1]) != REG
6087 || GET_CODE (operands[0]) != MEM
6088 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6089 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6093 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6094 force_reg (SImode, XEXP (operands[0], 0)),
6095 TRUE, FALSE, operands[0], &offset);
6098 ;; Store multiple with write-back
6100 (define_insn "*stmsi_postinc4"
6101 [(match_parallel 0 "store_multiple_operation"
6102 [(set (match_operand:SI 1 "s_register_operand" "=r")
6103 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6105 (set (mem:SI (match_dup 2))
6106 (match_operand:SI 3 "arm_hard_register_operand" ""))
6107 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6108 (match_operand:SI 4 "arm_hard_register_operand" ""))
6109 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6110 (match_operand:SI 5 "arm_hard_register_operand" ""))
6111 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6112 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6113 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6114 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6115 [(set_attr "predicable" "yes")
6116 (set_attr "type" "store4")]
;; Thumb-1 variant of the 4-register post-increment store multiple.
6119 (define_insn "*stmsi_postinc4_thumb1"
6120 [(match_parallel 0 "store_multiple_operation"
6121 [(set (match_operand:SI 1 "s_register_operand" "=l")
6122 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6124 (set (mem:SI (match_dup 2))
6125 (match_operand:SI 3 "arm_hard_register_operand" ""))
6126 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6127 (match_operand:SI 4 "arm_hard_register_operand" ""))
6128 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6129 (match_operand:SI 5 "arm_hard_register_operand" ""))
6130 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6131 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6132 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6133 "stmia\\t%1!, {%3, %4, %5, %6}"
6134 [(set_attr "type" "store4")]
;; 3-register post-increment store multiple.
6137 (define_insn "*stmsi_postinc3"
6138 [(match_parallel 0 "store_multiple_operation"
6139 [(set (match_operand:SI 1 "s_register_operand" "=r")
6140 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6142 (set (mem:SI (match_dup 2))
6143 (match_operand:SI 3 "arm_hard_register_operand" ""))
6144 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6145 (match_operand:SI 4 "arm_hard_register_operand" ""))
6146 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6147 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6148 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6149 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6150 [(set_attr "predicable" "yes")
6151 (set_attr "type" "store3")]
;; 2-register post-increment store multiple.
6154 (define_insn "*stmsi_postinc2"
6155 [(match_parallel 0 "store_multiple_operation"
6156 [(set (match_operand:SI 1 "s_register_operand" "=r")
6157 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6159 (set (mem:SI (match_dup 2))
6160 (match_operand:SI 3 "arm_hard_register_operand" ""))
6161 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6162 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6163 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6164 "stm%(ia%)\\t%1!, {%3, %4}"
6165 [(set_attr "predicable" "yes")
6166 (set_attr "type" "store2")]
6169 ;; Ordinary store multiple
6171 (define_insn "*stmsi4"
6172 [(match_parallel 0 "store_multiple_operation"
6173 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6174 (match_operand:SI 2 "arm_hard_register_operand" ""))
6175 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6176 (match_operand:SI 3 "arm_hard_register_operand" ""))
6177 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6178 (match_operand:SI 4 "arm_hard_register_operand" ""))
6179 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6180 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6181 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6182 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6183 [(set_attr "predicable" "yes")
6184 (set_attr "type" "store4")]
;; 3-register store multiple without write-back.
6187 (define_insn "*stmsi3"
6188 [(match_parallel 0 "store_multiple_operation"
6189 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6190 (match_operand:SI 2 "arm_hard_register_operand" ""))
6191 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6192 (match_operand:SI 3 "arm_hard_register_operand" ""))
6193 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6194 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6195 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6196 "stm%(ia%)\\t%1, {%2, %3, %4}"
6197 [(set_attr "predicable" "yes")
6198 (set_attr "type" "store3")]
;; 2-register store multiple without write-back.
6201 (define_insn "*stmsi2"
6202 [(match_parallel 0 "store_multiple_operation"
6203 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6204 (match_operand:SI 2 "arm_hard_register_operand" ""))
6205 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6206 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6207 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6208 "stm%(ia%)\\t%1, {%2, %3}"
6209 [(set_attr "predicable" "yes")
6210 (set_attr "type" "store2")]
6213 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6214 ;; We could let this apply for blocks of less than this, but it clobbers so
6215 ;; many registers that there is then probably a better way.
;; Block-move expander.  Thumb-1 path requires word alignment
;; (operands[3] == 4) and length <= 48 bytes.
6217 (define_expand "movmemqi"
6218 [(match_operand:BLK 0 "general_operand" "")
6219 (match_operand:BLK 1 "general_operand" "")
6220 (match_operand:SI 2 "const_int_operand" "")
6221 (match_operand:SI 3 "const_int_operand" "")]
6226 if (arm_gen_movmemqi (operands))
6230 else /* TARGET_THUMB1 */
6232 if ( INTVAL (operands[3]) != 4
6233 || INTVAL (operands[2]) > 48)
6236 thumb_expand_movmemqi (operands);
6242 ;; Thumb block-move insns
;; Copy 12 bytes (3 words) with post-incremented source/dest pointers;
;; three lo-register scratches carry the data.
6244 (define_insn "movmem12b"
6245 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6246 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6247 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6248 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6249 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6250 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6251 (set (match_operand:SI 0 "register_operand" "=l")
6252 (plus:SI (match_dup 2) (const_int 12)))
6253 (set (match_operand:SI 1 "register_operand" "=l")
6254 (plus:SI (match_dup 3) (const_int 12)))
6255 (clobber (match_scratch:SI 4 "=&l"))
6256 (clobber (match_scratch:SI 5 "=&l"))
6257 (clobber (match_scratch:SI 6 "=&l"))]
6259 "* return thumb_output_move_mem_multiple (3, operands);"
6260 [(set_attr "length" "4")
6261 ; This isn't entirely accurate... It loads as well, but in terms of
6262 ; scheduling the following insn it is better to consider it as a store
6263 (set_attr "type" "store3")]
;; Copy 8 bytes (2 words); same structure with two scratches.
6266 (define_insn "movmem8b"
6267 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6268 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6269 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6270 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6271 (set (match_operand:SI 0 "register_operand" "=l")
6272 (plus:SI (match_dup 2) (const_int 8)))
6273 (set (match_operand:SI 1 "register_operand" "=l")
6274 (plus:SI (match_dup 3) (const_int 8)))
6275 (clobber (match_scratch:SI 4 "=&l"))
6276 (clobber (match_scratch:SI 5 "=&l"))]
6278 "* return thumb_output_move_mem_multiple (2, operands);"
6279 [(set_attr "length" "4")
6280 ; This isn't entirely accurate... It loads as well, but in terms of
6281 ; scheduling the following insn it is better to consider it as a store
6282 (set_attr "type" "store2")]
6287 ;; Compare & branch insns
6288 ;; The range calculations are based as follows:
6289 ;; For forward branches, the address calculation returns the address of
6290 ;; the next instruction. This is 2 beyond the branch instruction.
6291 ;; For backward branches, the address calculation returns the address of
6292 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6293 ;; instruction for the shortest sequence, and 4 before the branch instruction
6294 ;; if we have to jump around an unconditional branch.
6295 ;; To the basic branch range the PC offset must be added (this is +4).
6296 ;; So for forward branches we have
6297 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6298 ;; And for backward branches we have
6299 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6301 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6302 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Compare-and-branch expander: route negative-immediate comparisons to
;; cbranchsi4_scratch (which uses add #-n), otherwise force operand 2
;; into a form thumb1_cmp_operand accepts.
6304 (define_expand "cbranchsi4"
6305 [(set (pc) (if_then_else
6306 (match_operator 0 "arm_comparison_operator"
6307 [(match_operand:SI 1 "s_register_operand" "")
6308 (match_operand:SI 2 "nonmemory_operand" "")])
6309 (label_ref (match_operand 3 "" ""))
6313 if (thumb1_cmpneg_operand (operands[2], SImode))
6315 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6316 operands[3], operands[0]));
6319 if (!thumb1_cmp_operand (operands[2], SImode))
6320 operands[2] = force_reg (SImode, operands[2]);
;; cmp + conditional branch.  Length attribute picks between a short
;; conditional branch, a branch around an unconditional branch, and a
;; branch around a bl "far jump" (ranges per the comment block above).
6323 (define_insn "*cbranchsi4_insn"
6324 [(set (pc) (if_then_else
6325 (match_operator 0 "arm_comparison_operator"
6326 [(match_operand:SI 1 "s_register_operand" "l,*h")
6327 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6328 (label_ref (match_operand 3 "" ""))
6332 output_asm_insn (\"cmp\\t%1, %2\", operands);
6334 switch (get_attr_length (insn))
6336 case 4: return \"b%d0\\t%l3\";
6337 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6338 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6341 [(set (attr "far_jump")
6343 (eq_attr "length" "8")
6344 (const_string "yes")
6345 (const_string "no")))
6346 (set (attr "length")
6348 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6349 (le (minus (match_dup 3) (pc)) (const_int 256)))
6352 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6353 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Compare against a negative immediate by adding its negation into a
;; scratch register (Thumb-1 cmp has no negative immediate form).
6358 (define_insn "cbranchsi4_scratch"
6359 [(set (pc) (if_then_else
6360 (match_operator 4 "arm_comparison_operator"
6361 [(match_operand:SI 1 "s_register_operand" "l,0")
6362 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6363 (label_ref (match_operand 3 "" ""))
6365 (clobber (match_scratch:SI 0 "=l,l"))]
6368 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6370 switch (get_attr_length (insn))
6372 case 4: return \"b%d4\\t%l3\";
6373 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6374 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6377 [(set (attr "far_jump")
6379 (eq_attr "length" "8")
6380 (const_string "yes")
6381 (const_string "no")))
6382 (set (attr "length")
6384 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6385 (le (minus (match_dup 3) (pc)) (const_int 256)))
6388 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6389 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Combined move + compare-with-zero + branch.  Alternatives 0/1 fold
;; the comparison into the move (cmp #0 / sub #0); alternatives 2/3
;; compare first, then mov-to-hi-reg or str, adding 2 to the length.
6393 (define_insn "*movsi_cbranchsi4"
6396 (match_operator 3 "arm_comparison_operator"
6397 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6399 (label_ref (match_operand 2 "" ""))
6401 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6405 if (which_alternative == 0)
6406 output_asm_insn (\"cmp\t%0, #0\", operands);
6407 else if (which_alternative == 1)
6408 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6411 output_asm_insn (\"cmp\t%1, #0\", operands);
6412 if (which_alternative == 2)
6413 output_asm_insn (\"mov\t%0, %1\", operands);
6415 output_asm_insn (\"str\t%1, %0\", operands);
6417 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6419 case 4: return \"b%d3\\t%l2\";
6420 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6421 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6424 [(set (attr "far_jump")
6426 (ior (and (gt (symbol_ref ("which_alternative"))
6428 (eq_attr "length" "8"))
6429 (eq_attr "length" "10"))
6430 (const_string "yes")
6431 (const_string "no")))
6432 (set (attr "length")
6434 (le (symbol_ref ("which_alternative"))
6437 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6438 (le (minus (match_dup 2) (pc)) (const_int 256)))
6441 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6442 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6446 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6447 (le (minus (match_dup 2) (pc)) (const_int 256)))
6450 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6451 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Branch on (reg == -reg2) / (reg != -reg2), implemented with cmn.
6456 (define_insn "*negated_cbranchsi4"
6459 (match_operator 0 "equality_operator"
6460 [(match_operand:SI 1 "s_register_operand" "l")
6461 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6462 (label_ref (match_operand 3 "" ""))
6466 output_asm_insn (\"cmn\\t%1, %2\", operands);
6467 switch (get_attr_length (insn))
6469 case 4: return \"b%d0\\t%l3\";
6470 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6471 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6474 [(set (attr "far_jump")
6476 (eq_attr "length" "8")
6477 (const_string "yes")
6478 (const_string "no")))
6479 (set (attr "length")
6481 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6482 (le (minus (match_dup 3) (pc)) (const_int 256)))
6485 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6486 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single bit of a register: shift the tested bit into the
;; sign position of a scratch (lsl by 31 - bitpos), then branch on sign.
6491 (define_insn "*tbit_cbranch"
6494 (match_operator 0 "equality_operator"
6495 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6497 (match_operand:SI 2 "const_int_operand" "i"))
6499 (label_ref (match_operand 3 "" ""))
6501 (clobber (match_scratch:SI 4 "=l"))]
6506 op[0] = operands[4];
6507 op[1] = operands[1];
6508 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6510 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6511 switch (get_attr_length (insn))
6513 case 4: return \"b%d0\\t%l3\";
6514 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6515 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6518 [(set (attr "far_jump")
6520 (eq_attr "length" "8")
6521 (const_string "yes")
6522 (const_string "no")))
6523 (set (attr "length")
6525 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6526 (le (minus (match_dup 3) (pc)) (const_int 256)))
6529 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6530 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6535 (define_insn "*tlobits_cbranch"
6538 (match_operator 0 "equality_operator"
6539 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6540 (match_operand:SI 2 "const_int_operand" "i")
6543 (label_ref (match_operand 3 "" ""))
6545 (clobber (match_scratch:SI 4 "=l"))]
6550 op[0] = operands[4];
6551 op[1] = operands[1];
6552 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6554 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6555 switch (get_attr_length (insn))
6557 case 4: return \"b%d0\\t%l3\";
6558 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6559 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6562 [(set (attr "far_jump")
6564 (eq_attr "length" "8")
6565 (const_string "yes")
6566 (const_string "no")))
6567 (set (attr "length")
6569 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6570 (le (minus (match_dup 3) (pc)) (const_int 256)))
6573 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6574 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 branch on (a & b) ==/!= 0.  TST sets the flags from the AND
;; without writing a result register (operands commutative, "%l"), so no
;; scratch is needed; branch form chosen by the usual length scheme.
6579 (define_insn "*tstsi3_cbranch"
6582      (match_operator 3 "equality_operator"
6583       [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6584                (match_operand:SI 1 "s_register_operand" "l"))
6586       (label_ref (match_operand 2 "" ""))
6591   output_asm_insn (\"tst\\t%0, %1\", operands);
6592   switch (get_attr_length (insn))
6594     case 4:  return \"b%d3\\t%l2\";
6595     case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6596     default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6599   [(set (attr "far_jump")
6601             (eq_attr "length" "8")
6602             (const_string "yes")
6603             (const_string "no")))
6604    (set (attr "length")
6606             (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6607                  (le (minus (match_dup 2) (pc)) (const_int 256)))
6610             (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6611                  (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Thumb-1 AND + conditional branch that also keeps the AND result.
;; Alternatives: 0 = result in a lo reg (single AND, flags set directly);
;; 1 = result in a hi reg (AND into lo scratch %1, then MOV to %0 -- reload
;; cannot handle output reloads on a jump insn, hence the explicit hi-reg
;; alternative); 2/3 = result in memory (AND into scratch, then STR).
;; Alternatives != 0 emit one extra 2-byte insn, which the length switch
;; compensates for; the far_jump/length attrs mirror that split.
6616 (define_insn "*andsi3_cbranch"
6619      (match_operator 5 "equality_operator"
6620       [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6621                (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6623       (label_ref (match_operand 4 "" ""))
6625    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6626         (and:SI (match_dup 2) (match_dup 3)))
6627    (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6631   if (which_alternative == 0)
6632     output_asm_insn (\"and\\t%0, %3\", operands);
6633   else if (which_alternative == 1)
6635       output_asm_insn (\"and\\t%1, %3\", operands);
6636       output_asm_insn (\"mov\\t%0, %1\", operands);
6640       output_asm_insn (\"and\\t%1, %3\", operands);
6641       output_asm_insn (\"str\\t%1, %0\", operands);
6644   switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6646     case 4:  return \"b%d5\\t%l4\";
6647     case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6648     default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6651   [(set (attr "far_jump")
6653             (ior (and (eq (symbol_ref ("which_alternative"))
6655                       (eq_attr "length" "8"))
6656                  (eq_attr "length" "10"))
6657             (const_string "yes")
6658             (const_string "no")))
6659    (set (attr "length")
6661         (eq (symbol_ref ("which_alternative"))
6664             (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6665                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6668             (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6669                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
6673             (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6674                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6677             (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6678                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 ORR into a scratch reg + conditional branch; result discarded
;; (only the flags matter).  Operands commutative ("%0").
6683 (define_insn "*orrsi3_cbranch_scratch"
6686      (match_operator 4 "equality_operator"
6687       [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6688                (match_operand:SI 2 "s_register_operand" "l"))
6690       (label_ref (match_operand 3 "" ""))
6692    (clobber (match_scratch:SI 0 "=l"))]
6696   output_asm_insn (\"orr\\t%0, %2\", operands);
6697   switch (get_attr_length (insn))
6699     case 4:  return \"b%d4\\t%l3\";
6700     case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6701     default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6704   [(set (attr "far_jump")
6706             (eq_attr "length" "8")
6707             (const_string "yes")
6708             (const_string "no")))
6709    (set (attr "length")
6711             (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6712                  (le (minus (match_dup 3) (pc)) (const_int 256)))
6715             (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6716                  (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 ORR + conditional branch, keeping the ORR result.  Same
;; alternative layout as *andsi3_cbranch: 0 = lo-reg dest, 1 = hi-reg dest
;; (ORR into scratch then MOV), 2/3 = memory dest (ORR then STR); the
;; extra 2-byte insn in alternatives != 0 is subtracted before the branch
;; length switch.
6721 (define_insn "*orrsi3_cbranch"
6724      (match_operator 5 "equality_operator"
6725       [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6726                (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6728       (label_ref (match_operand 4 "" ""))
6730    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6731         (ior:SI (match_dup 2) (match_dup 3)))
6732    (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6736   if (which_alternative == 0)
6737     output_asm_insn (\"orr\\t%0, %3\", operands);
6738   else if (which_alternative == 1)
6740       output_asm_insn (\"orr\\t%1, %3\", operands);
6741       output_asm_insn (\"mov\\t%0, %1\", operands);
6745       output_asm_insn (\"orr\\t%1, %3\", operands);
6746       output_asm_insn (\"str\\t%1, %0\", operands);
6749   switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6751     case 4:  return \"b%d5\\t%l4\";
6752     case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6753     default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6756   [(set (attr "far_jump")
6758             (ior (and (eq (symbol_ref ("which_alternative"))
6760                       (eq_attr "length" "8"))
6761                  (eq_attr "length" "10"))
6762             (const_string "yes")
6763             (const_string "no")))
6764    (set (attr "length")
6766         (eq (symbol_ref ("which_alternative"))
6769             (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6770                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6773             (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6774                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
6778             (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6779                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6782             (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6783                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 EOR into a scratch reg + conditional branch; result discarded.
6788 (define_insn "*xorsi3_cbranch_scratch"
6791      (match_operator 4 "equality_operator"
6792       [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6793                (match_operand:SI 2 "s_register_operand" "l"))
6795       (label_ref (match_operand 3 "" ""))
6797    (clobber (match_scratch:SI 0 "=l"))]
6801   output_asm_insn (\"eor\\t%0, %2\", operands);
6802   switch (get_attr_length (insn))
6804     case 4:  return \"b%d4\\t%l3\";
6805     case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6806     default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6809   [(set (attr "far_jump")
6811             (eq_attr "length" "8")
6812             (const_string "yes")
6813             (const_string "no")))
6814    (set (attr "length")
6816             (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6817                  (le (minus (match_dup 3) (pc)) (const_int 256)))
6820             (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6821                  (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 EOR + conditional branch, keeping the EOR result.  Alternative
;; layout identical to *andsi3_cbranch / *orrsi3_cbranch: lo-reg dest,
;; hi-reg dest via MOV, memory dest via STR, with the 2-byte adjustment
;; in the branch-length switch for alternatives != 0.
6826 (define_insn "*xorsi3_cbranch"
6829      (match_operator 5 "equality_operator"
6830       [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6831                (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6833       (label_ref (match_operand 4 "" ""))
6835    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6836         (xor:SI (match_dup 2) (match_dup 3)))
6837    (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6841   if (which_alternative == 0)
6842     output_asm_insn (\"eor\\t%0, %3\", operands);
6843   else if (which_alternative == 1)
6845       output_asm_insn (\"eor\\t%1, %3\", operands);
6846       output_asm_insn (\"mov\\t%0, %1\", operands);
6850       output_asm_insn (\"eor\\t%1, %3\", operands);
6851       output_asm_insn (\"str\\t%1, %0\", operands);
6854   switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6856     case 4:  return \"b%d5\\t%l4\";
6857     case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6858     default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6861   [(set (attr "far_jump")
6863             (ior (and (eq (symbol_ref ("which_alternative"))
6865                       (eq_attr "length" "8"))
6866                  (eq_attr "length" "10"))
6867             (const_string "yes")
6868             (const_string "no")))
6869    (set (attr "length")
6871         (eq (symbol_ref ("which_alternative"))
6874             (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6875                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6878             (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6879                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
6883             (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6884                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6887             (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6888                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 BIC (and-not) into a scratch reg + conditional branch; result
;; discarded.  Note BIC is not commutative: %2 is the mask, %1 the value.
6893 (define_insn "*bicsi3_cbranch_scratch"
6896      (match_operator 4 "equality_operator"
6897       [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6898                (match_operand:SI 1 "s_register_operand" "0"))
6900       (label_ref (match_operand 3 "" ""))
6902    (clobber (match_scratch:SI 0 "=l"))]
6906   output_asm_insn (\"bic\\t%0, %2\", operands);
6907   switch (get_attr_length (insn))
6909     case 4:  return \"b%d4\\t%l3\";
6910     case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6911     default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6914   [(set (attr "far_jump")
6916             (eq_attr "length" "8")
6917             (const_string "yes")
6918             (const_string "no")))
6919    (set (attr "length")
6921             (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6922                  (le (minus (match_dup 3) (pc)) (const_int 256)))
6925             (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6926                  (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 BIC + conditional branch, keeping the result.  Five alternatives
;; (one more than the AND/ORR/EOR variants): 0 = lo-reg dest, 1/2 = reg dest
;; via BIC-into-scratch + MOV, 3/4 = memory dest via BIC + STR.  The MOV in
;; the middle alternatives may re-set the flags, which is harmless here
;; because only equality (Z) is being tested -- see the inline comment.
6931 (define_insn "*bicsi3_cbranch"
6934      (match_operator 5 "equality_operator"
6935       [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6936                (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6938       (label_ref (match_operand 4 "" ""))
6940    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6941         (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6942    (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6946   if (which_alternative == 0)
6947     output_asm_insn (\"bic\\t%0, %3\", operands);
6948   else if (which_alternative <= 2)
6950       output_asm_insn (\"bic\\t%1, %3\", operands);
6951       /* It's ok if OP0 is a lo-reg, even though the mov will set the
6952          conditions again, since we're only testing for equality.  */
6953       output_asm_insn (\"mov\\t%0, %1\", operands);
6957       output_asm_insn (\"bic\\t%1, %3\", operands);
6958       output_asm_insn (\"str\\t%1, %0\", operands);
6961   switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6963     case 4:  return \"b%d5\\t%l4\";
6964     case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6965     default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6968   [(set (attr "far_jump")
6970             (ior (and (eq (symbol_ref ("which_alternative"))
6972                       (eq_attr "length" "8"))
6973                  (eq_attr "length" "10"))
6974             (const_string "yes")
6975             (const_string "no")))
6976    (set (attr "length")
6978         (eq (symbol_ref ("which_alternative"))
6981             (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6982                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6985             (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6986                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
6990             (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6991                  (le (minus (match_dup 4) (pc)) (const_int 256)))
6994             (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6995                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 decrement-and-branch (typical loop-closing insn): stores
;; operands[2] - 1 into operand 0 and branches on the equality test.
;; The C body synthesizes an equivalent comparison of the *pre-decrement*
;; value against 1 (cond[0]/cond[1]) so the branch can be emitted after
;; the SUB.  Destination alternatives follow the usual pattern: lo reg,
;; hi reg via MOV (reload can't do output reloads on a jump insn), or
;; memory via STR -- see the inline comments.  Uses set_attr_alternative
;; for per-alternative branch lengths instead of the which_alternative
;; cond used by the sibling patterns.
7000 (define_insn "*cbranchne_decr1"
7002      (if_then_else (match_operator 3 "equality_operator"
7003                     [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7005                    (label_ref (match_operand 4 "" ""))
7007    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7008         (plus:SI (match_dup 2) (const_int -1)))
7009    (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7014    cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7016                              VOIDmode, operands[2], const1_rtx);
7017    cond[1] = operands[4];
7019    if (which_alternative == 0)
7020      output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7021    else if (which_alternative == 1)
7023        /* We must provide an alternative for a hi reg because reload
7024           cannot handle output reloads on a jump instruction, but we
7025           can't subtract into that.  Fortunately a mov from lo to hi
7026           does not clobber the condition codes.  */
7027        output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7028        output_asm_insn (\"mov\\t%0, %1\", operands);
7032        /* Similarly, but the target is memory.  */
7033        output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7034        output_asm_insn (\"str\\t%1, %0\", operands);
7037    switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7040        output_asm_insn (\"b%d0\\t%l1\", cond);
7043        output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7044        return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7046        output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7047        return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7051   [(set (attr "far_jump")
7053             (ior (and (eq (symbol_ref ("which_alternative"))
7055                       (eq_attr "length" "8"))
7056                  (eq_attr "length" "10"))
7057             (const_string "yes")
7058             (const_string "no")))
7059    (set_attr_alternative "length"
7063           (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7064                (le (minus (match_dup 4) (pc)) (const_int 256)))
7067           (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7068                (le (minus (match_dup 4) (pc)) (const_int 2048)))
7073           (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7074                (le (minus (match_dup 4) (pc)) (const_int 256)))
7077           (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7078                (le (minus (match_dup 4) (pc)) (const_int 2048)))
7083           (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7084                (le (minus (match_dup 4) (pc)) (const_int 256)))
7087           (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7088                (le (minus (match_dup 4) (pc)) (const_int 2048)))
7093           (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7094                (le (minus (match_dup 4) (pc)) (const_int 256)))
7097           (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7098                (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 ADD + conditional branch keeping the sum.  Restricted to
;; EQ/NE/GE/LT -- the only conditions that are still valid when derived
;; from the flags the Thumb ADD/SUB itself sets.  A negative constant
;; addend is emitted as SUB #-n.  Alternatives 0-2 write the result
;; directly; 3 copies via MOV (hi-reg dest), 4/5 store via STR (memory
;; dest), with the usual 2-byte branch-length adjustment.
7103 (define_insn "*addsi3_cbranch"
7106      (match_operator 4 "comparison_operator"
7108          (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7109          (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7111       (label_ref (match_operand 5 "" ""))
7114         (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7115         (plus:SI (match_dup 2) (match_dup 3)))
7116    (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7118    && (GET_CODE (operands[4]) == EQ
7119        || GET_CODE (operands[4]) == NE
7120        || GET_CODE (operands[4]) == GE
7121        || GET_CODE (operands[4]) == LT)"
7127    cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7128    cond[1] = operands[2];
7129    cond[2] = operands[3];
7131    if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7132      output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7134      output_asm_insn (\"add\\t%0, %1, %2\", cond);
7136    if (which_alternative >= 3
7137        && which_alternative < 4)
7138      output_asm_insn (\"mov\\t%0, %1\", operands);
7139    else if (which_alternative >= 4)
7140      output_asm_insn (\"str\\t%1, %0\", operands);
7142    switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7145        return \"b%d4\\t%l5\";
7147        return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7149        return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7153   [(set (attr "far_jump")
7155             (ior (and (lt (symbol_ref ("which_alternative"))
7157                       (eq_attr "length" "8"))
7158                  (eq_attr "length" "10"))
7159             (const_string "yes")
7160             (const_string "no")))
7161    (set (attr "length")
7163         (lt (symbol_ref ("which_alternative"))
7166             (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7167                  (le (minus (match_dup 5) (pc)) (const_int 256)))
7170             (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7171                  (le (minus (match_dup 5) (pc)) (const_int 2048)))
7175             (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7176                  (le (minus (match_dup 5) (pc)) (const_int 256)))
7179             (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7180                  (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; As above but the sum is discarded: compare-style add-and-branch.
;; Alternatives pick CMP #-n for a negative constant, CMN for a register,
;; or ADD/SUB into a scratch for constants that don't fit CMP/CMN;
;; same EQ/NE/GE/LT restriction.
7185 (define_insn "*addsi3_cbranch_scratch"
7188      (match_operator 3 "comparison_operator"
7190          (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7191          (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7193       (label_ref (match_operand 4 "" ""))
7195    (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7197    && (GET_CODE (operands[3]) == EQ
7198        || GET_CODE (operands[3]) == NE
7199        || GET_CODE (operands[3]) == GE
7200        || GET_CODE (operands[3]) == LT)"
7203    switch (which_alternative)
7206        output_asm_insn (\"cmp\t%1, #%n2\", operands);
7209        output_asm_insn (\"cmn\t%1, %2\", operands);
7212        if (INTVAL (operands[2]) < 0)
7213          output_asm_insn (\"sub\t%0, %1, %2\", operands);
7215          output_asm_insn (\"add\t%0, %1, %2\", operands);
7218        if (INTVAL (operands[2]) < 0)
7219          output_asm_insn (\"sub\t%0, %0, %2\", operands);
7221          output_asm_insn (\"add\t%0, %0, %2\", operands);
7225    switch (get_attr_length (insn))
7228        return \"b%d3\\t%l4\";
7230        return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7232        return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7236   [(set (attr "far_jump")
7238             (eq_attr "length" "8")
7239             (const_string "yes")
7240             (const_string "no")))
7241    (set (attr "length")
7243             (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7244                  (le (minus (match_dup 4) (pc)) (const_int 256)))
7247             (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7248                  (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Thumb-1 SUB + conditional branch keeping the difference.  Restricted to
;; EQ/NE/GE/LT (conditions valid from the Thumb SUB's flag results).
;; Alternatives: 0 = lo-reg dest; 1 = hi-reg dest via MOV (reload can't do
;; output reloads on a jump insn -- see inline comment); 2/3 = memory dest
;; via STR.  Usual 2-byte length adjustment for alternatives != 0.
7253 (define_insn "*subsi3_cbranch"
7256      (match_operator 4 "comparison_operator"
7258          (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7259          (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7261       (label_ref (match_operand 5 "" ""))
7263    (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7264         (minus:SI (match_dup 2) (match_dup 3)))
7265    (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7267    && (GET_CODE (operands[4]) == EQ
7268        || GET_CODE (operands[4]) == NE
7269        || GET_CODE (operands[4]) == GE
7270        || GET_CODE (operands[4]) == LT)"
7273    if (which_alternative == 0)
7274      output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7275    else if (which_alternative == 1)
7277        /* We must provide an alternative for a hi reg because reload
7278           cannot handle output reloads on a jump instruction, but we
7279           can't subtract into that.  Fortunately a mov from lo to hi
7280           does not clobber the condition codes.  */
7281        output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7282        output_asm_insn (\"mov\\t%0, %1\", operands);
7286        /* Similarly, but the target is memory.  */
7287        output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7288        output_asm_insn (\"str\\t%1, %0\", operands);
7291    switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7294        return \"b%d4\\t%l5\";
7296        return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7298        return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7302   [(set (attr "far_jump")
7304             (ior (and (eq (symbol_ref ("which_alternative"))
7306                       (eq_attr "length" "8"))
7307                  (eq_attr "length" "10"))
7308             (const_string "yes")
7309             (const_string "no")))
7310    (set (attr "length")
7312         (eq (symbol_ref ("which_alternative"))
7315             (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7316                  (le (minus (match_dup 5) (pc)) (const_int 256)))
7319             (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7320                  (le (minus (match_dup 5) (pc)) (const_int 2048)))
7324             (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7325                  (le (minus (match_dup 5) (pc)) (const_int 256)))
7328             (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7329                  (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Difference discarded: this is simply CMP + conditional branch
;; (the flags from CMP a,b are exactly those of a - b).
7334 (define_insn "*subsi3_cbranch_scratch"
7337      (match_operator 0 "arm_comparison_operator"
7338       [(minus:SI (match_operand:SI 1 "register_operand" "l")
7339                  (match_operand:SI 2 "nonmemory_operand" "l"))
7341       (label_ref (match_operand 3 "" ""))
7344    && (GET_CODE (operands[0]) == EQ
7345        || GET_CODE (operands[0]) == NE
7346        || GET_CODE (operands[0]) == GE
7347        || GET_CODE (operands[0]) == LT)"
7349   output_asm_insn (\"cmp\\t%1, %2\", operands);
7350   switch (get_attr_length (insn))
7352     case 4:  return \"b%d0\\t%l3\";
7353     case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7354     default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7357   [(set (attr "far_jump")
7359             (eq_attr "length" "8")
7360             (const_string "yes")
7361             (const_string "no")))
7362    (set (attr "length")
7364             (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7365                  (le (minus (match_dup 3) (pc)) (const_int 256)))
7368             (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7369                  (le (minus (match_dup 3) (pc)) (const_int 2048)))
7374 ;; Comparison and test insns
;; Old-style cmp/branch interface: the cmpSImode/SF/DF expanders do not emit
;; code themselves -- they stash the two operands in the global variables
;; arm_compare_op0/arm_compare_op1 for the following b<cond>/s<cond>
;; expanders to consume via arm_gen_compare_reg.
7376 (define_expand "cmpsi"
7377   [(match_operand:SI 0 "s_register_operand" "")
7378    (match_operand:SI 1 "arm_add_operand" "")]
7381   arm_compare_op0 = operands[0];
7382   arm_compare_op1 = operands[1];
;; Single-precision FP compare; hard-float only.
7387 (define_expand "cmpsf"
7388   [(match_operand:SF 0 "s_register_operand" "")
7389    (match_operand:SF 1 "arm_float_compare_operand" "")]
7390   "TARGET_32BIT && TARGET_HARD_FLOAT"
7392   arm_compare_op0 = operands[0];
7393   arm_compare_op1 = operands[1];
;; Double-precision FP compare; hard-float only.
7398 (define_expand "cmpdf"
7399   [(match_operand:DF 0 "s_register_operand" "")
7400    (match_operand:DF 1 "arm_float_compare_operand" "")]
7401   "TARGET_32BIT && TARGET_HARD_FLOAT"
7403   arm_compare_op0 = operands[0];
7404   arm_compare_op1 = operands[1];
;; 32-bit-state integer compare setting the CC register; second alternative
;; ("L") handles a negatable immediate (presumably emitted as CMN -- the
;; output template is in elided lines, TODO confirm).
7409 (define_insn "*arm_cmpsi_insn"
7410   [(set (reg:CC CC_REGNUM)
7411         (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7412                     (match_operand:SI 1 "arm_add_operand"    "rI,L")))]
7417   [(set_attr "conds" "set")]
;; Compare a register against a shifted register (CMP rN, rM, <shift>).
;; "type" is alu_shift for a constant shift amount, alu_shift_reg for a
;; register-specified shift (slower on some cores).
7420 (define_insn "*arm_cmpsi_shiftsi"
7421   [(set (reg:CC CC_REGNUM)
7422         (compare:CC (match_operand:SI   0 "s_register_operand" "r")
7423                     (match_operator:SI  3 "shift_operator"
7424                      [(match_operand:SI 1 "s_register_operand" "r")
7425                       (match_operand:SI 2 "arm_rhs_operand"    "rM")])))]
7428   [(set_attr "conds" "set")
7429    (set_attr "shift" "1")
7430    (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7431                        (const_string "alu_shift")
7432                        (const_string "alu_shift_reg")))]
;; Same comparison with operands swapped; uses CC_SWP mode so later users
;; of the flags know the sense of the comparison is reversed.
7435 (define_insn "*arm_cmpsi_shiftsi_swp"
7436   [(set (reg:CC_SWP CC_REGNUM)
7437         (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7438                          [(match_operand:SI 1 "s_register_operand" "r")
7439                           (match_operand:SI 2 "reg_or_int_operand" "rM")])
7440                         (match_operand:SI 0 "s_register_operand" "r")))]
7443   [(set_attr "conds" "set")
7444    (set_attr "shift" "1")
7445    (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7446                        (const_string "alu_shift")
7447                        (const_string "alu_shift_reg")))]
;; Compare against a negated shifted register; only the Z flag is
;; meaningful afterwards (CC_Z mode).
7450 (define_insn "*arm_cmpsi_negshiftsi_si"
7451   [(set (reg:CC_Z CC_REGNUM)
7453          (neg:SI (match_operator:SI 1 "shift_operator"
7454                   [(match_operand:SI 2 "s_register_operand" "r")
7455                    (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7456          (match_operand:SI 0 "s_register_operand" "r")))]
7459   [(set_attr "conds" "set")
7460    (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7461                        (const_string "alu_shift")
7462                        (const_string "alu_shift_reg")))]
7465 ;; Cirrus SF compare instruction
;; Cirrus Maverick (MaverickCrunch) coprocessor FP compares: the cfcmp*
;; instructions write the comparison result into the ARM flags, which is
;; why r15 appears as the destination in the template.
7466 (define_insn "*cirrus_cmpsf"
7467   [(set (reg:CCFP CC_REGNUM)
7468         (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7469                       (match_operand:SF 1 "cirrus_fp_register" "v")))]
7470   "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7471   "cfcmps%?\\tr15, %V0, %V1"
7472   [(set_attr "type"   "mav_farith")
7473    (set_attr "cirrus" "compare")]
7476 ;; Cirrus DF compare instruction
7477 (define_insn "*cirrus_cmpdf"
7478   [(set (reg:CCFP CC_REGNUM)
7479         (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7480                       (match_operand:DF 1 "cirrus_fp_register" "v")))]
7481   "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7482   "cfcmpd%?\\tr15, %V0, %V1"
7483   [(set_attr "type"   "mav_farith")
7484    (set_attr "cirrus" "compare")]
7487 ;; Cirrus DI compare instruction
;; 64-bit integer compare via the coprocessor: expander stashes the
;; operands for the b<cond>/s<cond> expanders (same protocol as cmpsi).
7488 (define_expand "cmpdi"
7489   [(match_operand:DI 0 "cirrus_fp_register" "")
7490    (match_operand:DI 1 "cirrus_fp_register" "")]
7491   "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7493   arm_compare_op0 = operands[0];
7494   arm_compare_op1 = operands[1];
7498 (define_insn "*cirrus_cmpdi"
7499   [(set (reg:CC CC_REGNUM)
7500         (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7501                     (match_operand:DI 1 "cirrus_fp_register" "v")))]
7502   "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7503   "cfcmp64%?\\tr15, %V0, %V1"
7504   [(set_attr "type"   "mav_farith")
7505    (set_attr "cirrus" "compare")]
7508 ; This insn allows redundant compares to be removed by cse, nothing should
7509 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7510 ; is deleted later on. The match_dup will match the mode here, so that
7511 ; mode changes of the condition codes aren't lost by this even though we don't
7512 ; specify what they are.
;; length 0: emits only an assembler comment, never real code.
7514 (define_insn "*deleted_compare"
7515   [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7517   "\\t%@ deleted compare"
7518   [(set_attr "conds" "set")
7519    (set_attr "length" "0")]
7523 ;; Conditional branch insns
;; Old-style b<cond> expanders: each consumes the operands stashed by the
;; preceding cmp* expander (arm_compare_op0/op1), materializes the flag
;; setting via arm_gen_compare_reg, and emits an if_then_else branch on
;; the resulting CC register.  The unordered/FP variants require hard
;; float (FPA or VFP).
7525 (define_expand "beq"
7527         (if_then_else (eq (match_dup 1) (const_int 0))
7528                       (label_ref (match_operand 0 "" ""))
7531   "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7534 (define_expand "bne"
7536         (if_then_else (ne (match_dup 1) (const_int 0))
7537                       (label_ref (match_operand 0 "" ""))
7540   "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7543 (define_expand "bgt"
7545         (if_then_else (gt (match_dup 1) (const_int 0))
7546                       (label_ref (match_operand 0 "" ""))
7549   "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7552 (define_expand "ble"
7554         (if_then_else (le (match_dup 1) (const_int 0))
7555                       (label_ref (match_operand 0 "" ""))
7558   "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7561 (define_expand "bge"
7563         (if_then_else (ge (match_dup 1) (const_int 0))
7564                       (label_ref (match_operand 0 "" ""))
7567   "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7570 (define_expand "blt"
7572         (if_then_else (lt (match_dup 1) (const_int 0))
7573                       (label_ref (match_operand 0 "" ""))
7576   "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7579 (define_expand "bgtu"
7581         (if_then_else (gtu (match_dup 1) (const_int 0))
7582                       (label_ref (match_operand 0 "" ""))
7585   "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7588 (define_expand "bleu"
7590         (if_then_else (leu (match_dup 1) (const_int 0))
7591                       (label_ref (match_operand 0 "" ""))
7594   "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7597 (define_expand "bgeu"
7599         (if_then_else (geu (match_dup 1) (const_int 0))
7600                       (label_ref (match_operand 0 "" ""))
7603   "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7606 (define_expand "bltu"
7608         (if_then_else (ltu (match_dup 1) (const_int 0))
7609                       (label_ref (match_operand 0 "" ""))
7612   "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
;; FP-only ordered/unordered branch expanders.
7615 (define_expand "bunordered"
7617         (if_then_else (unordered (match_dup 1) (const_int 0))
7618                       (label_ref (match_operand 0 "" ""))
7620   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7621   "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7625 (define_expand "bordered"
7627         (if_then_else (ordered (match_dup 1) (const_int 0))
7628                       (label_ref (match_operand 0 "" ""))
7630   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7631   "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7635 (define_expand "bungt"
7637         (if_then_else (ungt (match_dup 1) (const_int 0))
7638                       (label_ref (match_operand 0 "" ""))
7640   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7641   "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7644 (define_expand "bunlt"
7646         (if_then_else (unlt (match_dup 1) (const_int 0))
7647                       (label_ref (match_operand 0 "" ""))
7649   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7650   "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7653 (define_expand "bunge"
7655         (if_then_else (unge (match_dup 1) (const_int 0))
7656                       (label_ref (match_operand 0 "" ""))
7658   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7659   "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7662 (define_expand "bunle"
7664         (if_then_else (unle (match_dup 1) (const_int 0))
7665                       (label_ref (match_operand 0 "" ""))
7667   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7668   "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7671 ;; The following two patterns need two branch instructions, since there is
7672 ;; no single instruction that will handle all cases.
7673 (define_expand "buneq"
7675         (if_then_else (uneq (match_dup 1) (const_int 0))
7676                       (label_ref (match_operand 0 "" ""))
7678   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7679   "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7682 (define_expand "bltgt"
7684         (if_then_else (ltgt (match_dup 1) (const_int 0))
7685                       (label_ref (match_operand 0 "" ""))
7687   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7688   "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7692 ;; Patterns to match conditional branch insns.
7695 ; Special pattern to match UNEQ.
;; UNEQ has no single ARM condition code, so it is emitted as two branches:
;; BVS (unordered) followed by BEQ.  Incompatible with the conditional-
;; execution state machine, hence the gcc_assert and "conds" = jump_clob.
7696 (define_insn "*arm_buneq"
7698         (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7699                       (label_ref (match_operand 0 "" ""))
7701   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7703   gcc_assert (!arm_ccfsm_state);
7705   return \"bvs\\t%l0\;beq\\t%l0\";
7707   [(set_attr "conds" "jump_clob")
7708    (set_attr "length" "8")]
7711 ; Special pattern to match LTGT.
;; LTGT likewise needs two branches: BMI (less) then BGT (greater).
7712 (define_insn "*arm_bltgt"
7714         (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7715                       (label_ref (match_operand 0 "" ""))
7717   "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7719   gcc_assert (!arm_ccfsm_state);
7721   return \"bmi\\t%l0\;bgt\\t%l0\";
7723   [(set_attr "conds" "jump_clob")
7724    (set_attr "length" "8")]
;; Generic conditional branch on the CC register.  When the conditional-
;; execution state machine (arm_ccfsm_state) has decided to convert this
;; branch into predicated instructions, emit nothing and advance the state
;; instead of printing a branch.
7727 (define_insn "*arm_cond_branch"
7729         (if_then_else (match_operator 1 "arm_comparison_operator"
7730                        [(match_operand 2 "cc_register" "") (const_int 0)])
7731                       (label_ref (match_operand 0 "" ""))
7735   if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7737       arm_ccfsm_state += 2;
7740   return \"b%d1\\t%l0\";
7742   [(set_attr "conds" "use")
7743    (set_attr "type" "branch")]
7746 ; Special pattern to match reversed UNEQ.
;; Reversed UNEQ (branch taken when NOT uneq) == LTGT: BMI then BGT.
7747 (define_insn "*arm_buneq_reversed"
7749         (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7751                       (label_ref (match_operand 0 "" ""))))]
7752   "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7754   gcc_assert (!arm_ccfsm_state);
7756   return \"bmi\\t%l0\;bgt\\t%l0\";
7758   [(set_attr "conds" "jump_clob")
7759    (set_attr "length" "8")]
7762 ; Special pattern to match reversed LTGT.
;; Reversed LTGT == UNEQ: BVS then BEQ.
7763 (define_insn "*arm_bltgt_reversed"
7765         (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7767                       (label_ref (match_operand 0 "" ""))))]
7768   "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7770   gcc_assert (!arm_ccfsm_state);
7772   return \"bvs\\t%l0\;beq\\t%l0\";
7774   [(set_attr "conds" "jump_clob")
7775    (set_attr "length" "8")]
;; As *arm_cond_branch but the label is in the else-arm: branch on the
;; inverse condition (%D1 instead of %d1).
7778 (define_insn "*arm_cond_branch_reversed"
7780         (if_then_else (match_operator 1 "arm_comparison_operator"
7781                        [(match_operand 2 "cc_register" "") (const_int 0)])
7783                       (label_ref (match_operand 0 "" ""))))]
7786   if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7788       arm_ccfsm_state += 2;
7791   return \"b%D1\\t%l0\";
7793   [(set_attr "conds" "use")
7794    (set_attr "type" "branch")]
;; Old-style store-flag (s<cond>) expanders: set operand 0 to the result of
;; comparing the operands stashed by the preceding cmp* expander.  As with
;; b<cond>, arm_gen_compare_reg materializes the flag setting and the
;; pattern tests the resulting CC register against zero.
7801 (define_expand "seq"
7802   [(set (match_operand:SI 0 "s_register_operand" "")
7803         (eq:SI (match_dup 1) (const_int 0)))]
7805   "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7808 (define_expand "sne"
7809   [(set (match_operand:SI 0 "s_register_operand" "")
7810         (ne:SI (match_dup 1) (const_int 0)))]
7812   "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7815 (define_expand "sgt"
7816   [(set (match_operand:SI 0 "s_register_operand" "")
7817         (gt:SI (match_dup 1) (const_int 0)))]
7819   "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7822 (define_expand "sle"
7823   [(set (match_operand:SI 0 "s_register_operand" "")
7824         (le:SI (match_dup 1) (const_int 0)))]
7826   "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7829 (define_expand "sge"
7830   [(set (match_operand:SI 0 "s_register_operand" "")
7831         (ge:SI (match_dup 1) (const_int 0)))]
7833   "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7836 (define_expand "slt"
7837   [(set (match_operand:SI 0 "s_register_operand" "")
7838         (lt:SI (match_dup 1) (const_int 0)))]
7840   "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7843 (define_expand "sgtu"
7844   [(set (match_operand:SI 0 "s_register_operand" "")
7845         (gtu:SI (match_dup 1) (const_int 0)))]
7847   "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7850 (define_expand "sleu"
7851   [(set (match_operand:SI 0 "s_register_operand" "")
7852         (leu:SI (match_dup 1) (const_int 0)))]
7854   "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7857 (define_expand "sgeu"
7858   [(set (match_operand:SI 0 "s_register_operand" "")
7859         (geu:SI (match_dup 1) (const_int 0)))]
7861   "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7864 (define_expand "sltu"
7865   [(set (match_operand:SI 0 "s_register_operand" "")
7866         (ltu:SI (match_dup 1) (const_int 0)))]
7868   "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7871 (define_expand "sunordered"
7872 [(set (match_operand:SI 0 "s_register_operand" "")
7873 (unordered:SI (match_dup 1) (const_int 0)))]
7874 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7875 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7879 (define_expand "sordered"
7880 [(set (match_operand:SI 0 "s_register_operand" "")
7881 (ordered:SI (match_dup 1) (const_int 0)))]
7882 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7883 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7887 (define_expand "sungt"
7888 [(set (match_operand:SI 0 "s_register_operand" "")
7889 (ungt:SI (match_dup 1) (const_int 0)))]
7890 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7891 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7895 (define_expand "sunge"
7896 [(set (match_operand:SI 0 "s_register_operand" "")
7897 (unge:SI (match_dup 1) (const_int 0)))]
7898 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7899 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7903 (define_expand "sunlt"
7904 [(set (match_operand:SI 0 "s_register_operand" "")
7905 (unlt:SI (match_dup 1) (const_int 0)))]
7906 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7907 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7911 (define_expand "sunle"
7912 [(set (match_operand:SI 0 "s_register_operand" "")
7913 (unle:SI (match_dup 1) (const_int 0)))]
7914 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7915 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7919 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7920 ;;; simple ARM instructions.
7922 ; (define_expand "suneq"
7923 ; [(set (match_operand:SI 0 "s_register_operand" "")
7924 ; (uneq:SI (match_dup 1) (const_int 0)))]
7925 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7926 ; "gcc_unreachable ();"
7929 ; (define_expand "sltgt"
7930 ; [(set (match_operand:SI 0 "s_register_operand" "")
7931 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7932 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7933 ; "gcc_unreachable ();"
;; Materialise a comparison result as 0/1 using two conditional moves
;; (mov on the false condition, mov on the true condition); length 8 =
;; two 4-byte ARM instructions.
7936 (define_insn "*mov_scc"
7937 [(set (match_operand:SI 0 "s_register_operand" "=r")
7938 (match_operator:SI 1 "arm_comparison_operator"
7939 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7941 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7942 [(set_attr "conds" "use")
7943 (set_attr "length" "8")]
;; Negated scc: 0 / -1 (MVN #0 gives all-ones, i.e. -1).
7946 (define_insn "*mov_negscc"
7947 [(set (match_operand:SI 0 "s_register_operand" "=r")
7948 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7949 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7951 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7952 [(set_attr "conds" "use")
7953 (set_attr "length" "8")]
;; Bitwise-complemented scc: 0 / ~1 (MVN #1 gives -2).
7956 (define_insn "*mov_notscc"
7957 [(set (match_operand:SI 0 "s_register_operand" "=r")
7958 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7959 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7961 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7962 [(set_attr "conds" "use")
7963 (set_attr "length" "8")]
;; cstoresi4: store the result of comparing operands 2 and 3 into
;; operand 0, synthesised for Thumb-1 using add/ior/shift/sub sequences
;; and the helper patterns below rather than conditional execution.
;; The comparison-against-zero cases (EQ/NE/GE/GT) are special-cased
;; first; the general cases use the helper expanders/insns that follow.
;; NOTE(review): switch-case labels and several interior lines are
;; missing from this listing (original line numbers jump) — consult the
;; full file before editing the C body.
7966 (define_expand "cstoresi4"
7967 [(set (match_operand:SI 0 "s_register_operand" "")
7968 (match_operator:SI 1 "arm_comparison_operator"
7969 [(match_operand:SI 2 "s_register_operand" "")
7970 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7973 rtx op3, scratch, scratch2;
7975 if (operands[3] == const0_rtx)
7977 switch (GET_CODE (operands[1]))
7980 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7984 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7988 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7989 NULL_RTX, 0, OPTAB_WIDEN);
7990 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7991 NULL_RTX, 0, OPTAB_WIDEN);
7992 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7993 operands[0], 1, OPTAB_WIDEN);
7997 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7999 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8000 NULL_RTX, 1, OPTAB_WIDEN);
8004 scratch = expand_binop (SImode, ashr_optab, operands[2],
8005 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8006 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8007 NULL_RTX, 0, OPTAB_WIDEN);
8008 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8012 /* LT is handled by generic code. No need for unsigned with 0. */
8019 switch (GET_CODE (operands[1]))
8022 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8023 NULL_RTX, 0, OPTAB_WIDEN);
8024 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8028 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8029 NULL_RTX, 0, OPTAB_WIDEN);
8030 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8034 op3 = force_reg (SImode, operands[3]);
8036 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8037 NULL_RTX, 1, OPTAB_WIDEN);
8038 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8039 NULL_RTX, 0, OPTAB_WIDEN);
8040 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8046 if (!thumb1_cmp_operand (op3, SImode))
8047 op3 = force_reg (SImode, op3);
8048 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8049 NULL_RTX, 0, OPTAB_WIDEN);
8050 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8051 NULL_RTX, 1, OPTAB_WIDEN);
8052 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8057 op3 = force_reg (SImode, operands[3]);
8058 scratch = force_reg (SImode, const0_rtx);
8059 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8065 if (!thumb1_cmp_operand (op3, SImode))
8066 op3 = force_reg (SImode, op3);
8067 scratch = force_reg (SImode, const0_rtx);
8068 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8074 if (!thumb1_cmp_operand (op3, SImode))
8075 op3 = force_reg (SImode, op3);
8076 scratch = gen_reg_rtx (SImode);
8077 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8078 emit_insn (gen_negsi2 (operands[0], scratch));
8082 op3 = force_reg (SImode, operands[3]);
8083 scratch = gen_reg_rtx (SImode);
8084 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8085 emit_insn (gen_negsi2 (operands[0], scratch));
8088 /* No good sequences for GT, LT. */
;; Helper expander: operand 0 = (operand 1 == 0), allocating a scratch
;; register for the insn pattern below.
8095 (define_expand "cstoresi_eq0_thumb1"
8097 [(set (match_operand:SI 0 "s_register_operand" "")
8098 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8100 (clobber (match_dup:SI 2))])]
8102 "operands[2] = gen_reg_rtx (SImode);"
;; Helper expander: operand 0 = (operand 1 != 0), with scratch.
8105 (define_expand "cstoresi_ne0_thumb1"
8107 [(set (match_operand:SI 0 "s_register_operand" "")
8108 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8110 (clobber (match_dup:SI 2))])]
8112 "operands[2] = gen_reg_rtx (SImode);"
;; EQ-with-zero via NEG/ADC: carry from the negate yields 1 iff the
;; input was zero.  Second alternative uses the scratch when the output
;; overlaps the input.
8115 (define_insn "*cstoresi_eq0_thumb1_insn"
8116 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8117 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8119 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8122 neg\\t%0, %1\;adc\\t%0, %0, %1
8123 neg\\t%2, %1\;adc\\t%0, %1, %2"
8124 [(set_attr "length" "4")]
;; NE-with-zero via SUB #1 / SBC.
8127 (define_insn "*cstoresi_ne0_thumb1_insn"
8128 [(set (match_operand:SI 0 "s_register_operand" "=l")
8129 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8131 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8133 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8134 [(set_attr "length" "4")]
;; operand 0 = -(operand 1 >u operand 2): CMP then SBC of a register
;; with itself propagates the borrow into all bits (0 or -1).
8137 (define_insn "cstoresi_nltu_thumb1"
8138 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8139 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8140 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8142 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8143 [(set_attr "length" "4")]
;; operand 0 = operand 1 + operand 2 + (operand 3 >=u operand 4):
;; CMP sets carry for GEU, ADC folds it into the sum.
8146 ;; Used as part of the expansion of thumb les sequence.
8147 (define_insn "thumb1_addsi3_addgeu"
8148 [(set (match_operand:SI 0 "s_register_operand" "=l")
8149 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8150 (match_operand:SI 2 "s_register_operand" "l"))
8151 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8152 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8154 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8155 [(set_attr "length" "4")]
8159 ;; Conditional move insns
;; movsicc: SImode conditional move.  UNEQ/LTGT cannot be represented
;; (see the branch patterns above), hence the special-case check; the
;; comparison is materialised into the CC register and operand 1 is
;; rewritten to test that register against zero.
8161 (define_expand "movsicc"
8162 [(set (match_operand:SI 0 "s_register_operand" "")
8163 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8164 (match_operand:SI 2 "arm_not_operand" "")
8165 (match_operand:SI 3 "arm_not_operand" "")))]
8169 enum rtx_code code = GET_CODE (operands[1]);
8172 if (code == UNEQ || code == LTGT)
8175 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8176 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movsfcc: SFmode conditional move; forces operand 3 into a register
;; unless it is a valid FPA add operand on hard-float FPA targets.
8180 (define_expand "movsfcc"
8181 [(set (match_operand:SF 0 "s_register_operand" "")
8182 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8183 (match_operand:SF 2 "s_register_operand" "")
8184 (match_operand:SF 3 "nonmemory_operand" "")))]
8188 enum rtx_code code = GET_CODE (operands[1]);
8191 if (code == UNEQ || code == LTGT)
8194 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8195 Otherwise, ensure it is a valid FP add operand */
8196 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8197 || (!arm_float_add_operand (operands[3], SFmode)))
8198 operands[3] = force_reg (SFmode, operands[3]);
8200 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8201 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movdfcc: DFmode conditional move, hard-float only.
8205 (define_expand "movdfcc"
8206 [(set (match_operand:DF 0 "s_register_operand" "")
8207 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8208 (match_operand:DF 2 "s_register_operand" "")
8209 (match_operand:DF 3 "arm_float_add_operand" "")))]
8210 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8213 enum rtx_code code = GET_CODE (operands[1]);
8216 if (code == UNEQ || code == LTGT)
8219 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8220 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SImode conditional move insn: alternatives cover source-in-place,
;; MOV/MVN immediates and register forms; one insn (length 4) when one
;; arm already holds the destination, two (length 8) otherwise.
8224 (define_insn "*movsicc_insn"
8225 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8227 (match_operator 3 "arm_comparison_operator"
8228 [(match_operand 4 "cc_register" "") (const_int 0)])
8229 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8230 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8237 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8238 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8239 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8240 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8241 [(set_attr "length" "4,4,4,4,8,8,8,8")
8242 (set_attr "conds" "use")]
;; SFmode conditional move for soft-float: values live in core
;; registers, so a plain conditional register move suffices.
8245 (define_insn "*movsfcc_soft_insn"
8246 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8247 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8248 [(match_operand 4 "cc_register" "") (const_int 0)])
8249 (match_operand:SF 1 "s_register_operand" "0,r")
8250 (match_operand:SF 2 "s_register_operand" "r,0")))]
8251 "TARGET_ARM && TARGET_SOFT_FLOAT"
8255 [(set_attr "conds" "use")]
8259 ;; Jump and linkage insns
;; Unconditional jump expander and the ARM-mode insn that implements it
;; (predicable B, cooperating with the ccfsm conditionalisation state).
8261 (define_expand "jump"
8263 (label_ref (match_operand 0 "" "")))]
8268 (define_insn "*arm_jump"
8270 (label_ref (match_operand 0 "" "")))]
8274 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8276 arm_ccfsm_state += 2;
8279 return \"b%?\\t%l0\";
8282 [(set_attr "predicable" "yes")]
;; Thumb jump: a short B when the target is within the signed 11-bit
;; branch range (-2044..2048 accounting for pipeline offset); otherwise
;; a "far jump" via BL, which the far_jump attribute records.
8285 (define_insn "*thumb_jump"
8287 (label_ref (match_operand 0 "" "")))]
8290 if (get_attr_length (insn) == 2)
8292 return \"bl\\t%l0\\t%@ far jump\";
8294 [(set (attr "far_jump")
8296 (eq_attr "length" "4")
8297 (const_string "yes")
8298 (const_string "no")))
8299 (set (attr "length")
8301 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8302 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Call expander: normalises a NULL operand 2 (untyped calls), forces
;; long-call targets into a register, then emits via call_internal and
;; arm_emit_call_insn.  LR is clobbered by every call pattern below.
8307 (define_expand "call"
8308 [(parallel [(call (match_operand 0 "memory_operand" "")
8309 (match_operand 1 "general_operand" ""))
8310 (use (match_operand 2 "" ""))
8311 (clobber (reg:SI LR_REGNUM))])]
8317 /* In an untyped call, we can get NULL for operand 2. */
8318 if (operands[2] == NULL_RTX)
8319 operands[2] = const0_rtx;
8321 /* Decide if we should generate indirect calls by loading the
8322 32-bit address of the callee into a register before performing the
8324 callee = XEXP (operands[0], 0);
8325 if (GET_CODE (callee) == SYMBOL_REF
8326 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8328 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8330 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8331 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8336 (define_expand "call_internal"
8337 [(parallel [(call (match_operand 0 "memory_operand" "")
8338 (match_operand 1 "general_operand" ""))
8339 (use (match_operand 2 "" ""))
8340 (clobber (reg:SI LR_REGNUM))])])
;; Indirect call through a register: single BLX-style call on ARMv5+.
8342 (define_insn "*call_reg_armv5"
8343 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8344 (match_operand 1 "" ""))
8345 (use (match_operand 2 "" ""))
8346 (clobber (reg:SI LR_REGNUM))]
8347 "TARGET_ARM && arm_arch5"
8349 [(set_attr "type" "call")]
;; Pre-ARMv5 indirect call: sequence produced by output_call; length 12
;; is the worst case.
8352 (define_insn "*call_reg_arm"
8353 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8354 (match_operand 1 "" ""))
8355 (use (match_operand 2 "" ""))
8356 (clobber (reg:SI LR_REGNUM))]
8357 "TARGET_ARM && !arm_arch5"
8359 return output_call (operands);
8361 ;; length is worst case, normally it is only two
8362 [(set_attr "length" "12")
8363 (set_attr "type" "call")]
;; Call through a memory operand (callee address loaded from memory).
8366 (define_insn "*call_mem"
8367 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8368 (match_operand 1 "" ""))
8369 (use (match_operand 2 "" ""))
8370 (clobber (reg:SI LR_REGNUM))]
8373 return output_call_mem (operands);
8375 [(set_attr "length" "12")
8376 (set_attr "type" "call")]
;; Thumb-1 indirect calls: BLX on ARMv5+, otherwise via helper veneers
;; (thumb_call_via_reg / __interwork_*_call_via_* when interworking).
8379 (define_insn "*call_reg_thumb1_v5"
8380 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8381 (match_operand 1 "" ""))
8382 (use (match_operand 2 "" ""))
8383 (clobber (reg:SI LR_REGNUM))]
8384 "TARGET_THUMB1 && arm_arch5"
8386 [(set_attr "length" "2")
8387 (set_attr "type" "call")]
8390 (define_insn "*call_reg_thumb1"
8391 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8392 (match_operand 1 "" ""))
8393 (use (match_operand 2 "" ""))
8394 (clobber (reg:SI LR_REGNUM))]
8395 "TARGET_THUMB1 && !arm_arch5"
8398 if (!TARGET_CALLER_INTERWORKING)
8399 return thumb_call_via_reg (operands[0]);
8400 else if (operands[1] == const0_rtx)
8401 return \"bl\\t%__interwork_call_via_%0\";
8402 else if (frame_pointer_needed)
8403 return \"bl\\t%__interwork_r7_call_via_%0\";
8405 return \"bl\\t%__interwork_r11_call_via_%0\";
8407 [(set_attr "type" "call")]
;; call_value: as "call" above but with a value return (operand 0 is the
;; result, operand 1 the callee).  The insn patterns mirror the plain
;; call patterns, shifted by one operand.
8410 (define_expand "call_value"
8411 [(parallel [(set (match_operand 0 "" "")
8412 (call (match_operand 1 "memory_operand" "")
8413 (match_operand 2 "general_operand" "")))
8414 (use (match_operand 3 "" ""))
8415 (clobber (reg:SI LR_REGNUM))])]
8421 /* In an untyped call, we can get NULL for operand 2. */
8422 if (operands[3] == 0)
8423 operands[3] = const0_rtx;
8425 /* Decide if we should generate indirect calls by loading the
8426 32-bit address of the callee into a register before performing the
8428 callee = XEXP (operands[1], 0);
8429 if (GET_CODE (callee) == SYMBOL_REF
8430 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8432 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8434 pat = gen_call_value_internal (operands[0], operands[1],
8435 operands[2], operands[3]);
8436 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8441 (define_expand "call_value_internal"
8442 [(parallel [(set (match_operand 0 "" "")
8443 (call (match_operand 1 "memory_operand" "")
8444 (match_operand 2 "general_operand" "")))
8445 (use (match_operand 3 "" ""))
8446 (clobber (reg:SI LR_REGNUM))])])
;; Value-returning indirect calls (register / memory callee), mirroring
;; *call_reg_armv5, *call_reg_arm and *call_mem above.
8448 (define_insn "*call_value_reg_armv5"
8449 [(set (match_operand 0 "" "")
8450 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8451 (match_operand 2 "" "")))
8452 (use (match_operand 3 "" ""))
8453 (clobber (reg:SI LR_REGNUM))]
8454 "TARGET_ARM && arm_arch5"
8456 [(set_attr "type" "call")]
8459 (define_insn "*call_value_reg_arm"
8460 [(set (match_operand 0 "" "")
8461 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8462 (match_operand 2 "" "")))
8463 (use (match_operand 3 "" ""))
8464 (clobber (reg:SI LR_REGNUM))]
8465 "TARGET_ARM && !arm_arch5"
8467 return output_call (&operands[1]);
8469 [(set_attr "length" "12")
8470 (set_attr "type" "call")]
8473 (define_insn "*call_value_mem"
8474 [(set (match_operand 0 "" "")
8475 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8476 (match_operand 2 "" "")))
8477 (use (match_operand 3 "" ""))
8478 (clobber (reg:SI LR_REGNUM))]
8479 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8481 return output_call_mem (&operands[1]);
8483 [(set_attr "length" "12")
8484 (set_attr "type" "call")]
;; Thumb-1 value-returning indirect calls, mirroring the plain Thumb-1
;; call patterns (BLX on ARMv5+, veneers otherwise).
8487 (define_insn "*call_value_reg_thumb1_v5"
8488 [(set (match_operand 0 "" "")
8489 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8490 (match_operand 2 "" "")))
8491 (use (match_operand 3 "" ""))
8492 (clobber (reg:SI LR_REGNUM))]
8493 "TARGET_THUMB1 && arm_arch5"
8495 [(set_attr "length" "2")
8496 (set_attr "type" "call")]
8499 (define_insn "*call_value_reg_thumb1"
8500 [(set (match_operand 0 "" "")
8501 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8502 (match_operand 2 "" "")))
8503 (use (match_operand 3 "" ""))
8504 (clobber (reg:SI LR_REGNUM))]
8505 "TARGET_THUMB1 && !arm_arch5"
8508 if (!TARGET_CALLER_INTERWORKING)
8509 return thumb_call_via_reg (operands[1]);
8510 else if (operands[2] == const0_rtx)
8511 return \"bl\\t%__interwork_call_via_%1\";
8512 else if (frame_pointer_needed)
8513 return \"bl\\t%__interwork_r7_call_via_%1\";
8515 return \"bl\\t%__interwork_r11_call_via_%1\";
8517 [(set_attr "type" "call")]
8520 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8521 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct calls to a SYMBOL_REF, excluding long-call targets; PLT suffix
;; is emitted when NEED_PLT_RELOC holds.
8523 (define_insn "*call_symbol"
8524 [(call (mem:SI (match_operand:SI 0 "" ""))
8525 (match_operand 1 "" ""))
8526 (use (match_operand 2 "" ""))
8527 (clobber (reg:SI LR_REGNUM))]
8529 && (GET_CODE (operands[0]) == SYMBOL_REF)
8530 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8533 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8535 [(set_attr "type" "call")]
8538 (define_insn "*call_value_symbol"
8539 [(set (match_operand 0 "" "")
8540 (call (mem:SI (match_operand:SI 1 "" ""))
8541 (match_operand:SI 2 "" "")))
8542 (use (match_operand 3 "" ""))
8543 (clobber (reg:SI LR_REGNUM))]
8545 && (GET_CODE (operands[1]) == SYMBOL_REF)
8546 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8549 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8551 [(set_attr "type" "call")]
8554 (define_insn "*call_insn"
8555 [(call (mem:SI (match_operand:SI 0 "" ""))
8556 (match_operand:SI 1 "" ""))
8557 (use (match_operand 2 "" ""))
8558 (clobber (reg:SI LR_REGNUM))]
8560 && GET_CODE (operands[0]) == SYMBOL_REF
8561 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8563 [(set_attr "length" "4")
8564 (set_attr "type" "call")]
8567 (define_insn "*call_value_insn"
8568 [(set (match_operand 0 "" "")
8569 (call (mem:SI (match_operand 1 "" ""))
8570 (match_operand 2 "" "")))
8571 (use (match_operand 3 "" ""))
8572 (clobber (reg:SI LR_REGNUM))]
8574 && GET_CODE (operands[1]) == SYMBOL_REF
8575 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8577 [(set_attr "length" "4")
8578 (set_attr "type" "call")]
8581 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Sibling (tail) call expanders: normalise a NULL use operand, ARM mode
;; only per the insn conditions below.
8582 (define_expand "sibcall"
8583 [(parallel [(call (match_operand 0 "memory_operand" "")
8584 (match_operand 1 "general_operand" ""))
8586 (use (match_operand 2 "" ""))])]
8590 if (operands[2] == NULL_RTX)
8591 operands[2] = const0_rtx;
8595 (define_expand "sibcall_value"
8596 [(parallel [(set (match_operand 0 "" "")
8597 (call (match_operand 1 "memory_operand" "")
8598 (match_operand 2 "general_operand" "")))
8600 (use (match_operand 3 "" ""))])]
8604 if (operands[3] == NULL_RTX)
8605 operands[3] = const0_rtx;
;; Sibcall insns: a plain B to the symbol (no LR clobber — the callee
;; returns directly to our caller), with PLT suffix when required.
8609 (define_insn "*sibcall_insn"
8610 [(call (mem:SI (match_operand:SI 0 "" "X"))
8611 (match_operand 1 "" ""))
8613 (use (match_operand 2 "" ""))]
8614 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8616 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8618 [(set_attr "type" "call")]
8621 (define_insn "*sibcall_value_insn"
8622 [(set (match_operand 0 "" "")
8623 (call (mem:SI (match_operand:SI 1 "" "X"))
8624 (match_operand 2 "" "")))
8626 (use (match_operand 3 "" ""))]
8627 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8629 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8631 [(set_attr "type" "call")]
8634 ;; Often the return insn will be the same as loading from memory, so set attr
;; Simple function return; text produced by output_return_instruction.
;; length 12 is the worst case (epilogue may restore multiple registers).
8635 (define_insn "return"
8637 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8640 if (arm_ccfsm_state == 2)
8642 arm_ccfsm_state += 2;
8645 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8647 [(set_attr "type" "load1")
8648 (set_attr "length" "12")
8649 (set_attr "predicable" "yes")]
;; Conditional return: return when the comparison in the CC register
;; holds (and the inverted variant below, for the opposite sense).
8652 (define_insn "*cond_return"
8654 (if_then_else (match_operator 0 "arm_comparison_operator"
8655 [(match_operand 1 "cc_register" "") (const_int 0)])
8658 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8661 if (arm_ccfsm_state == 2)
8663 arm_ccfsm_state += 2;
8666 return output_return_instruction (operands[0], TRUE, FALSE);
8668 [(set_attr "conds" "use")
8669 (set_attr "length" "12")
8670 (set_attr "type" "load1")]
8673 (define_insn "*cond_return_inverted"
8675 (if_then_else (match_operator 0 "arm_comparison_operator"
8676 [(match_operand 1 "cc_register" "") (const_int 0)])
8679 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8682 if (arm_ccfsm_state == 2)
8684 arm_ccfsm_state += 2;
8687 return output_return_instruction (operands[0], TRUE, TRUE);
8689 [(set_attr "conds" "use")
8690 (set_attr "length" "12")
8691 (set_attr "type" "load1")]
8694 ;; Generate a sequence of instructions to determine if the processor is
8695 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; return_addr_mask: select 0x03fffffc (26-bit mode address mask) or the
;; alternate mask depending on the *check_arch2 test result.
8698 (define_expand "return_addr_mask"
8700 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8702 (set (match_operand:SI 0 "s_register_operand" "")
8703 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8705 (const_int 67108860)))] ; 0x03fffffc
8708 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; TEQ r0,r0 sets Z; TEQ pc,pc only sets Z in 32-bit mode, so the CC
;; result distinguishes 26-bit from 32-bit execution.
8711 (define_insn "*check_arch2"
8712 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8713 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8716 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8717 [(set_attr "length" "8")
8718 (set_attr "conds" "set")]
8721 ;; Call subroutine returning any type.
;; untyped_call: call operand 0 and store every possible return register
;; into the result block (operand 1), as described by the parallel in
;; operand 2.  r0 is widened to TImode because up to four core registers
;; can carry a return value; a store-multiple writes them out together.
;; NOTE(review): several interior lines are missing from this listing.
8723 (define_expand "untyped_call"
8724 [(parallel [(call (match_operand 0 "" "")
8726 (match_operand 1 "" "")
8727 (match_operand 2 "" "")])]
8732 rtx par = gen_rtx_PARALLEL (VOIDmode,
8733 rtvec_alloc (XVECLEN (operands[2], 0)));
8734 rtx addr = gen_reg_rtx (Pmode);
8738 emit_move_insn (addr, XEXP (operands[1], 0));
8739 mem = change_address (operands[1], BLKmode, addr);
8741 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8743 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8745 /* Default code only uses r0 as a return value, but we could
8746 be using anything up to 4 registers. */
8747 if (REGNO (src) == R0_REGNUM)
8748 src = gen_rtx_REG (TImode, R0_REGNUM);
8750 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8752 size += GET_MODE_SIZE (GET_MODE (src));
8755 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8760 for (i = 0; i < XVECLEN (par, 0); i++)
8762 HOST_WIDE_INT offset = 0;
8763 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8766 emit_move_insn (addr, plus_constant (addr, size));
8768 mem = change_address (mem, GET_MODE (reg), NULL);
8769 if (REGNO (reg) == R0_REGNUM)
8771 /* On thumb we have to use a write-back instruction. */
8772 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8773 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8774 size = TARGET_ARM ? 16 : 0;
8778 emit_move_insn (mem, reg);
8779 size = GET_MODE_SIZE (GET_MODE (reg));
8783 /* The optimizer does not know that the call sets the function value
8784 registers we stored in the result block. We avoid problems by
8785 claiming that all hard registers are used and clobbered at this
8787 emit_insn (gen_blockage ());
;; untyped_return: reload every return register from the result block
;; (operand 0), emit USEs so dataflow keeps them live, then return.
8793 (define_expand "untyped_return"
8794 [(match_operand:BLK 0 "memory_operand" "")
8795 (match_operand 1 "" "")]
8800 rtx addr = gen_reg_rtx (Pmode);
8804 emit_move_insn (addr, XEXP (operands[0], 0));
8805 mem = change_address (operands[0], BLKmode, addr);
8807 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8809 HOST_WIDE_INT offset = 0;
8810 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8813 emit_move_insn (addr, plus_constant (addr, size));
8815 mem = change_address (mem, GET_MODE (reg), NULL);
8816 if (REGNO (reg) == R0_REGNUM)
8818 /* On thumb we have to use a write-back instruction. */
8819 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8820 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8821 size = TARGET_ARM ? 16 : 0;
8825 emit_move_insn (reg, mem);
8826 size = GET_MODE_SIZE (GET_MODE (reg));
8830 /* Emit USE insns before the return. */
8831 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8832 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8834 /* Construct the return. */
8835 expand_naked_return ();
8841 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8842 ;; all of memory. This blocks insns from being moved across this point.
;; Scheduling barrier: emits no code (length 0) but stops motion across it.
8844 (define_insn "blockage"
8845 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8848 [(set_attr "length" "0")
8849 (set_attr "type" "block")]
;; casesi: dispatch-table jump.  Bias the index by the lower bound when
;; non-zero, force an out-of-range bound into a register, then emit the
;; ARM or Thumb-2 internal casesi pattern.
8852 (define_expand "casesi"
8853 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8854 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8855 (match_operand:SI 2 "const_int_operand" "") ; total range
8856 (match_operand:SI 3 "" "") ; table label
8857 (match_operand:SI 4 "" "")] ; Out of range label
8862 if (operands[1] != const0_rtx)
8864 reg = gen_reg_rtx (SImode);
8866 emit_insn (gen_addsi3 (reg, operands[0],
8867 GEN_INT (-INTVAL (operands[1]))));
8871 if (!const_ok_for_arm (INTVAL (operands[2])))
8872 operands[2] = force_reg (SImode, operands[2]);
8876 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8877 operands[3], operands[4]));
8881 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8882 operands[2], operands[3], operands[4]));
8886 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8887 operands[3], operands[4]));
8893 ;; The USE in this pattern is needed to tell flow analysis that this is
8894 ;; a CASESI insn. It has no other purpose.
;; In-range: CMP then either ADDLS pc,pc,index,asl#2 (branch table) or
;; LDRLS pc,[pc,index,asl#2] (address table); out of range falls through
;; to B %l3.
8895 (define_insn "arm_casesi_internal"
8896 [(parallel [(set (pc)
8898 (leu (match_operand:SI 0 "s_register_operand" "r")
8899 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8900 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8901 (label_ref (match_operand 2 "" ""))))
8902 (label_ref (match_operand 3 "" ""))))
8903 (clobber (reg:CC CC_REGNUM))
8904 (use (label_ref (match_dup 2)))])]
8908 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8909 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8911 [(set_attr "conds" "clob")
8912 (set_attr "length" "12")]
;; Indirect jump; Thumb-2 sets the low address bit and uses BX (no
;; "mov pc, reg" there), other targets use the insns below.
8915 (define_expand "indirect_jump"
8917 (match_operand:SI 0 "s_register_operand" ""))]
8920 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8921 address and use bx. */
8925 tmp = gen_reg_rtx (SImode);
8926 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8932 ;; NB Never uses BX.
8933 (define_insn "*arm_indirect_jump"
8935 (match_operand:SI 0 "s_register_operand" "r"))]
8937 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8938 [(set_attr "predicable" "yes")]
;; Indirect jump loading the target address straight from memory.
8941 (define_insn "*load_indirect_jump"
8943 (match_operand:SI 0 "memory_operand" "m"))]
8945 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8946 [(set_attr "type" "load1")
8947 (set_attr "pool_range" "4096")
8948 (set_attr "neg_pool_range" "4084")
8949 (set_attr "predicable" "yes")]
8952 ;; NB Never uses BX.
8953 (define_insn "*thumb1_indirect_jump"
8955 (match_operand:SI 0 "register_operand" "l*r"))]
8958 [(set_attr "conds" "clob")
8959 (set_attr "length" "2")]
8969 if (TARGET_UNIFIED_ASM)
8972 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8973 return \"mov\\tr8, r8\";
8975 [(set (attr "length")
8976 (if_then_else (eq_attr "is_thumb" "yes")
8982 ;; Patterns to allow combination of arithmetic, cond code and shifts
8984 (define_insn "*arith_shiftsi"
8985 [(set (match_operand:SI 0 "s_register_operand" "=r")
8986 (match_operator:SI 1 "shiftable_operator"
8987 [(match_operator:SI 3 "shift_operator"
8988 [(match_operand:SI 4 "s_register_operand" "r")
8989 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8990 (match_operand:SI 2 "s_register_operand" "r")]))]
8992 "%i1%?\\t%0, %2, %4%S3"
8993 [(set_attr "predicable" "yes")
8994 (set_attr "shift" "4")
8995 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8996 (const_string "alu_shift")
8997 (const_string "alu_shift_reg")))]
9001 [(set (match_operand:SI 0 "s_register_operand" "")
9002 (match_operator:SI 1 "shiftable_operator"
9003 [(match_operator:SI 2 "shiftable_operator"
9004 [(match_operator:SI 3 "shift_operator"
9005 [(match_operand:SI 4 "s_register_operand" "")
9006 (match_operand:SI 5 "reg_or_int_operand" "")])
9007 (match_operand:SI 6 "s_register_operand" "")])
9008 (match_operand:SI 7 "arm_rhs_operand" "")]))
9009 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9012 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9015 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; As *arith_shiftsi, but the flag-setting form (%. suffix): compares
;; the arith+shift result against zero (CC_NOOV) and also writes the
;; result to operand 0.
9018 (define_insn "*arith_shiftsi_compare0"
9019 [(set (reg:CC_NOOV CC_REGNUM)
9020 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9021 [(match_operator:SI 3 "shift_operator"
9022 [(match_operand:SI 4 "s_register_operand" "r")
9023 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9024 (match_operand:SI 2 "s_register_operand" "r")])
9026 (set (match_operand:SI 0 "s_register_operand" "=r")
9027 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9030 "%i1%.\\t%0, %2, %4%S3"
9031 [(set_attr "conds" "set")
9032 (set_attr "shift" "4")
9033 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9034 (const_string "alu_shift")
9035 (const_string "alu_shift_reg")))]
;; Flag-setting arith+shift where only the condition codes are wanted;
;; the arithmetic result goes to a scratch register.
9038 (define_insn "*arith_shiftsi_compare0_scratch"
9039 [(set (reg:CC_NOOV CC_REGNUM)
9040 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9041 [(match_operator:SI 3 "shift_operator"
9042 [(match_operand:SI 4 "s_register_operand" "r")
9043 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9044 (match_operand:SI 2 "s_register_operand" "r")])
9046 (clobber (match_scratch:SI 0 "=r"))]
9048 "%i1%.\\t%0, %2, %4%S3"
9049 [(set_attr "conds" "set")
9050 (set_attr "shift" "4")
9051 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9052 (const_string "alu_shift")
9053 (const_string "alu_shift_reg")))]
;; Subtract a shifted register: "sub r0, r1, r3, <shift>".  The shift
;; amount (operand 4) may be a register or an immediate ("rM").
9056 (define_insn "*sub_shiftsi"
9057 [(set (match_operand:SI 0 "s_register_operand" "=r")
9058 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9059 (match_operator:SI 2 "shift_operator"
9060 [(match_operand:SI 3 "s_register_operand" "r")
9061 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9063 "sub%?\\t%0, %1, %3%S2"
9064 [(set_attr "predicable" "yes")
9065 (set_attr "shift" "3")
9066 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9067 (const_string "alu_shift")
9068 (const_string "alu_shift_reg")))]
;; Flag-setting variant of *sub_shiftsi ("subs"): sets CC_NOOV from the
;; difference and also writes it to operand 0.
9071 (define_insn "*sub_shiftsi_compare0"
9072 [(set (reg:CC_NOOV CC_REGNUM)
9074 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9075 (match_operator:SI 2 "shift_operator"
9076 [(match_operand:SI 3 "s_register_operand" "r")
9077 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9079 (set (match_operand:SI 0 "s_register_operand" "=r")
9080 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9083 "sub%.\\t%0, %1, %3%S2"
9084 [(set_attr "conds" "set")
9085 (set_attr "shift" "3")
9086 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9087 (const_string "alu_shift")
9088 (const_string "alu_shift_reg")))]
;; Flag-setting subtract-of-shift where only the condition codes are
;; needed; the result register is a scratch.
9091 (define_insn "*sub_shiftsi_compare0_scratch"
9092 [(set (reg:CC_NOOV CC_REGNUM)
9094 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9095 (match_operator:SI 2 "shift_operator"
9096 [(match_operand:SI 3 "s_register_operand" "r")
9097 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9099 (clobber (match_scratch:SI 0 "=r"))]
9101 "sub%.\\t%0, %1, %3%S2"
9102 [(set_attr "conds" "set")
9103 (set_attr "shift" "3")
9104 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9105 (const_string "alu_shift")
9106 (const_string "alu_shift_reg")))]
;; AND of a store-condition value with a register, using an existing CC
;; result (operand 3): conditionally zero %0, else AND %2 with 1.
9111 (define_insn "*and_scc"
9112 [(set (match_operand:SI 0 "s_register_operand" "=r")
9113 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9114 [(match_operand 3 "cc_register" "") (const_int 0)])
9115 (match_operand:SI 2 "s_register_operand" "r")))]
9117 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9118 [(set_attr "conds" "use")
9119 (set_attr "length" "8")]
;; OR of a store-condition value into a register.  Alternative 0 reuses
;; the input in place (one insn); alternative 1 needs a move first.
9122 (define_insn "*ior_scc"
9123 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9124 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9125 [(match_operand 3 "cc_register" "") (const_int 0)])
9126 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9130 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9131 [(set_attr "conds" "use")
9132 (set_attr "length" "4,8")]
;; Store the result of a comparison (0 or 1) into a register, clobbering
;; the condition codes.  Special-cases LT/GE/EQ/NE against suitable
;; constants to save an instruction; the general path is cmp + two
;; conditional moves.  Alternative 1 is for negatable constants ("L"),
;; using cmn/adds with the negated immediate (#%n3).
9135 (define_insn "*compare_scc"
9136 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9137 (match_operator:SI 1 "arm_comparison_operator"
9138 [(match_operand:SI 2 "s_register_operand" "r,r")
9139 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9140 (clobber (reg:CC CC_REGNUM))]
9143 if (operands[3] == const0_rtx)
9145 if (GET_CODE (operands[1]) == LT)
9146 return \"mov\\t%0, %2, lsr #31\";
9148 if (GET_CODE (operands[1]) == GE)
9149 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9151 if (GET_CODE (operands[1]) == EQ)
9152 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9155 if (GET_CODE (operands[1]) == NE)
9157 if (which_alternative == 1)
9158 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9159 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9161 if (which_alternative == 1)
9162 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9164 output_asm_insn (\"cmp\\t%2, %3\", operands);
9165 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9167 [(set_attr "conds" "clob")
9168 (set_attr "length" "12")]
;; Conditional move using an existing CC result.  Operand 3 (EQ/NE)
;; selects whether operand 4's condition is used directly or inverted;
;; alternatives 0/1 skip the move whose source is already in %0.
9171 (define_insn "*cond_move"
9172 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9173 (if_then_else:SI (match_operator 3 "equality_operator"
9174 [(match_operator 4 "arm_comparison_operator"
9175 [(match_operand 5 "cc_register" "") (const_int 0)])
9177 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9178 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9181 if (GET_CODE (operands[3]) == NE)
9183 if (which_alternative != 1)
9184 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9185 if (which_alternative != 0)
9186 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9189 if (which_alternative != 0)
9190 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9191 if (which_alternative != 1)
9192 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9195 [(set_attr "conds" "use")
9196 (set_attr "length" "4,4,8")]
;; Shiftable op (operand 5) applied to a comparison result and a
;; register.  Fast path: LT against 0 becomes "op %0, %1, %2, lsr #31".
;; Otherwise compare, fix up the false case per the operator (AND needs
;; 0, MINUS needs negation), then apply the op with #1 when true.
9199 (define_insn "*cond_arith"
9200 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9201 (match_operator:SI 5 "shiftable_operator"
9202 [(match_operator:SI 4 "arm_comparison_operator"
9203 [(match_operand:SI 2 "s_register_operand" "r,r")
9204 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9205 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9206 (clobber (reg:CC CC_REGNUM))]
9209 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9210 return \"%i5\\t%0, %1, %2, lsr #31\";
9212 output_asm_insn (\"cmp\\t%2, %3\", operands);
9213 if (GET_CODE (operands[5]) == AND)
9214 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9215 else if (GET_CODE (operands[5]) == MINUS)
9216 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9217 else if (which_alternative != 0)
9218 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9219 return \"%i5%d4\\t%0, %1, #1\";
9221 [(set_attr "conds" "clob")
9222 (set_attr "length" "12")]
;; Subtract a comparison result (0 or 1) from a register: compare, copy
;; %1 into %0 if needed (alternative 1), then conditionally subtract 1.
9225 (define_insn "*cond_sub"
9226 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9227 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9228 (match_operator:SI 4 "arm_comparison_operator"
9229 [(match_operand:SI 2 "s_register_operand" "r,r")
9230 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9231 (clobber (reg:CC CC_REGNUM))]
9234 output_asm_insn (\"cmp\\t%2, %3\", operands);
9235 if (which_alternative != 0)
9236 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9237 return \"sub%d4\\t%0, %1, #1\";
9239 [(set_attr "conds" "clob")
9240 (set_attr "length" "8,12")]
9243 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
;; Combine two compares whose results feed a dominant condition-code
;; register.  The opcode table is indexed by [alternative][swap]: the
;; four alternatives cover rI/L constant forms of each compare (cmn with
;; the negated immediate #%n for "L"), and `swap' (from
;; comparison_dominates_p) picks which compare is made conditional.
9244 (define_insn "*cmp_ite0"
9245 [(set (match_operand 6 "dominant_cc_register" "")
9248 (match_operator 4 "arm_comparison_operator"
9249 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9250 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9251 (match_operator:SI 5 "arm_comparison_operator"
9252 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9253 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9259 static const char * const opcodes[4][2] =
9261 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9262 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9263 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9264 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9265 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9266 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9267 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9268 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9271 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9273 return opcodes[which_alternative][swap];
9275 [(set_attr "conds" "set")
9276 (set_attr "length" "8")]
;; As *cmp_ite0, but the dominance test reverses the first comparison
;; (reverse_condition on operand 4); the second column of the opcode
;; table uses the inverted condition (%D5) on the trailing compare.
9279 (define_insn "*cmp_ite1"
9280 [(set (match_operand 6 "dominant_cc_register" "")
9283 (match_operator 4 "arm_comparison_operator"
9284 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9285 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9286 (match_operator:SI 5 "arm_comparison_operator"
9287 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9288 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9294 static const char * const opcodes[4][2] =
9296 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9297 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9298 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9299 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9300 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9301 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9302 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9303 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9306 comparison_dominates_p (GET_CODE (operands[5]),
9307 reverse_condition (GET_CODE (operands[4])));
9309 return opcodes[which_alternative][swap];
9311 [(set_attr "conds" "set")
9312 (set_attr "length" "8")]
;; AND of two comparisons into a dominant CC register: the second
;; compare executes only when the first succeeds, so the combined flags
;; reflect "both true".  Same opcode-table scheme as *cmp_ite0.
9315 (define_insn "*cmp_and"
9316 [(set (match_operand 6 "dominant_cc_register" "")
9319 (match_operator 4 "arm_comparison_operator"
9320 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9321 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9322 (match_operator:SI 5 "arm_comparison_operator"
9323 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9324 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9329 static const char *const opcodes[4][2] =
9331 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9332 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9333 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9334 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9335 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9336 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9337 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9338 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9341 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9343 return opcodes[which_alternative][swap];
9345 [(set_attr "conds" "set")
9346 (set_attr "predicable" "no")
9347 (set_attr "length" "8")]
;; OR of two comparisons into a dominant CC register: the second compare
;; executes only when the first FAILS (inverted %D conditions), so the
;; combined flags reflect "either true".
9350 (define_insn "*cmp_ior"
9351 [(set (match_operand 6 "dominant_cc_register" "")
9354 (match_operator 4 "arm_comparison_operator"
9355 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9356 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9357 (match_operator:SI 5 "arm_comparison_operator"
9358 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9359 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9364 static const char *const opcodes[4][2] =
9366 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9367 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9368 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9369 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9370 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9371 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9372 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9373 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9376 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9378 return opcodes[which_alternative][swap];
9381 [(set_attr "conds" "set")
9382 (set_attr "length" "8")]
;; OR of two store-condition values.  After reload this splits into a
;; *cmp_ior-style combined compare (into the dominance CC mode chosen by
;; arm_select_dominance_cc_mode) followed by a store of NE on it.
9385 (define_insn_and_split "*ior_scc_scc"
9386 [(set (match_operand:SI 0 "s_register_operand" "=r")
9387 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9388 [(match_operand:SI 1 "s_register_operand" "r")
9389 (match_operand:SI 2 "arm_add_operand" "rIL")])
9390 (match_operator:SI 6 "arm_comparison_operator"
9391 [(match_operand:SI 4 "s_register_operand" "r")
9392 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9393 (clobber (reg:CC CC_REGNUM))]
9395 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9398 "TARGET_ARM && reload_completed"
9402 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9403 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9405 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9407 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9410 [(set_attr "conds" "clob")
9411 (set_attr "length" "16")])
9413 ; If the above pattern is followed by a CMP insn, then the compare is
9414 ; redundant, since we can rework the conditional instruction that follows.
;; Combined form of *ior_scc_scc whose result is itself compared: keeps
;; the dominance CC register live (operand 0) so the following
;; conditional insn can use it directly, avoiding a redundant CMP.
9415 (define_insn_and_split "*ior_scc_scc_cmp"
9416 [(set (match_operand 0 "dominant_cc_register" "")
9417 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9418 [(match_operand:SI 1 "s_register_operand" "r")
9419 (match_operand:SI 2 "arm_add_operand" "rIL")])
9420 (match_operator:SI 6 "arm_comparison_operator"
9421 [(match_operand:SI 4 "s_register_operand" "r")
9422 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9424 (set (match_operand:SI 7 "s_register_operand" "=r")
9425 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9426 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9429 "TARGET_ARM && reload_completed"
9433 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9434 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9436 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9438 [(set_attr "conds" "set")
9439 (set_attr "length" "16")])
;; AND of two store-condition values; splits after reload into a
;; *cmp_and-style combined compare (DOM_CC_X_AND_Y dominance mode)
;; followed by a store of NE on the combined flags.
9441 (define_insn_and_split "*and_scc_scc"
9442 [(set (match_operand:SI 0 "s_register_operand" "=r")
9443 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9444 [(match_operand:SI 1 "s_register_operand" "r")
9445 (match_operand:SI 2 "arm_add_operand" "rIL")])
9446 (match_operator:SI 6 "arm_comparison_operator"
9447 [(match_operand:SI 4 "s_register_operand" "r")
9448 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9449 (clobber (reg:CC CC_REGNUM))]
9451 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9454 "TARGET_ARM && reload_completed
9455 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9460 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9461 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9463 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9465 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9468 [(set_attr "conds" "clob")
9469 (set_attr "length" "16")])
9471 ; If the above pattern is followed by a CMP insn, then the compare is
9472 ; redundant, since we can rework the conditional instruction that follows.
;; Combined form of *and_scc_scc whose result is itself compared: keeps
;; the dominance CC register (operand 0) live for the following
;; conditional insn, making the extra CMP unnecessary.
9473 (define_insn_and_split "*and_scc_scc_cmp"
9474 [(set (match_operand 0 "dominant_cc_register" "")
9475 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9476 [(match_operand:SI 1 "s_register_operand" "r")
9477 (match_operand:SI 2 "arm_add_operand" "rIL")])
9478 (match_operator:SI 6 "arm_comparison_operator"
9479 [(match_operand:SI 4 "s_register_operand" "r")
9480 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9482 (set (match_operand:SI 7 "s_register_operand" "=r")
9483 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9484 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9487 "TARGET_ARM && reload_completed"
9491 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9492 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9494 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9496 [(set_attr "conds" "set")
9497 (set_attr "length" "16")])
9499 ;; If there is no dominance in the comparison, then we can still save an
9500 ;; instruction in the AND case, since we can know that the second compare
9501 ;; need only zero the value if false (if true, then the value is already
;; AND of two store-condition values with NO dominance relation: splits
;; into an scc for the first compare, a real compare for the second, and
;; a conditional zeroing of the result.  Operands 7/8 (CC reg and
;; compare rtx for the second test) are built in the split preparation
;; code.  Earlyclobber "&r" keeps %0 distinct from the inputs.
9503 (define_insn_and_split "*and_scc_scc_nodom"
9504 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9505 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9506 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9507 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9508 (match_operator:SI 6 "arm_comparison_operator"
9509 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9510 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9511 (clobber (reg:CC CC_REGNUM))]
9513 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9516 "TARGET_ARM && reload_completed"
9517 [(parallel [(set (match_dup 0)
9518 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9519 (clobber (reg:CC CC_REGNUM))])
9520 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9522 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9525 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9526 operands[4], operands[5]),
9528 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9530 [(set_attr "conds" "clob")
9531 (set_attr "length" "20")])
;; Split a compare-against-zero of (IOR (AND reg ...) (comparison ...))
;; into: materialize the OR of the comparison into scratch reg 4, then
;; test bit 0 of the scratch.  NOTE(review): the "(define_split" header
;; line (original 9533) is not visible in this extract.
9534 [(set (reg:CC_NOOV CC_REGNUM)
9535 (compare:CC_NOOV (ior:SI
9536 (and:SI (match_operand:SI 0 "s_register_operand" "")
9538 (match_operator:SI 1 "comparison_operator"
9539 [(match_operand:SI 2 "s_register_operand" "")
9540 (match_operand:SI 3 "arm_add_operand" "")]))
9542 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9545 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9547 (set (reg:CC_NOOV CC_REGNUM)
9548 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Commuted form of the previous split: the comparison is the first IOR
;; arm and the AND the second; same two-insn replacement via scratch
;; reg 4.  NOTE(review): the "(define_split" header line is not visible
;; in this extract.
9553 [(set (reg:CC_NOOV CC_REGNUM)
9554 (compare:CC_NOOV (ior:SI
9555 (match_operator:SI 1 "comparison_operator"
9556 [(match_operand:SI 2 "s_register_operand" "")
9557 (match_operand:SI 3 "arm_add_operand" "")])
9558 (and:SI (match_operand:SI 0 "s_register_operand" "")
9561 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9564 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9566 (set (reg:CC_NOOV CC_REGNUM)
9567 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9570 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated store-condition (-1 when true, 0 when false).  Fast paths:
;; LT vs 0 is an arithmetic shift right by 31; NE uses subs + mvnne.
;; General case: cmp, clear, then mvn (all-ones) when the condition
;; holds.
9572 (define_insn "*negscc"
9573 [(set (match_operand:SI 0 "s_register_operand" "=r")
9574 (neg:SI (match_operator 3 "arm_comparison_operator"
9575 [(match_operand:SI 1 "s_register_operand" "r")
9576 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9577 (clobber (reg:CC CC_REGNUM))]
9580 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9581 return \"mov\\t%0, %1, asr #31\";
9583 if (GET_CODE (operands[3]) == NE)
9584 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9586 output_asm_insn (\"cmp\\t%1, %2\", operands);
9587 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9588 return \"mvn%d3\\t%0, #0\";
9590 [(set_attr "conds" "clob")
9591 (set_attr "length" "12")]
;; General conditional move with its own compare.  Special-cases LT/GE
;; against zero when one arm is a register (uses the sign bit via
;; "asr #31"/"asr #32" masks: and/bic, with a conditional fixup move
;; when the other arm is non-zero).  Otherwise: cmp (or cmn for
;; negatable constants), then up to two conditional moves, skipping the
;; arm already tied to %0 (alternatives 0 and 1).
9594 (define_insn "movcond"
9595 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9597 (match_operator 5 "arm_comparison_operator"
9598 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9599 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9600 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9601 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9602 (clobber (reg:CC CC_REGNUM))]
9605 if (GET_CODE (operands[5]) == LT
9606 && (operands[4] == const0_rtx))
9608 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9610 if (operands[2] == const0_rtx)
9611 return \"and\\t%0, %1, %3, asr #31\";
9612 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9614 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9616 if (operands[1] == const0_rtx)
9617 return \"bic\\t%0, %2, %3, asr #31\";
9618 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9620 /* The only case that falls through to here is when both ops 1 & 2
9624 if (GET_CODE (operands[5]) == GE
9625 && (operands[4] == const0_rtx))
9627 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9629 if (operands[2] == const0_rtx)
9630 return \"bic\\t%0, %1, %3, asr #31\";
9631 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9633 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9635 if (operands[1] == const0_rtx)
9636 return \"and\\t%0, %2, %3, asr #31\";
9637 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9639 /* The only case that falls through to here is when both ops 1 & 2
9642 if (GET_CODE (operands[4]) == CONST_INT
9643 && !const_ok_for_arm (INTVAL (operands[4])))
9644 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9646 output_asm_insn (\"cmp\\t%3, %4\", operands);
9647 if (which_alternative != 0)
9648 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9649 if (which_alternative != 1)
9650 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9653 [(set_attr "conds" "clob")
9654 (set_attr "length" "8,8,12")]
9657 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if-then-else with its own compare: (plus %2 %3) when true, %1 when
;; false.  Clobbers CC.  (Output template line not visible in this
;; extract.)
9659 (define_insn "*ifcompare_plus_move"
9660 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9661 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9662 [(match_operand:SI 4 "s_register_operand" "r,r")
9663 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9665 (match_operand:SI 2 "s_register_operand" "r,r")
9666 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9667 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9668 (clobber (reg:CC CC_REGNUM))]
9671 [(set_attr "conds" "clob")
9672 (set_attr "length" "8,12")]
;; Conditional add using existing flags: add%d4/sub%d4 for the true arm
;; ("sub ... #%n3" when the constant is negatable, alternatives 1/3),
;; plus a mov%D4 for the false arm when %1 is not already in %0.
9675 (define_insn "*if_plus_move"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9678 (match_operator 4 "arm_comparison_operator"
9679 [(match_operand 5 "cc_register" "") (const_int 0)])
9681 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9682 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9683 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9687 sub%d4\\t%0, %2, #%n3
9688 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9689 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9690 [(set_attr "conds" "use")
9691 (set_attr "length" "4,4,8,8")
9692 (set_attr "type" "*,*,*,*")]
;; Mirror of *ifcompare_plus_move: %1 when true, (plus %2 %3) when
;; false; does its own compare and clobbers CC.
9695 (define_insn "*ifcompare_move_plus"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9697 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9698 [(match_operand:SI 4 "s_register_operand" "r,r")
9699 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9700 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9702 (match_operand:SI 2 "s_register_operand" "r,r")
9703 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9704 (clobber (reg:CC CC_REGNUM))]
9707 [(set_attr "conds" "clob")
9708 (set_attr "length" "8,12")]
;; Conditional add on existing flags with the arms swapped relative to
;; *if_plus_move: the add/sub executes on the INVERSE condition (%D4),
;; the plain move on the direct one (%d4).
9711 (define_insn "*if_move_plus"
9712 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9714 (match_operator 4 "arm_comparison_operator"
9715 [(match_operand 5 "cc_register" "") (const_int 0)])
9716 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9718 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9719 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9723 sub%D4\\t%0, %2, #%n3
9724 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9725 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9726 [(set_attr "conds" "use")
9727 (set_attr "length" "4,4,8,8")
9728 (set_attr "type" "*,*,*,*")]
;; Select between two shiftable-op results, doing its own compare.
;; Clobbers CC.  (Output template line not visible in this extract.)
9731 (define_insn "*ifcompare_arith_arith"
9732 [(set (match_operand:SI 0 "s_register_operand" "=r")
9733 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9734 [(match_operand:SI 5 "s_register_operand" "r")
9735 (match_operand:SI 6 "arm_add_operand" "rIL")])
9736 (match_operator:SI 8 "shiftable_operator"
9737 [(match_operand:SI 1 "s_register_operand" "r")
9738 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9739 (match_operator:SI 7 "shiftable_operator"
9740 [(match_operand:SI 3 "s_register_operand" "r")
9741 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9742 (clobber (reg:CC CC_REGNUM))]
9745 [(set_attr "conds" "clob")
9746 (set_attr "length" "12")]
;; Select between two shiftable-op results using existing flags: one op
;; predicated on the condition (%d5), the other on its inverse (%D5).
9749 (define_insn "*if_arith_arith"
9750 [(set (match_operand:SI 0 "s_register_operand" "=r")
9751 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9752 [(match_operand 8 "cc_register" "") (const_int 0)])
9753 (match_operator:SI 6 "shiftable_operator"
9754 [(match_operand:SI 1 "s_register_operand" "r")
9755 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9756 (match_operator:SI 7 "shiftable_operator"
9757 [(match_operand:SI 3 "s_register_operand" "r")
9758 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9760 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9761 [(set_attr "conds" "use")
9762 (set_attr "length" "8")]
;; (op %4 %5) when the compare holds, else %1; does its own compare.
;; Two-insn fast path for LT/GE against zero when (op x 0) is the
;; identity (op != AND), %1 aliases %4, and %0 is distinct: mask %5 with
;; the sign bit (and/bic ... asr #31) then apply the op.  General path:
;; cmp/cmn, conditional op, optional mov%D6 of the false arm.
9765 (define_insn "*ifcompare_arith_move"
9766 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9767 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9768 [(match_operand:SI 2 "s_register_operand" "r,r")
9769 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9770 (match_operator:SI 7 "shiftable_operator"
9771 [(match_operand:SI 4 "s_register_operand" "r,r")
9772 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9773 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9774 (clobber (reg:CC CC_REGNUM))]
9777 /* If we have an operation where (op x 0) is the identity operation and
9778 the conditional operator is LT or GE and we are comparing against zero and
9779 everything is in registers then we can do this in two instructions. */
9780 if (operands[3] == const0_rtx
9781 && GET_CODE (operands[7]) != AND
9782 && GET_CODE (operands[5]) == REG
9783 && GET_CODE (operands[1]) == REG
9784 && REGNO (operands[1]) == REGNO (operands[4])
9785 && REGNO (operands[4]) != REGNO (operands[0]))
9787 if (GET_CODE (operands[6]) == LT)
9788 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9789 else if (GET_CODE (operands[6]) == GE)
9790 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9792 if (GET_CODE (operands[3]) == CONST_INT
9793 && !const_ok_for_arm (INTVAL (operands[3])))
9794 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9796 output_asm_insn (\"cmp\\t%2, %3\", operands);
9797 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9798 if (which_alternative != 0)
9799 return \"mov%D6\\t%0, %1\";
9802 [(set_attr "conds" "clob")
9803 (set_attr "length" "8,12")]
;; Conditional shiftable op on existing flags: op when true, mov of %1
;; when false (skipped in alternative 0 where %1 is tied to %0).
9806 (define_insn "*if_arith_move"
9807 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9808 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9809 [(match_operand 6 "cc_register" "") (const_int 0)])
9810 (match_operator:SI 5 "shiftable_operator"
9811 [(match_operand:SI 2 "s_register_operand" "r,r")
9812 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9813 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9817 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9818 [(set_attr "conds" "use")
9819 (set_attr "length" "4,8")
9820 (set_attr "type" "*,*")]
;; Mirror of *ifcompare_arith_move: %1 when true, (op %2 %3) when false.
;; Note the sign-bit fast path uses GE for "and" and LT for "bic" —
;; inverted relative to *ifcompare_arith_move because the op is the
;; FALSE arm here.  General path: cmp/cmn, optional mov%d6, then the op
;; on the inverse condition (%D6).
9823 (define_insn "*ifcompare_move_arith"
9824 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9825 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9826 [(match_operand:SI 4 "s_register_operand" "r,r")
9827 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9828 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9829 (match_operator:SI 7 "shiftable_operator"
9830 [(match_operand:SI 2 "s_register_operand" "r,r")
9831 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9832 (clobber (reg:CC CC_REGNUM))]
9835 /* If we have an operation where (op x 0) is the identity operation and
9836 the conditional operator is LT or GE and we are comparing against zero and
9837 everything is in registers then we can do this in two instructions */
9838 if (operands[5] == const0_rtx
9839 && GET_CODE (operands[7]) != AND
9840 && GET_CODE (operands[3]) == REG
9841 && GET_CODE (operands[1]) == REG
9842 && REGNO (operands[1]) == REGNO (operands[2])
9843 && REGNO (operands[2]) != REGNO (operands[0]))
9845 if (GET_CODE (operands[6]) == GE)
9846 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9847 else if (GET_CODE (operands[6]) == LT)
9848 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9851 if (GET_CODE (operands[5]) == CONST_INT
9852 && !const_ok_for_arm (INTVAL (operands[5])))
9853 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9855 output_asm_insn (\"cmp\\t%4, %5\", operands);
9857 if (which_alternative != 0)
9858 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9859 return \"%I7%D6\\t%0, %2, %3\";
9861 [(set_attr "conds" "clob")
9862 (set_attr "length" "8,12")]
;; Conditional shiftable op on existing flags with arms swapped: op on
;; the inverse condition (%D4), mov of %1 on the direct one (%d4).
9865 (define_insn "*if_move_arith"
9866 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9868 (match_operator 4 "arm_comparison_operator"
9869 [(match_operand 6 "cc_register" "") (const_int 0)])
9870 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9871 (match_operator:SI 5 "shiftable_operator"
9872 [(match_operand:SI 2 "s_register_operand" "r,r")
9873 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9877 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9878 [(set_attr "conds" "use")
9879 (set_attr "length" "4,8")
9880 (set_attr "type" "*,*")]
;; %1 when the compare holds, else NOT %2; does its own compare and
;; clobbers CC.  (Output template line not visible in this extract.)
9883 (define_insn "*ifcompare_move_not"
9884 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9886 (match_operator 5 "arm_comparison_operator"
9887 [(match_operand:SI 3 "s_register_operand" "r,r")
9888 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9889 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9891 (match_operand:SI 2 "s_register_operand" "r,r"))))
9892 (clobber (reg:CC CC_REGNUM))]
9895 [(set_attr "conds" "clob")
9896 (set_attr "length" "8,12")]
;; Conditional mvn on existing flags: %1 when true (mov, or mvn of the
;; inverted constant #%B1 for "K"), mvn%D4 of %2 when false.
9899 (define_insn "*if_move_not"
9900 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9902 (match_operator 4 "arm_comparison_operator"
9903 [(match_operand 3 "cc_register" "") (const_int 0)])
9904 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9905 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9909 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9910 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9911 [(set_attr "conds" "use")
9912 (set_attr "length" "4,8,8")]
;; NOT %2 when the compare holds, else %1; does its own compare and
;; clobbers CC.  (Output template line not visible in this extract.)
9915 (define_insn "*ifcompare_not_move"
9916 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9918 (match_operator 5 "arm_comparison_operator"
9919 [(match_operand:SI 3 "s_register_operand" "r,r")
9920 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9922 (match_operand:SI 2 "s_register_operand" "r,r"))
9923 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9924 (clobber (reg:CC CC_REGNUM))]
9927 [(set_attr "conds" "clob")
9928 (set_attr "length" "8,12")]
;; Conditional mvn on existing flags, arms swapped relative to
;; *if_move_not: mvn%d4 when true, mov/mvn of %1 when false (%D4).
9931 (define_insn "*if_not_move"
9932 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9934 (match_operator 4 "arm_comparison_operator"
9935 [(match_operand 3 "cc_register" "") (const_int 0)])
9936 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9937 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9941 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9942 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9943 [(set_attr "conds" "use")
9944 (set_attr "length" "4,8,8")]
;; Shifted %2 when the compare holds, else %1; does its own compare and
;; clobbers CC.  (Output template line not visible in this extract.)
9947 (define_insn "*ifcompare_shift_move"
9948 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9950 (match_operator 6 "arm_comparison_operator"
9951 [(match_operand:SI 4 "s_register_operand" "r,r")
9952 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9953 (match_operator:SI 7 "shift_operator"
9954 [(match_operand:SI 2 "s_register_operand" "r,r")
9955 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9956 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9957 (clobber (reg:CC CC_REGNUM))]
9960 [(set_attr "conds" "clob")
9961 (set_attr "length" "8,12")]
;; Conditional shifted move on existing flags: mov%d5 of the shifted %2
;; when true; mov/mvn of %1 on the inverse when not tied to %0.
9964 (define_insn "*if_shift_move"
9965 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9967 (match_operator 5 "arm_comparison_operator"
9968 [(match_operand 6 "cc_register" "") (const_int 0)])
9969 (match_operator:SI 4 "shift_operator"
9970 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9971 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9972 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9976 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9977 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9978 [(set_attr "conds" "use")
9979 (set_attr "shift" "2")
9980 (set_attr "length" "4,8,8")
9981 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9982 (const_string "alu_shift")
9983 (const_string "alu_shift_reg")))]
;; %1 when the compare holds, else shifted %2; does its own compare and
;; clobbers CC.  (Output template line not visible in this extract.)
9986 (define_insn "*ifcompare_move_shift"
9987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9989 (match_operator 6 "arm_comparison_operator"
9990 [(match_operand:SI 4 "s_register_operand" "r,r")
9991 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9992 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9993 (match_operator:SI 7 "shift_operator"
9994 [(match_operand:SI 2 "s_register_operand" "r,r")
9995 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9996 (clobber (reg:CC CC_REGNUM))]
9999 [(set_attr "conds" "clob")
10000 (set_attr "length" "8,12")]
;; Conditional shifted move on existing flags with arms swapped: the
;; shifted move runs on the inverse condition (%D5).
10003 (define_insn "*if_move_shift"
10004 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10006 (match_operator 5 "arm_comparison_operator"
10007 [(match_operand 6 "cc_register" "") (const_int 0)])
10008 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10009 (match_operator:SI 4 "shift_operator"
10010 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10011 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10015 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10016 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10017 [(set_attr "conds" "use")
10018 (set_attr "shift" "2")
10019 (set_attr "length" "4,8,8")
10020 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10021 (const_string "alu_shift")
10022 (const_string "alu_shift_reg")))]
10025 (define_insn "*ifcompare_shift_shift"
10026 [(set (match_operand:SI 0 "s_register_operand" "=r")
10028 (match_operator 7 "arm_comparison_operator"
10029 [(match_operand:SI 5 "s_register_operand" "r")
10030 (match_operand:SI 6 "arm_add_operand" "rIL")])
10031 (match_operator:SI 8 "shift_operator"
10032 [(match_operand:SI 1 "s_register_operand" "r")
10033 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10034 (match_operator:SI 9 "shift_operator"
10035 [(match_operand:SI 3 "s_register_operand" "r")
10036 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10037 (clobber (reg:CC CC_REGNUM))]
10040 [(set_attr "conds" "clob")
10041 (set_attr "length" "12")]
10044 (define_insn "*if_shift_shift"
10045 [(set (match_operand:SI 0 "s_register_operand" "=r")
10047 (match_operator 5 "arm_comparison_operator"
10048 [(match_operand 8 "cc_register" "") (const_int 0)])
10049 (match_operator:SI 6 "shift_operator"
10050 [(match_operand:SI 1 "s_register_operand" "r")
10051 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10052 (match_operator:SI 7 "shift_operator"
10053 [(match_operand:SI 3 "s_register_operand" "r")
10054 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10056 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10057 [(set_attr "conds" "use")
10058 (set_attr "shift" "1")
10059 (set_attr "length" "8")
10060 (set (attr "type") (if_then_else
10061 (and (match_operand 2 "const_int_operand" "")
10062 (match_operand 4 "const_int_operand" ""))
10063 (const_string "alu_shift")
10064 (const_string "alu_shift_reg")))]
10067 (define_insn "*ifcompare_not_arith"
10068 [(set (match_operand:SI 0 "s_register_operand" "=r")
10070 (match_operator 6 "arm_comparison_operator"
10071 [(match_operand:SI 4 "s_register_operand" "r")
10072 (match_operand:SI 5 "arm_add_operand" "rIL")])
10073 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10074 (match_operator:SI 7 "shiftable_operator"
10075 [(match_operand:SI 2 "s_register_operand" "r")
10076 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10077 (clobber (reg:CC CC_REGNUM))]
10080 [(set_attr "conds" "clob")
10081 (set_attr "length" "12")]
10084 (define_insn "*if_not_arith"
10085 [(set (match_operand:SI 0 "s_register_operand" "=r")
10087 (match_operator 5 "arm_comparison_operator"
10088 [(match_operand 4 "cc_register" "") (const_int 0)])
10089 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10090 (match_operator:SI 6 "shiftable_operator"
10091 [(match_operand:SI 2 "s_register_operand" "r")
10092 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10094 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10095 [(set_attr "conds" "use")
10096 (set_attr "length" "8")]
10099 (define_insn "*ifcompare_arith_not"
10100 [(set (match_operand:SI 0 "s_register_operand" "=r")
10102 (match_operator 6 "arm_comparison_operator"
10103 [(match_operand:SI 4 "s_register_operand" "r")
10104 (match_operand:SI 5 "arm_add_operand" "rIL")])
10105 (match_operator:SI 7 "shiftable_operator"
10106 [(match_operand:SI 2 "s_register_operand" "r")
10107 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10108 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10109 (clobber (reg:CC CC_REGNUM))]
10112 [(set_attr "conds" "clob")
10113 (set_attr "length" "12")]
10116 (define_insn "*if_arith_not"
10117 [(set (match_operand:SI 0 "s_register_operand" "=r")
10119 (match_operator 5 "arm_comparison_operator"
10120 [(match_operand 4 "cc_register" "") (const_int 0)])
10121 (match_operator:SI 6 "shiftable_operator"
10122 [(match_operand:SI 2 "s_register_operand" "r")
10123 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10124 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10126 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10127 [(set_attr "conds" "use")
10128 (set_attr "length" "8")]
10131 (define_insn "*ifcompare_neg_move"
10132 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10134 (match_operator 5 "arm_comparison_operator"
10135 [(match_operand:SI 3 "s_register_operand" "r,r")
10136 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10137 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10138 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10139 (clobber (reg:CC CC_REGNUM))]
10142 [(set_attr "conds" "clob")
10143 (set_attr "length" "8,12")]
10146 (define_insn "*if_neg_move"
10147 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10149 (match_operator 4 "arm_comparison_operator"
10150 [(match_operand 3 "cc_register" "") (const_int 0)])
10151 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10152 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10155 rsb%d4\\t%0, %2, #0
10156 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10157 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10158 [(set_attr "conds" "use")
10159 (set_attr "length" "4,8,8")]
10162 (define_insn "*ifcompare_move_neg"
10163 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10165 (match_operator 5 "arm_comparison_operator"
10166 [(match_operand:SI 3 "s_register_operand" "r,r")
10167 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10168 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10169 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10170 (clobber (reg:CC CC_REGNUM))]
10173 [(set_attr "conds" "clob")
10174 (set_attr "length" "8,12")]
10177 (define_insn "*if_move_neg"
10178 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10180 (match_operator 4 "arm_comparison_operator"
10181 [(match_operand 3 "cc_register" "") (const_int 0)])
10182 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10183 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10186 rsb%D4\\t%0, %2, #0
10187 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10188 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10189 [(set_attr "conds" "use")
10190 (set_attr "length" "4,8,8")]
10193 (define_insn "*arith_adjacentmem"
10194 [(set (match_operand:SI 0 "s_register_operand" "=r")
10195 (match_operator:SI 1 "shiftable_operator"
10196 [(match_operand:SI 2 "memory_operand" "m")
10197 (match_operand:SI 3 "memory_operand" "m")]))
10198 (clobber (match_scratch:SI 4 "=r"))]
10199 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10205 HOST_WIDE_INT val1 = 0, val2 = 0;
10207 if (REGNO (operands[0]) > REGNO (operands[4]))
10209 ldm[1] = operands[4];
10210 ldm[2] = operands[0];
10214 ldm[1] = operands[0];
10215 ldm[2] = operands[4];
10218 base_reg = XEXP (operands[2], 0);
10220 if (!REG_P (base_reg))
10222 val1 = INTVAL (XEXP (base_reg, 1));
10223 base_reg = XEXP (base_reg, 0);
10226 if (!REG_P (XEXP (operands[3], 0)))
10227 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10229 arith[0] = operands[0];
10230 arith[3] = operands[1];
10244 if (val1 !=0 && val2 != 0)
10248 if (val1 == 4 || val2 == 4)
10249 /* Other val must be 8, since we know they are adjacent and neither
10251 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10252 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10254 ldm[0] = ops[0] = operands[4];
10256 ops[2] = GEN_INT (val1);
10257 output_add_immediate (ops);
10259 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10261 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10265 /* Offset is out of range for a single add, so use two ldr. */
10268 ops[2] = GEN_INT (val1);
10269 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10271 ops[2] = GEN_INT (val2);
10272 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10275 else if (val1 != 0)
10278 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10280 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10285 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10287 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10289 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10292 [(set_attr "length" "12")
10293 (set_attr "predicable" "yes")
10294 (set_attr "type" "load1")]
10297 ; This pattern is never tried by combine, so do it as a peephole
10300 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10301 (match_operand:SI 1 "arm_general_register_operand" ""))
10302 (set (reg:CC CC_REGNUM)
10303 (compare:CC (match_dup 1) (const_int 0)))]
10305 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10306 (set (match_dup 0) (match_dup 1))])]
10310 ; Peepholes to spot possible load- and store-multiples; if the ordering is
10311 ; reversed, check that the memory references aren't volatile.
10314 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10315 (match_operand:SI 4 "memory_operand" "m"))
10316 (set (match_operand:SI 1 "s_register_operand" "=rk")
10317 (match_operand:SI 5 "memory_operand" "m"))
10318 (set (match_operand:SI 2 "s_register_operand" "=rk")
10319 (match_operand:SI 6 "memory_operand" "m"))
10320 (set (match_operand:SI 3 "s_register_operand" "=rk")
10321 (match_operand:SI 7 "memory_operand" "m"))]
10322 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10324 return emit_ldm_seq (operands, 4);
10329 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10330 (match_operand:SI 3 "memory_operand" "m"))
10331 (set (match_operand:SI 1 "s_register_operand" "=rk")
10332 (match_operand:SI 4 "memory_operand" "m"))
10333 (set (match_operand:SI 2 "s_register_operand" "=rk")
10334 (match_operand:SI 5 "memory_operand" "m"))]
10335 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10337 return emit_ldm_seq (operands, 3);
10342 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10343 (match_operand:SI 2 "memory_operand" "m"))
10344 (set (match_operand:SI 1 "s_register_operand" "=rk")
10345 (match_operand:SI 3 "memory_operand" "m"))]
10346 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10348 return emit_ldm_seq (operands, 2);
10353 [(set (match_operand:SI 4 "memory_operand" "=m")
10354 (match_operand:SI 0 "s_register_operand" "rk"))
10355 (set (match_operand:SI 5 "memory_operand" "=m")
10356 (match_operand:SI 1 "s_register_operand" "rk"))
10357 (set (match_operand:SI 6 "memory_operand" "=m")
10358 (match_operand:SI 2 "s_register_operand" "rk"))
10359 (set (match_operand:SI 7 "memory_operand" "=m")
10360 (match_operand:SI 3 "s_register_operand" "rk"))]
10361 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10363 return emit_stm_seq (operands, 4);
10368 [(set (match_operand:SI 3 "memory_operand" "=m")
10369 (match_operand:SI 0 "s_register_operand" "rk"))
10370 (set (match_operand:SI 4 "memory_operand" "=m")
10371 (match_operand:SI 1 "s_register_operand" "rk"))
10372 (set (match_operand:SI 5 "memory_operand" "=m")
10373 (match_operand:SI 2 "s_register_operand" "rk"))]
10374 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10376 return emit_stm_seq (operands, 3);
10381 [(set (match_operand:SI 2 "memory_operand" "=m")
10382 (match_operand:SI 0 "s_register_operand" "rk"))
10383 (set (match_operand:SI 3 "memory_operand" "=m")
10384 (match_operand:SI 1 "s_register_operand" "rk"))]
10385 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10387 return emit_stm_seq (operands, 2);
10392 [(set (match_operand:SI 0 "s_register_operand" "")
10393 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10395 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10396 [(match_operand:SI 3 "s_register_operand" "")
10397 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10398 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10400 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10401 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10406 ;; This split can be used because CC_Z mode implies that the following
10407 ;; branch will be an equality, or an unsigned inequality, so the sign
10408 ;; extension is not needed.
10411 [(set (reg:CC_Z CC_REGNUM)
10413 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10415 (match_operand 1 "const_int_operand" "")))
10416 (clobber (match_scratch:SI 2 ""))]
10418 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10419 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10420 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10421 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10423 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10426 ;; ??? Check the patterns above for Thumb-2 usefulness
10428 (define_expand "prologue"
10429 [(clobber (const_int 0))]
10432 arm_expand_prologue ();
10434 thumb1_expand_prologue ();
10439 (define_expand "epilogue"
10440 [(clobber (const_int 0))]
10443 if (crtl->calls_eh_return)
10444 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10446 thumb1_expand_epilogue ();
10447 else if (USE_RETURN_INSN (FALSE))
10449 emit_jump_insn (gen_return ());
10452 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10454 gen_rtx_RETURN (VOIDmode)),
10455 VUNSPEC_EPILOGUE));
10460 ;; Note - although unspec_volatiles USE all hard registers,
10461 ;; USEs are ignored after reload has completed.  Thus we need
10462 ;; to add an unspec of the link register to ensure that flow
10463 ;; does not think that it is unused by the sibcall branch that
10464 ;; will replace the standard function epilogue.
10465 (define_insn "sibcall_epilogue"
10466 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10467 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10470 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10471 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10472 return arm_output_epilogue (next_nonnote_insn (insn));
10474 ;; Length is absolute worst case
10475 [(set_attr "length" "44")
10476 (set_attr "type" "block")
10477 ;; We don't clobber the conditions, but the potential length of this
10478 ;; operation is sufficient to make conditionalizing the sequence
10479 ;; unlikely to be profitable.
10480 (set_attr "conds" "clob")]
10483 (define_insn "*epilogue_insns"
10484 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10488 return arm_output_epilogue (NULL);
10489 else /* TARGET_THUMB1 */
10490 return thumb_unexpanded_epilogue ();
10492 ; Length is absolute worst case
10493 [(set_attr "length" "44")
10494 (set_attr "type" "block")
10495 ;; We don't clobber the conditions, but the potential length of this
10496 ;; operation is sufficient to make conditionalizing the sequence
10497 ;; unlikely to be profitable.
10498 (set_attr "conds" "clob")]
10501 (define_expand "eh_epilogue"
10502 [(use (match_operand:SI 0 "register_operand" ""))
10503 (use (match_operand:SI 1 "register_operand" ""))
10504 (use (match_operand:SI 2 "register_operand" ""))]
10508 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10509 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10511 rtx ra = gen_rtx_REG (Pmode, 2);
10513 emit_move_insn (ra, operands[2]);
10516 /* This is a hack -- we may have crystallized the function type too
10518 cfun->machine->func_type = 0;
10522 ;; This split is only used during output to reduce the number of patterns
10523 ;; that need assembler instructions adding to them. We allowed the setting
10524 ;; of the conditions to be implicit during rtl generation so that
10525 ;; the conditional compare patterns would work. However this conflicts to
10526 ;; some extent with the conditional data operations, so we have to split them
10529 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10530 ;; conditional execution sufficient?
10533 [(set (match_operand:SI 0 "s_register_operand" "")
10534 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10535 [(match_operand 2 "" "") (match_operand 3 "" "")])
10537 (match_operand 4 "" "")))
10538 (clobber (reg:CC CC_REGNUM))]
10539 "TARGET_ARM && reload_completed"
10540 [(set (match_dup 5) (match_dup 6))
10541 (cond_exec (match_dup 7)
10542 (set (match_dup 0) (match_dup 4)))]
10545 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10546 operands[2], operands[3]);
10547 enum rtx_code rc = GET_CODE (operands[1]);
10549 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10550 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10551 if (mode == CCFPmode || mode == CCFPEmode)
10552 rc = reverse_condition_maybe_unordered (rc);
10554 rc = reverse_condition (rc);
10556 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10561 [(set (match_operand:SI 0 "s_register_operand" "")
10562 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10563 [(match_operand 2 "" "") (match_operand 3 "" "")])
10564 (match_operand 4 "" "")
10566 (clobber (reg:CC CC_REGNUM))]
10567 "TARGET_ARM && reload_completed"
10568 [(set (match_dup 5) (match_dup 6))
10569 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10570 (set (match_dup 0) (match_dup 4)))]
10573 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10574 operands[2], operands[3]);
10576 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10577 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10582 [(set (match_operand:SI 0 "s_register_operand" "")
10583 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10584 [(match_operand 2 "" "") (match_operand 3 "" "")])
10585 (match_operand 4 "" "")
10586 (match_operand 5 "" "")))
10587 (clobber (reg:CC CC_REGNUM))]
10588 "TARGET_ARM && reload_completed"
10589 [(set (match_dup 6) (match_dup 7))
10590 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10591 (set (match_dup 0) (match_dup 4)))
10592 (cond_exec (match_dup 8)
10593 (set (match_dup 0) (match_dup 5)))]
10596 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10597 operands[2], operands[3]);
10598 enum rtx_code rc = GET_CODE (operands[1]);
10600 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10601 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10602 if (mode == CCFPmode || mode == CCFPEmode)
10603 rc = reverse_condition_maybe_unordered (rc);
10605 rc = reverse_condition (rc);
10607 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10612 [(set (match_operand:SI 0 "s_register_operand" "")
10613 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10614 [(match_operand:SI 2 "s_register_operand" "")
10615 (match_operand:SI 3 "arm_add_operand" "")])
10616 (match_operand:SI 4 "arm_rhs_operand" "")
10618 (match_operand:SI 5 "s_register_operand" ""))))
10619 (clobber (reg:CC CC_REGNUM))]
10620 "TARGET_ARM && reload_completed"
10621 [(set (match_dup 6) (match_dup 7))
10622 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10623 (set (match_dup 0) (match_dup 4)))
10624 (cond_exec (match_dup 8)
10625 (set (match_dup 0) (not:SI (match_dup 5))))]
10628 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10629 operands[2], operands[3]);
10630 enum rtx_code rc = GET_CODE (operands[1]);
10632 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10633 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10634 if (mode == CCFPmode || mode == CCFPEmode)
10635 rc = reverse_condition_maybe_unordered (rc);
10637 rc = reverse_condition (rc);
10639 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10643 (define_insn "*cond_move_not"
10644 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10645 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10646 [(match_operand 3 "cc_register" "") (const_int 0)])
10647 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10649 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10653 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10654 [(set_attr "conds" "use")
10655 (set_attr "length" "4,8")]
10658 ;; The next two patterns occur when an AND operation is followed by a
10659 ;; scc insn sequence
10661 (define_insn "*sign_extract_onebit"
10662 [(set (match_operand:SI 0 "s_register_operand" "=r")
10663 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10665 (match_operand:SI 2 "const_int_operand" "n")))
10666 (clobber (reg:CC CC_REGNUM))]
10669 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10670 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10671 return \"mvnne\\t%0, #0\";
10673 [(set_attr "conds" "clob")
10674 (set_attr "length" "8")]
10677 (define_insn "*not_signextract_onebit"
10678 [(set (match_operand:SI 0 "s_register_operand" "=r")
10680 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10682 (match_operand:SI 2 "const_int_operand" "n"))))
10683 (clobber (reg:CC CC_REGNUM))]
10686 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10687 output_asm_insn (\"tst\\t%1, %2\", operands);
10688 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10689 return \"movne\\t%0, #0\";
10691 [(set_attr "conds" "clob")
10692 (set_attr "length" "12")]
10694 ;; ??? The above patterns need auditing for Thumb-2
10696 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10697 ;; expressions. For simplicity, the first register is also in the unspec
10699 (define_insn "*push_multi"
10700 [(match_parallel 2 "multi_register_push"
10701 [(set (match_operand:BLK 0 "memory_operand" "=m")
10702 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10703 UNSPEC_PUSH_MULT))])]
10707 int num_saves = XVECLEN (operands[2], 0);
10709 /* For the StrongARM at least it is faster to
10710 use STR to store only a single register.
10711 In Thumb mode always use push, and the assembler will pick
10712 something appropriate. */
10713 if (num_saves == 1 && TARGET_ARM)
10714 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10721 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10723 strcpy (pattern, \"push\\t{%1\");
10725 for (i = 1; i < num_saves; i++)
10727 strcat (pattern, \", %|\");
10729 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10732 strcat (pattern, \"}\");
10733 output_asm_insn (pattern, operands);
10738 [(set_attr "type" "store4")]
;; Zero-length insn (length attribute "0" below) that ties the two
;; stack-related registers together through a BLK-mode store, acting as a
;; barrier so memory accesses are not reordered across it.
;; NOTE(review): the unspec constant line is elided in this fragment --
;; confirm against the full file.
10741 (define_insn "stack_tie"
10742 [(set (mem:BLK (scratch))
10743 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10744 (match_operand:SI 1 "s_register_operand" "rk")]
10748 [(set_attr "length" "0")]
10751 ;; Similarly for the floating point registers
10752 (define_insn "*push_fp_multi"
10753 [(match_parallel 2 "multi_register_push"
10754 [(set (match_operand:BLK 0 "memory_operand" "=m")
10755 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10756 UNSPEC_PUSH_MULT))])]
10757 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10762 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10763 output_asm_insn (pattern, operands);
10766 [(set_attr "type" "f_store")]
10769 ;; Special patterns for dealing with the constant pool
10771 (define_insn "align_4"
10772 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10775 assemble_align (32);
10780 (define_insn "align_8"
10781 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10784 assemble_align (64);
10789 (define_insn "consttable_end"
10790 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10793 making_const_table = FALSE;
10798 (define_insn "consttable_1"
10799 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10802 making_const_table = TRUE;
10803 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10804 assemble_zeros (3);
10807 [(set_attr "length" "4")]
10810 (define_insn "consttable_2"
10811 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10814 making_const_table = TRUE;
10815 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10816 assemble_zeros (2);
10819 [(set_attr "length" "4")]
10822 (define_insn "consttable_4"
10823 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10827 making_const_table = TRUE;
10828 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10833 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10834 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10838 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10843 [(set_attr "length" "4")]
10846 (define_insn "consttable_8"
10847 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10851 making_const_table = TRUE;
10852 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10857 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10858 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10862 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10867 [(set_attr "length" "8")]
10870 (define_insn "consttable_16"
10871 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10875 making_const_table = TRUE;
10876 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10881 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10882 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10886 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10891 [(set_attr "length" "16")]
10894 ;; Miscellaneous Thumb patterns
10896 (define_expand "tablejump"
10897 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10898 (use (label_ref (match_operand 1 "" "")))])]
10903 /* Hopefully, CSE will eliminate this copy. */
10904 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10905 rtx reg2 = gen_reg_rtx (SImode);
10907 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10908 operands[0] = reg2;
10913 ;; NB never uses BX.
10914 (define_insn "*thumb1_tablejump"
10915 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10916 (use (label_ref (match_operand 1 "" "")))]
10919 [(set_attr "length" "2")]
10922 ;; V5 instructions.
;; Count leading zeros: maps the RTL clz:SI operation onto the ARMv5+
;; CLZ instruction (guarded by arm_arch5; predicable in ARM state).
;; NOTE(review): the output template line is elided in this fragment.
10924 (define_insn "clzsi2"
10925 [(set (match_operand:SI 0 "s_register_operand" "=r")
10926 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10927 "TARGET_32BIT && arm_arch5"
10929 [(set_attr "predicable" "yes")
10930 (set_attr "insn" "clz")])
10932 ;; V5E instructions.
;; Generic RTL "prefetch" pattern for ARMv5E and later.  Operand 0 is the
;; address; operands 1 and 2 carry the generic read/write and locality
;; hints.  The output template is elided in this fragment -- presumably
;; the PLD instruction; confirm against the full file.
10934 (define_insn "prefetch"
10935 [(prefetch (match_operand:SI 0 "address_operand" "p")
10936 (match_operand:SI 1 "" "")
10937 (match_operand:SI 2 "" ""))]
10938 "TARGET_32BIT && arm_arch5e"
10941 ;; General predication pattern
10944 [(match_operator 0 "arm_comparison_operator"
10945 [(match_operand 1 "cc_register" "")
10951 (define_insn "prologue_use"
10952 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10954 "%@ %0 needed for prologue"
10958 ;; Patterns for exception handling
10960 (define_expand "eh_return"
10961 [(use (match_operand 0 "general_operand" ""))]
10966 emit_insn (gen_arm_eh_return (operands[0]));
10968 emit_insn (gen_thumb_eh_return (operands[0]));
10973 ;; We can't expand this before we know where the link register is stored.
10974 (define_insn_and_split "arm_eh_return"
10975 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10977 (clobber (match_scratch:SI 1 "=&r"))]
10980 "&& reload_completed"
10984 arm_set_return_address (operands[0], operands[1]);
10989 (define_insn_and_split "thumb_eh_return"
10990 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10992 (clobber (match_scratch:SI 1 "=&l"))]
10995 "&& reload_completed"
10999 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer directly from the hardware TLS register
;; (CP15 c13, c0, 3) via MRC; result goes into operand 0.
11007 (define_insn "load_tp_hard"
11008 [(set (match_operand:SI 0 "register_operand" "=r")
11009 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11011 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11012 [(set_attr "predicable" "yes")]
11015 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Read the thread pointer by calling the EABI helper __aeabi_read_tp.
;; The result is hard-wired to r0 (reg:SI 0); lr, ip and the condition
;; codes are clobbered by the call, as declared below.
11016 (define_insn "load_tp_soft"
11017 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11018 (clobber (reg:SI LR_REGNUM))
11019 (clobber (reg:SI IP_REGNUM))
11020 (clobber (reg:CC CC_REGNUM))]
11022 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11023 [(set_attr "conds" "clob")]
11026 ;; Load the FPA co-processor patterns
11028 ;; Load the Maverick co-processor patterns
11029 (include "cirrus.md")
11030 ;; Vector bits common to IWMMXT and Neon
11031 (include "vec-common.md")
11032 ;; Load the Intel Wireless Multimedia Extension patterns
11033 (include "iwmmxt.md")
11034 ;; Load the VFP co-processor patterns
11036 ;; Thumb-2 patterns
11037 (include "thumb2.md")
11039 (include "neon.md")