1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
107 ;; UNSPEC_VOLATILE Usage:
110 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
112 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
113 ; instruction epilogue sequence that isn't expanded
114 ; into normal RTL. Used for both normal and sibcall
116 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
117 ; for inlined constants.
118 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
120 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
122 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
124 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
126 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
128 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
130 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
131 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
132 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
133 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
134 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
135 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
136 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
141 ;;---------------------------------------------------------------------------
144 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
145 ; generating ARM code. This is used to control the length of some insn
146 ; patterns that share the same RTL in both ARM and Thumb code.
147 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
149 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
150 ; scheduling decisions for the load unit and the multiplier.
151 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
153 ; IS_XSCALE is set to 'yes' when compiling for XScale.
154 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
156 ;; Operand number of an input operand that is shifted. Zero if the
157 ;; given instruction does not shift one of its input operands.
158 (define_attr "shift" "" (const_int 0))
160 ; Floating Point Unit. If we only have floating point emulation, then there
161 ; is no point in scheduling the floating point insns. (Well, for best
162 ; performance we should try and group them together).
163 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon,neon_fp16"
164 (const (symbol_ref "arm_fpu_attr")))
166 ; LENGTH of an instruction (in bytes)
167 (define_attr "length" "" (const_int 4))
169 ; POOL_RANGE is how far away from a constant pool entry that this insn
170 ; can be placed. If the distance is zero, then this insn will never
171 ; reference the pool.
172 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
173 ; before its address.
174 (define_attr "pool_range" "" (const_int 0))
175 (define_attr "neg_pool_range" "" (const_int 0))
177 ; An assembler sequence may clobber the condition codes without us knowing.
178 ; If such an insn references the pool, then we have no way of knowing how,
179 ; so use the most conservative value for pool_range.
180 (define_asm_attributes
181 [(set_attr "conds" "clob")
182 (set_attr "length" "4")
183 (set_attr "pool_range" "250")])
185 ;; The instruction used to implement a particular pattern. This
186 ;; information is used by pipeline descriptions to provide accurate
187 ;; scheduling information.
190 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
191 (const_string "other"))
193 ; TYPE attribute is used to detect floating point instructions which, if
194 ; running on a co-processor can run in parallel with other, basic instructions
195 ; If write-buffer scheduling is enabled then it can also be used in the
196 ; scheduling of writes.
198 ; Classification of each insn
199 ; Note: vfp.md has different meanings for some of these, and some further
200 ; types as well. See that file for details.
201 ; alu any alu instruction that doesn't hit memory or fp
202 ; regs or have a shifted source operand
203 ; alu_shift any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a constant
205 ; alu_shift_reg any data instruction that doesn't hit memory or fp
206 ; regs, but has a source operand shifted by a register value
207 ; mult a multiply instruction
208 ; block blockage insn, this blocks all functional units
209 ; float a floating point arithmetic operation (subject to expansion)
210 ; fdivd DFmode floating point division
211 ; fdivs SFmode floating point division
212 ; fmul Floating point multiply
213 ; ffmul Fast floating point multiply
214 ; farith Floating point arithmetic (4 cycle)
215 ; ffarith Fast floating point arithmetic (2 cycle)
216 ; float_em a floating point arithmetic operation that is normally emulated
217 ; even on a machine with an fpa.
218 ; f_load a floating point load from memory
219 ; f_store a floating point store to memory
220 ; f_load[sd] single/double load from memory
221 ; f_store[sd] single/double store to memory
222 ; f_flag a transfer of co-processor flags to the CPSR
223 ; f_mem_r a transfer of a floating point register to a real reg via mem
224 ; r_mem_f the reverse of f_mem_r
225 ; f_2_r fast transfer float to arm (no memory needed)
226 ; r_2_f fast transfer arm to float
227 ; f_cvt convert floating<->integral
229 ; call a subroutine call
230 ; load_byte load byte(s) from memory to arm registers
231 ; load1 load 1 word from memory to arm registers
232 ; load2 load 2 words from memory to arm registers
233 ; load3 load 3 words from memory to arm registers
234 ; load4 load 4 words from memory to arm registers
235 ; store1 store 1 word to memory from arm registers
236 ; store2 store 2 words
237 ; store3 store 3 words
238 ; store4 store 4 (or more) words
239 ; Additions for Cirrus Maverick co-processor:
240 ; mav_farith Floating point arithmetic (4 cycle)
241 ; mav_dmult Double multiplies (7 cycle)
245 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
247 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
248 (const_string "mult")
249 (const_string "alu")))
251 ; Load scheduling, set from the arm_ld_sched variable
252 ; initialized by arm_override_options()
253 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
255 ;; Classification of NEON instructions for scheduling purposes.
256 ;; Do not set this attribute and the "type" attribute together in
257 ;; any one instruction pattern.
258 (define_attr "neon_type"
269 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
270 neon_mul_qqq_8_16_32_ddd_32,\
271 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
272 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
274 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
275 neon_mla_qqq_32_qqd_32_scalar,\
276 neon_mul_ddd_16_scalar_32_16_long_scalar,\
277 neon_mul_qqd_32_scalar,\
278 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
283 neon_vqshl_vrshl_vqrshl_qqq,\
285 neon_fp_vadd_ddd_vabs_dd,\
286 neon_fp_vadd_qqq_vabs_qq,\
292 neon_fp_vmla_ddd_scalar,\
293 neon_fp_vmla_qqq_scalar,\
294 neon_fp_vrecps_vrsqrts_ddd,\
295 neon_fp_vrecps_vrsqrts_qqq,\
303 neon_vld2_2_regs_vld1_vld2_all_lanes,\
306 neon_vst1_1_2_regs_vst2_2_regs,\
308 neon_vst2_4_regs_vst3_vst4,\
310 neon_vld1_vld2_lane,\
311 neon_vld3_vld4_lane,\
312 neon_vst1_vst2_lane,\
313 neon_vst3_vst4_lane,\
314 neon_vld3_vld4_all_lanes,\
322 (const_string "none"))
324 ; condition codes: this one is used by final_prescan_insn to speed up
325 ; conditionalizing instructions. It saves having to scan the rtl to see if
326 ; it uses or alters the condition codes.
328 ; USE means that the condition codes are used by the insn in the process of
329 ; outputting code, this means (at present) that we can't use the insn in
332 ; SET means that the purpose of the insn is to set the condition codes in a
333 ; well defined manner.
335 ; CLOB means that the condition codes are altered in an undefined manner, if
336 ; they are altered at all
338 ; UNCONDITIONAL means the instructions cannot be conditionally executed.
340 ; NOCOND means that the condition codes are neither altered nor affect the
341 ; output of this insn
343 (define_attr "conds" "use,set,clob,unconditional,nocond"
344 (if_then_else (eq_attr "type" "call")
345 (const_string "clob")
346 (if_then_else (eq_attr "neon_type" "none")
347 (const_string "nocond")
348 (const_string "unconditional"))))
350 ; Predicable means that the insn can be conditionally executed based on
351 ; an automatically added predicate (additional patterns are generated by
352 ; gen...). We default to 'no' because no Thumb patterns match this rule
353 ; and not all ARM patterns do.
354 (define_attr "predicable" "no,yes" (const_string "no"))
356 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
357 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
358 ; suffer blockages enough to warrant modelling this (and it can adversely
359 ; affect the schedule).
360 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
362 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
363 ; to stall the processor. Used with model_wbuf above.
364 (define_attr "write_conflict" "no,yes"
365 (if_then_else (eq_attr "type"
366 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
368 (const_string "no")))
370 ; Classify the insns into those that take one cycle and those that take more
371 ; than one on the main cpu execution unit.
372 (define_attr "core_cycles" "single,multi"
373 (if_then_else (eq_attr "type"
374 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
375 (const_string "single")
376 (const_string "multi")))
378 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
379 ;; distant label. Only applicable to Thumb code.
380 (define_attr "far_jump" "yes,no" (const_string "no"))
383 ;; The number of machine instructions this pattern expands to.
384 ;; Used for Thumb-2 conditional execution.
385 (define_attr "ce_count" "" (const_int 1))
387 ;;---------------------------------------------------------------------------
390 ; A list of modes that are exactly 64 bits in size. We use this to expand
391 ; some splits that are the same for all modes when operating on ARM
393 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
395 ;;---------------------------------------------------------------------------
398 (include "predicates.md")
399 (include "constraints.md")
401 ;;---------------------------------------------------------------------------
402 ;; Pipeline descriptions
404 ;; Processor type. This is created automatically from arm-cores.def.
405 (include "arm-tune.md")
407 (define_attr "tune_cortexr4" "yes,no"
409 (eq_attr "tune" "cortexr4,cortexr4f")
411 (const_string "no"))))
413 ;; True if the generic scheduling description should be used.
415 (define_attr "generic_sched" "yes,no"
417 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
418 (eq_attr "tune_cortexr4" "yes"))
420 (const_string "yes"))))
422 (define_attr "generic_vfp" "yes,no"
424 (and (eq_attr "fpu" "vfp")
425 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
426 (eq_attr "tune_cortexr4" "no"))
428 (const_string "no"))))
430 (include "arm-generic.md")
431 (include "arm926ejs.md")
432 (include "arm1020e.md")
433 (include "arm1026ejs.md")
434 (include "arm1136jfs.md")
435 (include "cortex-a8.md")
436 (include "cortex-a9.md")
437 (include "cortex-r4.md")
438 (include "cortex-r4f.md")
442 ;;---------------------------------------------------------------------------
447 ;; Note: For DImode insns, there is normally no reason why operands should
448 ;; not be in the same register, what we don't want is for something being
449 ;; written to partially overlap something that is an input.
450 ;; Cirrus 64bit additions should not be split because we have a native
451 ;; 64bit addition instruction.
453 (define_expand "adddi3"
455 [(set (match_operand:DI 0 "s_register_operand" "")
456 (plus:DI (match_operand:DI 1 "s_register_operand" "")
457 (match_operand:DI 2 "s_register_operand" "")))
458 (clobber (reg:CC CC_REGNUM))])]
461 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
463 if (!cirrus_fp_register (operands[0], DImode))
464 operands[0] = force_reg (DImode, operands[0]);
465 if (!cirrus_fp_register (operands[1], DImode))
466 operands[1] = force_reg (DImode, operands[1]);
467 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
473 if (GET_CODE (operands[1]) != REG)
474 operands[1] = force_reg (DImode, operands[1]);
475 if (GET_CODE (operands[2]) != REG)
476 operands[2] = force_reg (DImode, operands[2]);
481 (define_insn "*thumb1_adddi3"
482 [(set (match_operand:DI 0 "register_operand" "=l")
483 (plus:DI (match_operand:DI 1 "register_operand" "%0")
484 (match_operand:DI 2 "register_operand" "l")))
485 (clobber (reg:CC CC_REGNUM))
488 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
489 [(set_attr "length" "4")]
492 (define_insn_and_split "*arm_adddi3"
493 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
494 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
495 (match_operand:DI 2 "s_register_operand" "r, 0")))
496 (clobber (reg:CC CC_REGNUM))]
497 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
499 "TARGET_32BIT && reload_completed"
500 [(parallel [(set (reg:CC_C CC_REGNUM)
501 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
503 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
504 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
505 (plus:SI (match_dup 4) (match_dup 5))))]
508 operands[3] = gen_highpart (SImode, operands[0]);
509 operands[0] = gen_lowpart (SImode, operands[0]);
510 operands[4] = gen_highpart (SImode, operands[1]);
511 operands[1] = gen_lowpart (SImode, operands[1]);
512 operands[5] = gen_highpart (SImode, operands[2]);
513 operands[2] = gen_lowpart (SImode, operands[2]);
515 [(set_attr "conds" "clob")
516 (set_attr "length" "8")]
519 (define_insn_and_split "*adddi_sesidi_di"
520 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
521 (plus:DI (sign_extend:DI
522 (match_operand:SI 2 "s_register_operand" "r,r"))
523 (match_operand:DI 1 "s_register_operand" "r,0")))
524 (clobber (reg:CC CC_REGNUM))]
525 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
527 "TARGET_32BIT && reload_completed"
528 [(parallel [(set (reg:CC_C CC_REGNUM)
529 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
531 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
532 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
533 (plus:SI (ashiftrt:SI (match_dup 2)
538 operands[3] = gen_highpart (SImode, operands[0]);
539 operands[0] = gen_lowpart (SImode, operands[0]);
540 operands[4] = gen_highpart (SImode, operands[1]);
541 operands[1] = gen_lowpart (SImode, operands[1]);
542 operands[2] = gen_lowpart (SImode, operands[2]);
544 [(set_attr "conds" "clob")
545 (set_attr "length" "8")]
548 (define_insn_and_split "*adddi_zesidi_di"
549 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
550 (plus:DI (zero_extend:DI
551 (match_operand:SI 2 "s_register_operand" "r,r"))
552 (match_operand:DI 1 "s_register_operand" "r,0")))
553 (clobber (reg:CC CC_REGNUM))]
554 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
556 "TARGET_32BIT && reload_completed"
557 [(parallel [(set (reg:CC_C CC_REGNUM)
558 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
560 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
561 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
562 (plus:SI (match_dup 4) (const_int 0))))]
565 operands[3] = gen_highpart (SImode, operands[0]);
566 operands[0] = gen_lowpart (SImode, operands[0]);
567 operands[4] = gen_highpart (SImode, operands[1]);
568 operands[1] = gen_lowpart (SImode, operands[1]);
569 operands[2] = gen_lowpart (SImode, operands[2]);
571 [(set_attr "conds" "clob")
572 (set_attr "length" "8")]
575 (define_expand "addsi3"
576 [(set (match_operand:SI 0 "s_register_operand" "")
577 (plus:SI (match_operand:SI 1 "s_register_operand" "")
578 (match_operand:SI 2 "reg_or_int_operand" "")))]
581 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
583 arm_split_constant (PLUS, SImode, NULL_RTX,
584 INTVAL (operands[2]), operands[0], operands[1],
585 optimize && can_create_pseudo_p ());
591 ; If there is a scratch available, this will be faster than synthesizing the
594 [(match_scratch:SI 3 "r")
595 (set (match_operand:SI 0 "arm_general_register_operand" "")
596 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
597 (match_operand:SI 2 "const_int_operand" "")))]
599 !(const_ok_for_arm (INTVAL (operands[2]))
600 || const_ok_for_arm (-INTVAL (operands[2])))
601 && const_ok_for_arm (~INTVAL (operands[2]))"
602 [(set (match_dup 3) (match_dup 2))
603 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
607 ;; The r/r/k alternative is required when reloading the address
608 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
609 ;; put the duplicated register first, and not try the commutative version.
610 (define_insn_and_split "*arm_addsi3"
611 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
612 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
613 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
623 && GET_CODE (operands[2]) == CONST_INT
624 && !(const_ok_for_arm (INTVAL (operands[2]))
625 || const_ok_for_arm (-INTVAL (operands[2])))
626 && (reload_completed || !arm_eliminable_register (operands[1]))"
627 [(clobber (const_int 0))]
629 arm_split_constant (PLUS, SImode, curr_insn,
630 INTVAL (operands[2]), operands[0],
634 [(set_attr "length" "4,4,4,4,4,16")
635 (set_attr "predicable" "yes")]
638 ;; Register group 'k' is a single register group containing only the stack
639 ;; register. Trying to reload it will always fail catastrophically,
640 ;; so never allow those alternatives to match if reloading is needed.
642 (define_insn_and_split "*thumb1_addsi3"
643 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
644 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
645 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
648 static const char * const asms[] =
650 \"add\\t%0, %0, %2\",
651 \"sub\\t%0, %0, #%n2\",
652 \"add\\t%0, %1, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %1, %2\",
656 \"add\\t%0, %1, %2\",
660 if ((which_alternative == 2 || which_alternative == 6)
661 && GET_CODE (operands[2]) == CONST_INT
662 && INTVAL (operands[2]) < 0)
663 return \"sub\\t%0, %1, #%n2\";
664 return asms[which_alternative];
666 "&& reload_completed && CONST_INT_P (operands[2])
667 && operands[1] != stack_pointer_rtx
668 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
669 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
670 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
672 HOST_WIDE_INT offset = INTVAL (operands[2]);
675 else if (offset < -255)
678 operands[3] = GEN_INT (offset);
679 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
681 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
684 ;; Reloading and elimination of the frame pointer can
685 ;; sometimes cause this optimization to be missed.
687 [(set (match_operand:SI 0 "arm_general_register_operand" "")
688 (match_operand:SI 1 "const_int_operand" ""))
690 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
692 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
693 && (INTVAL (operands[1]) & 3) == 0"
694 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
698 ;; ??? Make Thumb-2 variants which prefer low regs
699 (define_insn "*addsi3_compare0"
700 [(set (reg:CC_NOOV CC_REGNUM)
702 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
703 (match_operand:SI 2 "arm_add_operand" "rI,L"))
705 (set (match_operand:SI 0 "s_register_operand" "=r,r")
706 (plus:SI (match_dup 1) (match_dup 2)))]
710 sub%.\\t%0, %1, #%n2"
711 [(set_attr "conds" "set")]
714 (define_insn "*addsi3_compare0_scratch"
715 [(set (reg:CC_NOOV CC_REGNUM)
717 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
718 (match_operand:SI 1 "arm_add_operand" "rI,L"))
724 [(set_attr "conds" "set")]
727 (define_insn "*compare_negsi_si"
728 [(set (reg:CC_Z CC_REGNUM)
730 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
731 (match_operand:SI 1 "s_register_operand" "r")))]
734 [(set_attr "conds" "set")]
737 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
738 ;; addend is a constant.
739 (define_insn "*cmpsi2_addneg"
740 [(set (reg:CC CC_REGNUM)
742 (match_operand:SI 1 "s_register_operand" "r,r")
743 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
744 (set (match_operand:SI 0 "s_register_operand" "=r,r")
745 (plus:SI (match_dup 1)
746 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
747 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
750 add%.\\t%0, %1, #%n2"
751 [(set_attr "conds" "set")]
754 ;; Convert the sequence
756 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
760 ;; bcs dest ((unsigned)rn >= 1)
761 ;; similarly for the beq variant using bcc.
762 ;; This is a common looping idiom (while (n--))
764 [(set (match_operand:SI 0 "arm_general_register_operand" "")
765 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
767 (set (match_operand 2 "cc_register" "")
768 (compare (match_dup 0) (const_int -1)))
770 (if_then_else (match_operator 3 "equality_operator"
771 [(match_dup 2) (const_int 0)])
772 (match_operand 4 "" "")
773 (match_operand 5 "" "")))]
774 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
778 (match_dup 1) (const_int 1)))
779 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
781 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
784 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
785 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
788 operands[2], const0_rtx);"
791 ;; The next four insns work because they compare the result with one of
792 ;; the operands, and we know that the use of the condition code is
793 ;; either GEU or LTU, so we can use the carry flag from the addition
794 ;; instead of doing the compare a second time.
795 (define_insn "*addsi3_compare_op1"
796 [(set (reg:CC_C CC_REGNUM)
798 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
799 (match_operand:SI 2 "arm_add_operand" "rI,L"))
801 (set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
806 sub%.\\t%0, %1, #%n2"
807 [(set_attr "conds" "set")]
810 (define_insn "*addsi3_compare_op2"
811 [(set (reg:CC_C CC_REGNUM)
813 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
814 (match_operand:SI 2 "arm_add_operand" "rI,L"))
816 (set (match_operand:SI 0 "s_register_operand" "=r,r")
817 (plus:SI (match_dup 1) (match_dup 2)))]
821 sub%.\\t%0, %1, #%n2"
822 [(set_attr "conds" "set")]
825 (define_insn "*compare_addsi2_op0"
826 [(set (reg:CC_C CC_REGNUM)
828 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
829 (match_operand:SI 1 "arm_add_operand" "rI,L"))
835 [(set_attr "conds" "set")]
838 (define_insn "*compare_addsi2_op1"
839 [(set (reg:CC_C CC_REGNUM)
841 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
842 (match_operand:SI 1 "arm_add_operand" "rI,L"))
848 [(set_attr "conds" "set")]
851 (define_insn "*addsi3_carryin"
852 [(set (match_operand:SI 0 "s_register_operand" "=r")
853 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
854 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
855 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
858 [(set_attr "conds" "use")]
861 (define_insn "*addsi3_carryin_shift"
862 [(set (match_operand:SI 0 "s_register_operand" "=r")
863 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
865 (match_operator:SI 2 "shift_operator"
866 [(match_operand:SI 3 "s_register_operand" "r")
867 (match_operand:SI 4 "reg_or_int_operand" "rM")])
868 (match_operand:SI 1 "s_register_operand" "r"))))]
870 "adc%?\\t%0, %1, %3%S2"
871 [(set_attr "conds" "use")
872 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
873 (const_string "alu_shift")
874 (const_string "alu_shift_reg")))]
877 (define_insn "*addsi3_carryin_alt1"
878 [(set (match_operand:SI 0 "s_register_operand" "=r")
879 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
880 (match_operand:SI 2 "arm_rhs_operand" "rI"))
881 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
884 [(set_attr "conds" "use")]
887 (define_insn "*addsi3_carryin_alt2"
888 [(set (match_operand:SI 0 "s_register_operand" "=r")
889 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
890 (match_operand:SI 1 "s_register_operand" "r"))
891 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
894 [(set_attr "conds" "use")]
897 (define_insn "*addsi3_carryin_alt3"
898 [(set (match_operand:SI 0 "s_register_operand" "=r")
899 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
900 (match_operand:SI 2 "arm_rhs_operand" "rI"))
901 (match_operand:SI 1 "s_register_operand" "r")))]
904 [(set_attr "conds" "use")]
907 (define_expand "incscc"
908 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
909 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
910 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
911 (match_operand:SI 1 "s_register_operand" "0,?r")))]
916 (define_insn "*arm_incscc"
917 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
918 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
919 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
920 (match_operand:SI 1 "s_register_operand" "0,?r")))]
924 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
925 [(set_attr "conds" "use")
926 (set_attr "length" "4,8")]
929 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
931 [(set (match_operand:SI 0 "s_register_operand" "")
932 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
933 (match_operand:SI 2 "s_register_operand" ""))
935 (clobber (match_operand:SI 3 "s_register_operand" ""))]
937 [(set (match_dup 3) (match_dup 1))
938 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
940 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
943 (define_expand "addsf3"
944 [(set (match_operand:SF 0 "s_register_operand" "")
945 (plus:SF (match_operand:SF 1 "s_register_operand" "")
946 (match_operand:SF 2 "arm_float_add_operand" "")))]
947 "TARGET_32BIT && TARGET_HARD_FLOAT"
950 && !cirrus_fp_register (operands[2], SFmode))
951 operands[2] = force_reg (SFmode, operands[2]);
954 (define_expand "adddf3"
955 [(set (match_operand:DF 0 "s_register_operand" "")
956 (plus:DF (match_operand:DF 1 "s_register_operand" "")
957 (match_operand:DF 2 "arm_float_add_operand" "")))]
958 "TARGET_32BIT && TARGET_HARD_FLOAT"
961 && !cirrus_fp_register (operands[2], DFmode))
962 operands[2] = force_reg (DFmode, operands[2]);
965 (define_expand "subdi3"
967 [(set (match_operand:DI 0 "s_register_operand" "")
968 (minus:DI (match_operand:DI 1 "s_register_operand" "")
969 (match_operand:DI 2 "s_register_operand" "")))
970 (clobber (reg:CC CC_REGNUM))])]
973 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
975 && cirrus_fp_register (operands[0], DImode)
976 && cirrus_fp_register (operands[1], DImode))
978 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
984 if (GET_CODE (operands[1]) != REG)
985 operands[1] = force_reg (DImode, operands[1]);
986 if (GET_CODE (operands[2]) != REG)
987 operands[2] = force_reg (DImode, operands[2]);
;; 64-bit subtraction for 32-bit cores: SUBS on the low words to set the
;; borrow flag, then SBC on the high words.  Clobbers the condition codes.
;; Operand 0 is early-clobbered ("&r") because the low-part result is
;; written before the high parts of the inputs are read.
;; NOTE(review): the insn condition line is missing from this extract —
;; confirm against the full file before modifying.
992 (define_insn "*arm_subdi3"
993 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
994 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
995 (match_operand:DI 2 "s_register_operand" "r,0,0")))
996 (clobber (reg:CC CC_REGNUM))]
998 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
999 [(set_attr "conds" "clob")
1000 (set_attr "length" "8")]
1003 (define_insn "*thumb_subdi3"
1004 [(set (match_operand:DI 0 "register_operand" "=l")
1005 (minus:DI (match_operand:DI 1 "register_operand" "0")
1006 (match_operand:DI 2 "register_operand" "l")))
1007 (clobber (reg:CC CC_REGNUM))]
1009 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1010 [(set_attr "length" "4")]
1013 (define_insn "*subdi_di_zesidi"
1014 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1015 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
1017 (match_operand:SI 2 "s_register_operand" "r,r"))))
1018 (clobber (reg:CC CC_REGNUM))]
1020 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1021 [(set_attr "conds" "clob")
1022 (set_attr "length" "8")]
1025 (define_insn "*subdi_di_sesidi"
1026 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1027 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1029 (match_operand:SI 2 "s_register_operand" "r,r"))))
1030 (clobber (reg:CC CC_REGNUM))]
1032 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1033 [(set_attr "conds" "clob")
1034 (set_attr "length" "8")]
1037 (define_insn "*subdi_zesidi_di"
1038 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1039 (minus:DI (zero_extend:DI
1040 (match_operand:SI 2 "s_register_operand" "r,r"))
1041 (match_operand:DI 1 "s_register_operand" "?r,0")))
1042 (clobber (reg:CC CC_REGNUM))]
1044 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1045 [(set_attr "conds" "clob")
1046 (set_attr "length" "8")]
1049 (define_insn "*subdi_sesidi_di"
1050 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1051 (minus:DI (sign_extend:DI
1052 (match_operand:SI 2 "s_register_operand" "r,r"))
1053 (match_operand:DI 1 "s_register_operand" "?r,0")))
1054 (clobber (reg:CC CC_REGNUM))]
1056 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1057 [(set_attr "conds" "clob")
1058 (set_attr "length" "8")]
1061 (define_insn "*subdi_zesidi_zesidi"
1062 [(set (match_operand:DI 0 "s_register_operand" "=r")
1063 (minus:DI (zero_extend:DI
1064 (match_operand:SI 1 "s_register_operand" "r"))
1066 (match_operand:SI 2 "s_register_operand" "r"))))
1067 (clobber (reg:CC CC_REGNUM))]
1069 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1070 [(set_attr "conds" "clob")
1071 (set_attr "length" "8")]
;; Expander for 32-bit subtraction.  A constant minuend is handled by
;; arm_split_constant on ARM/Thumb-2 (synthesising the constant as a
;; sequence of RSB/SUB operations); Thumb-1 has no immediate form for
;; this shape, so the constant is simply forced into a register.
;; NOTE(review): several interior lines (condition, DONE paths) are
;; missing from this extract.
1074 (define_expand "subsi3"
1075 [(set (match_operand:SI 0 "s_register_operand" "")
1076 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1077 (match_operand:SI 2 "s_register_operand" "")))]
1080 if (GET_CODE (operands[1]) == CONST_INT)
1084 arm_split_constant (MINUS, SImode, NULL_RTX,
1085 INTVAL (operands[1]), operands[0],
1086 operands[2], optimize && can_create_pseudo_p ());
1089 else /* TARGET_THUMB1 */
1090 operands[1] = force_reg (SImode, operands[1]);
1095 (define_insn "*thumb1_subsi3_insn"
1096 [(set (match_operand:SI 0 "register_operand" "=l")
1097 (minus:SI (match_operand:SI 1 "register_operand" "l")
1098 (match_operand:SI 2 "register_operand" "l")))]
1101 [(set_attr "length" "2")]
1104 ; ??? Check Thumb-2 split length
1105 (define_insn_and_split "*arm_subsi3_insn"
1106 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1107 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1108 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1115 && GET_CODE (operands[1]) == CONST_INT
1116 && !const_ok_for_arm (INTVAL (operands[1]))"
1117 [(clobber (const_int 0))]
1119 arm_split_constant (MINUS, SImode, curr_insn,
1120 INTVAL (operands[1]), operands[0], operands[2], 0);
1123 [(set_attr "length" "4,4,16")
1124 (set_attr "predicable" "yes")]
1128 [(match_scratch:SI 3 "r")
1129 (set (match_operand:SI 0 "arm_general_register_operand" "")
1130 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1131 (match_operand:SI 2 "arm_general_register_operand" "")))]
1133 && !const_ok_for_arm (INTVAL (operands[1]))
1134 && const_ok_for_arm (~INTVAL (operands[1]))"
1135 [(set (match_dup 3) (match_dup 1))
1136 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1140 (define_insn "*subsi3_compare0"
1141 [(set (reg:CC_NOOV CC_REGNUM)
1143 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1144 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1146 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1147 (minus:SI (match_dup 1) (match_dup 2)))]
1152 [(set_attr "conds" "set")]
1155 (define_expand "decscc"
1156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1157 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1158 (match_operator:SI 2 "arm_comparison_operator"
1159 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1164 (define_insn "*arm_decscc"
1165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1166 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1167 (match_operator:SI 2 "arm_comparison_operator"
1168 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1172 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1173 [(set_attr "conds" "use")
1174 (set_attr "length" "*,8")]
1177 (define_expand "subsf3"
1178 [(set (match_operand:SF 0 "s_register_operand" "")
1179 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1180 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1181 "TARGET_32BIT && TARGET_HARD_FLOAT"
1183 if (TARGET_MAVERICK)
1185 if (!cirrus_fp_register (operands[1], SFmode))
1186 operands[1] = force_reg (SFmode, operands[1]);
1187 if (!cirrus_fp_register (operands[2], SFmode))
1188 operands[2] = force_reg (SFmode, operands[2]);
1192 (define_expand "subdf3"
1193 [(set (match_operand:DF 0 "s_register_operand" "")
1194 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1195 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1196 "TARGET_32BIT && TARGET_HARD_FLOAT"
1198 if (TARGET_MAVERICK)
1200 if (!cirrus_fp_register (operands[1], DFmode))
1201 operands[1] = force_reg (DFmode, operands[1]);
1202 if (!cirrus_fp_register (operands[2], DFmode))
1203 operands[2] = force_reg (DFmode, operands[2]);
1208 ;; Multiplication insns
1210 (define_expand "mulsi3"
1211 [(set (match_operand:SI 0 "s_register_operand" "")
1212 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1213 (match_operand:SI 1 "s_register_operand" "")))]
1218 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; 32-bit multiply for pre-v6 cores.  On pre-v6 ARM the MUL destination
;; must not be the same register as the multiplicand (Rd != Rm), so
;; operand 0 is early-clobbered in the first alternative, and the second
;; alternative instead ties operand 1 to operand 0 (constraint "0"),
;; which is legal because operand 2 then supplies Rm.  The "%" on
;; operand 1 lets the register allocator commute the operands.
1219 (define_insn "*arm_mulsi3"
1220 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1221 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1222 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1223 "TARGET_32BIT && !arm_arch6"
1224 "mul%?\\t%0, %2, %1"
1225 [(set_attr "insn" "mul")
1226 (set_attr "predicable" "yes")]
;; 32-bit multiply for ARMv6 and later.  v6 lifted the Rd != Rm
;; restriction, so no early-clobber or operand tying is needed and a
;; single alternative suffices — compare *arm_mulsi3 above.
1229 (define_insn "*arm_mulsi3_v6"
1230 [(set (match_operand:SI 0 "s_register_operand" "=r")
1231 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1232 (match_operand:SI 2 "s_register_operand" "r")))]
1233 "TARGET_32BIT && arm_arch6"
1234 "mul%?\\t%0, %1, %2"
1235 [(set_attr "insn" "mul")
1236 (set_attr "predicable" "yes")]
1239 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1240 ; 1 and 2 are the same, because reload will make operand 0 match
1241 ; operand 1 without realizing that this conflicts with operand 2. We fix
1242 ; this by adding another alternative to match this case, and then `reload'
1243 ; it ourselves. This alternative must come first.
;; Thumb-1 (pre-v6) multiply.  Alternatives 0 and 1 "reload" by hand:
;; copy operand 1 into the destination first, then use the two-operand
;; Thumb MUL (length 4).  Alternative 2 matches operand 1 already tied
;; to operand 0 and emits a bare MUL (length 2).  See the comment above
;; about why the usual '&'/'0' trick alone is insufficient here.
1244 (define_insn "*thumb_mulsi3"
1245 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1246 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1247 (match_operand:SI 2 "register_operand" "l,l,l")))]
1248 "TARGET_THUMB1 && !arm_arch6"
1250 if (which_alternative < 2)
1251 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1253 return \"mul\\t%0, %2\";
1255 [(set_attr "length" "4,4,2")
1256 (set_attr "insn" "mul")]
1259 (define_insn "*thumb_mulsi3_v6"
1260 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1261 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1262 (match_operand:SI 2 "register_operand" "l,0,0")))]
1263 "TARGET_THUMB1 && arm_arch6"
1268 [(set_attr "length" "2")
1269 (set_attr "insn" "mul")]
1272 (define_insn "*mulsi3_compare0"
1273 [(set (reg:CC_NOOV CC_REGNUM)
1274 (compare:CC_NOOV (mult:SI
1275 (match_operand:SI 2 "s_register_operand" "r,r")
1276 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1278 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1279 (mult:SI (match_dup 2) (match_dup 1)))]
1280 "TARGET_ARM && !arm_arch6"
1281 "mul%.\\t%0, %2, %1"
1282 [(set_attr "conds" "set")
1283 (set_attr "insn" "muls")]
1286 (define_insn "*mulsi3_compare0_v6"
1287 [(set (reg:CC_NOOV CC_REGNUM)
1288 (compare:CC_NOOV (mult:SI
1289 (match_operand:SI 2 "s_register_operand" "r")
1290 (match_operand:SI 1 "s_register_operand" "r"))
1292 (set (match_operand:SI 0 "s_register_operand" "=r")
1293 (mult:SI (match_dup 2) (match_dup 1)))]
1294 "TARGET_ARM && arm_arch6 && optimize_size"
1295 "mul%.\\t%0, %2, %1"
1296 [(set_attr "conds" "set")
1297 (set_attr "insn" "muls")]
1300 (define_insn "*mulsi_compare0_scratch"
1301 [(set (reg:CC_NOOV CC_REGNUM)
1302 (compare:CC_NOOV (mult:SI
1303 (match_operand:SI 2 "s_register_operand" "r,r")
1304 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1306 (clobber (match_scratch:SI 0 "=&r,&r"))]
1307 "TARGET_ARM && !arm_arch6"
1308 "mul%.\\t%0, %2, %1"
1309 [(set_attr "conds" "set")
1310 (set_attr "insn" "muls")]
1313 (define_insn "*mulsi_compare0_scratch_v6"
1314 [(set (reg:CC_NOOV CC_REGNUM)
1315 (compare:CC_NOOV (mult:SI
1316 (match_operand:SI 2 "s_register_operand" "r")
1317 (match_operand:SI 1 "s_register_operand" "r"))
1319 (clobber (match_scratch:SI 0 "=r"))]
1320 "TARGET_ARM && arm_arch6 && optimize_size"
1321 "mul%.\\t%0, %2, %1"
1322 [(set_attr "conds" "set")
1323 (set_attr "insn" "muls")]
1326 ;; Unnamed templates to match MLA instruction.
1328 (define_insn "*mulsi3addsi"
1329 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1331 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1332 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1333 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1334 "TARGET_32BIT && !arm_arch6"
1335 "mla%?\\t%0, %2, %1, %3"
1336 [(set_attr "insn" "mla")
1337 (set_attr "predicable" "yes")]
1340 (define_insn "*mulsi3addsi_v6"
1341 [(set (match_operand:SI 0 "s_register_operand" "=r")
1343 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1344 (match_operand:SI 1 "s_register_operand" "r"))
1345 (match_operand:SI 3 "s_register_operand" "r")))]
1346 "TARGET_32BIT && arm_arch6"
1347 "mla%?\\t%0, %2, %1, %3"
1348 [(set_attr "insn" "mla")
1349 (set_attr "predicable" "yes")]
;; Flag-setting MLA (multiply-accumulate) for pre-v6 ARM.  This is the
;; non-arch6 variant: pre-v6 MUL/MLA forbid Rd == Rm, hence the
;; early-clobber "&r" outputs and the four alternatives tying operands.
;; The condition must therefore be !arm_arch6, matching every sibling
;; non-v6 pattern here (*mulsi3_compare0, *mulsi3addsi,
;; *mulsi3addsi_compare0_scratch); the arch6 counterpart immediately
;; below carries "arm_arch6 && optimize_size".  With "arm_arch6" this
;; pattern both overlapped the _v6 variant and was unreachable on the
;; cores it encodes constraints for.
1352 (define_insn "*mulsi3addsi_compare0"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1356 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1357 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1358 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1360 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1363 "TARGET_ARM && !arm_arch6"
1364 "mla%.\\t%0, %2, %1, %3"
1365 [(set_attr "conds" "set")
1366 (set_attr "insn" "mlas")]
1369 (define_insn "*mulsi3addsi_compare0_v6"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1373 (match_operand:SI 2 "s_register_operand" "r")
1374 (match_operand:SI 1 "s_register_operand" "r"))
1375 (match_operand:SI 3 "s_register_operand" "r"))
1377 (set (match_operand:SI 0 "s_register_operand" "=r")
1378 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1380 "TARGET_ARM && arm_arch6 && optimize_size"
1381 "mla%.\\t%0, %2, %1, %3"
1382 [(set_attr "conds" "set")
1383 (set_attr "insn" "mlas")]
1386 (define_insn "*mulsi3addsi_compare0_scratch"
1387 [(set (reg:CC_NOOV CC_REGNUM)
1390 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1391 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1392 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1394 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1395 "TARGET_ARM && !arm_arch6"
1396 "mla%.\\t%0, %2, %1, %3"
1397 [(set_attr "conds" "set")
1398 (set_attr "insn" "mlas")]
1401 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1402 [(set (reg:CC_NOOV CC_REGNUM)
1405 (match_operand:SI 2 "s_register_operand" "r")
1406 (match_operand:SI 1 "s_register_operand" "r"))
1407 (match_operand:SI 3 "s_register_operand" "r"))
1409 (clobber (match_scratch:SI 0 "=r"))]
1410 "TARGET_ARM && arm_arch6 && optimize_size"
1411 "mla%.\\t%0, %2, %1, %3"
1412 [(set_attr "conds" "set")
1413 (set_attr "insn" "mlas")]
1416 (define_insn "*mulsi3subsi"
1417 [(set (match_operand:SI 0 "s_register_operand" "=r")
1419 (match_operand:SI 3 "s_register_operand" "r")
1420 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1421 (match_operand:SI 1 "s_register_operand" "r"))))]
1422 "TARGET_32BIT && arm_arch_thumb2"
1423 "mls%?\\t%0, %2, %1, %3"
1424 [(set_attr "insn" "mla")
1425 (set_attr "predicable" "yes")]
1428 ;; Unnamed template to match long long multiply-accumulate (smlal)
1430 (define_insn "*mulsidi3adddi"
1431 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1434 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1435 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1436 (match_operand:DI 1 "s_register_operand" "0")))]
1437 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1438 "smlal%?\\t%Q0, %R0, %3, %2"
1439 [(set_attr "insn" "smlal")
1440 (set_attr "predicable" "yes")]
1443 (define_insn "*mulsidi3adddi_v6"
1444 [(set (match_operand:DI 0 "s_register_operand" "=r")
1447 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1448 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1449 (match_operand:DI 1 "s_register_operand" "0")))]
1450 "TARGET_32BIT && arm_arch6"
1451 "smlal%?\\t%Q0, %R0, %3, %2"
1452 [(set_attr "insn" "smlal")
1453 (set_attr "predicable" "yes")]
1456 ;; 32x32->64 widening multiply.
1457 ;; As with mulsi3, the only difference between the v3-5 and v6+
1458 ;; versions of these patterns is the requirement that the output not
1459 ;; overlap the inputs, but that still means we have to have a named
1460 ;; expander and two different starred insns.
1462 (define_expand "mulsidi3"
1463 [(set (match_operand:DI 0 "s_register_operand" "")
1465 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1466 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1467 "TARGET_32BIT && arm_arch3m"
1471 (define_insn "*mulsidi3_nov6"
1472 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1474 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1475 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1476 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1477 "smull%?\\t%Q0, %R0, %1, %2"
1478 [(set_attr "insn" "smull")
1479 (set_attr "predicable" "yes")]
1482 (define_insn "*mulsidi3_v6"
1483 [(set (match_operand:DI 0 "s_register_operand" "=r")
1485 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1486 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1487 "TARGET_32BIT && arm_arch6"
1488 "smull%?\\t%Q0, %R0, %1, %2"
1489 [(set_attr "insn" "smull")
1490 (set_attr "predicable" "yes")]
1493 (define_expand "umulsidi3"
1494 [(set (match_operand:DI 0 "s_register_operand" "")
1496 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1497 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1498 "TARGET_32BIT && arm_arch3m"
1502 (define_insn "*umulsidi3_nov6"
1503 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1505 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1506 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1507 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1508 "umull%?\\t%Q0, %R0, %1, %2"
1509 [(set_attr "insn" "umull")
1510 (set_attr "predicable" "yes")]
1513 (define_insn "*umulsidi3_v6"
1514 [(set (match_operand:DI 0 "s_register_operand" "=r")
1516 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1517 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1518 "TARGET_32BIT && arm_arch6"
1519 "umull%?\\t%Q0, %R0, %1, %2"
1520 [(set_attr "insn" "umull")
1521 (set_attr "predicable" "yes")]
1524 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1526 (define_insn "*umulsidi3adddi"
1527 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1530 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1531 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1532 (match_operand:DI 1 "s_register_operand" "0")))]
1533 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1534 "umlal%?\\t%Q0, %R0, %3, %2"
1535 [(set_attr "insn" "umlal")
1536 (set_attr "predicable" "yes")]
1539 (define_insn "*umulsidi3adddi_v6"
1540 [(set (match_operand:DI 0 "s_register_operand" "=r")
1543 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1544 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1545 (match_operand:DI 1 "s_register_operand" "0")))]
1546 "TARGET_32BIT && arm_arch6"
1547 "umlal%?\\t%Q0, %R0, %3, %2"
1548 [(set_attr "insn" "umlal")
1549 (set_attr "predicable" "yes")]
1552 (define_expand "smulsi3_highpart"
1554 [(set (match_operand:SI 0 "s_register_operand" "")
1558 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1559 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1561 (clobber (match_scratch:SI 3 ""))])]
1562 "TARGET_32BIT && arm_arch3m"
1566 (define_insn "*smulsi3_highpart_nov6"
1567 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1571 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1572 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1574 (clobber (match_scratch:SI 3 "=&r,&r"))]
1575 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1576 "smull%?\\t%3, %0, %2, %1"
1577 [(set_attr "insn" "smull")
1578 (set_attr "predicable" "yes")]
1581 (define_insn "*smulsi3_highpart_v6"
1582 [(set (match_operand:SI 0 "s_register_operand" "=r")
1586 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1587 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1589 (clobber (match_scratch:SI 3 "=r"))]
1590 "TARGET_32BIT && arm_arch6"
1591 "smull%?\\t%3, %0, %2, %1"
1592 [(set_attr "insn" "smull")
1593 (set_attr "predicable" "yes")]
1596 (define_expand "umulsi3_highpart"
1598 [(set (match_operand:SI 0 "s_register_operand" "")
1602 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1603 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1605 (clobber (match_scratch:SI 3 ""))])]
1606 "TARGET_32BIT && arm_arch3m"
1610 (define_insn "*umulsi3_highpart_nov6"
1611 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1615 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1616 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1618 (clobber (match_scratch:SI 3 "=&r,&r"))]
1619 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1620 "umull%?\\t%3, %0, %2, %1"
1621 [(set_attr "insn" "umull")
1622 (set_attr "predicable" "yes")]
1625 (define_insn "*umulsi3_highpart_v6"
1626 [(set (match_operand:SI 0 "s_register_operand" "=r")
1630 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1631 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1633 (clobber (match_scratch:SI 3 "=r"))]
1634 "TARGET_32BIT && arm_arch6"
1635 "umull%?\\t%3, %0, %2, %1"
1636 [(set_attr "insn" "umull")
1637 (set_attr "predicable" "yes")]
;; 16x16->32 signed multiply (SMULBB: bottom halfword x bottom
;; halfword) for DSP-capable cores.  The "%" on operand 1 allows the
;; operands to commute.  The tb/bt/tt variants below select the top
;; halfword of an operand via an explicit 16-bit arithmetic shift.
;; NOTE(review): one interior line of the RTL is missing from this
;; extract.
1640 (define_insn "mulhisi3"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (sign_extend:SI
1643 (match_operand:HI 1 "s_register_operand" "%r"))
1645 (match_operand:HI 2 "s_register_operand" "r"))))]
1646 "TARGET_DSP_MULTIPLY"
1647 "smulbb%?\\t%0, %1, %2"
1648 [(set_attr "insn" "smulxy")
1649 (set_attr "predicable" "yes")]
1652 (define_insn "*mulhisi3tb"
1653 [(set (match_operand:SI 0 "s_register_operand" "=r")
1654 (mult:SI (ashiftrt:SI
1655 (match_operand:SI 1 "s_register_operand" "r")
1658 (match_operand:HI 2 "s_register_operand" "r"))))]
1659 "TARGET_DSP_MULTIPLY"
1660 "smultb%?\\t%0, %1, %2"
1661 [(set_attr "insn" "smulxy")
1662 (set_attr "predicable" "yes")]
1665 (define_insn "*mulhisi3bt"
1666 [(set (match_operand:SI 0 "s_register_operand" "=r")
1667 (mult:SI (sign_extend:SI
1668 (match_operand:HI 1 "s_register_operand" "r"))
1670 (match_operand:SI 2 "s_register_operand" "r")
1672 "TARGET_DSP_MULTIPLY"
1673 "smulbt%?\\t%0, %1, %2"
1674 [(set_attr "insn" "smulxy")
1675 (set_attr "predicable" "yes")]
1678 (define_insn "*mulhisi3tt"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1680 (mult:SI (ashiftrt:SI
1681 (match_operand:SI 1 "s_register_operand" "r")
1684 (match_operand:SI 2 "s_register_operand" "r")
1686 "TARGET_DSP_MULTIPLY"
1687 "smultt%?\\t%0, %1, %2"
1688 [(set_attr "insn" "smulxy")
1689 (set_attr "predicable" "yes")]
1692 (define_insn "*mulhisi3addsi"
1693 [(set (match_operand:SI 0 "s_register_operand" "=r")
1694 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1695 (mult:SI (sign_extend:SI
1696 (match_operand:HI 2 "s_register_operand" "%r"))
1698 (match_operand:HI 3 "s_register_operand" "r")))))]
1699 "TARGET_DSP_MULTIPLY"
1700 "smlabb%?\\t%0, %2, %3, %1"
1701 [(set_attr "insn" "smlaxy")
1702 (set_attr "predicable" "yes")]
1705 (define_insn "*mulhidi3adddi"
1706 [(set (match_operand:DI 0 "s_register_operand" "=r")
1708 (match_operand:DI 1 "s_register_operand" "0")
1709 (mult:DI (sign_extend:DI
1710 (match_operand:HI 2 "s_register_operand" "%r"))
1712 (match_operand:HI 3 "s_register_operand" "r")))))]
1713 "TARGET_DSP_MULTIPLY"
1714 "smlalbb%?\\t%Q0, %R0, %2, %3"
1715 [(set_attr "insn" "smlalxy")
1716 (set_attr "predicable" "yes")])
1718 (define_expand "mulsf3"
1719 [(set (match_operand:SF 0 "s_register_operand" "")
1720 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1721 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1722 "TARGET_32BIT && TARGET_HARD_FLOAT"
1725 && !cirrus_fp_register (operands[2], SFmode))
1726 operands[2] = force_reg (SFmode, operands[2]);
1729 (define_expand "muldf3"
1730 [(set (match_operand:DF 0 "s_register_operand" "")
1731 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1732 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1733 "TARGET_32BIT && TARGET_HARD_FLOAT"
1736 && !cirrus_fp_register (operands[2], DFmode))
1737 operands[2] = force_reg (DFmode, operands[2]);
1742 (define_expand "divsf3"
1743 [(set (match_operand:SF 0 "s_register_operand" "")
1744 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1745 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1746 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1749 (define_expand "divdf3"
1750 [(set (match_operand:DF 0 "s_register_operand" "")
1751 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1752 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1753 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1758 (define_expand "modsf3"
1759 [(set (match_operand:SF 0 "s_register_operand" "")
1760 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1761 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1762 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1765 (define_expand "moddf3"
1766 [(set (match_operand:DF 0 "s_register_operand" "")
1767 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1768 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1769 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1772 ;; Boolean and,ior,xor insns
1774 ;; Split up double word logical operations
1776 ;; Split up simple DImode logical operations. Simply perform the logical
1777 ;; operation on the upper and lower halves of the registers.
;; Split a DImode AND/IOR/XOR of two registers into two independent
;; SImode operations on the low and high halves.  Runs after reload;
;; iWMMXt registers are excluded because they have native 64-bit
;; logical operations.  The preparation code rewrites operands 0-2 to
;; their low parts and creates operands 3-5 as the high parts.
;; NOTE(review): the define_split header line itself is missing from
;; this extract.
1779 [(set (match_operand:DI 0 "s_register_operand" "")
1780 (match_operator:DI 6 "logical_binary_operator"
1781 [(match_operand:DI 1 "s_register_operand" "")
1782 (match_operand:DI 2 "s_register_operand" "")]))]
1783 "TARGET_32BIT && reload_completed
1784 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1785 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1786 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1789 operands[3] = gen_highpart (SImode, operands[0]);
1790 operands[0] = gen_lowpart (SImode, operands[0]);
1791 operands[4] = gen_highpart (SImode, operands[1]);
1792 operands[1] = gen_lowpart (SImode, operands[1]);
1793 operands[5] = gen_highpart (SImode, operands[2]);
1794 operands[2] = gen_lowpart (SImode, operands[2]);
1799 [(set (match_operand:DI 0 "s_register_operand" "")
1800 (match_operator:DI 6 "logical_binary_operator"
1801 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1802 (match_operand:DI 1 "s_register_operand" "")]))]
1803 "TARGET_32BIT && reload_completed"
1804 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1805 (set (match_dup 3) (match_op_dup:SI 6
1806 [(ashiftrt:SI (match_dup 2) (const_int 31))
1810 operands[3] = gen_highpart (SImode, operands[0]);
1811 operands[0] = gen_lowpart (SImode, operands[0]);
1812 operands[4] = gen_highpart (SImode, operands[1]);
1813 operands[1] = gen_lowpart (SImode, operands[1]);
1814 operands[5] = gen_highpart (SImode, operands[2]);
1815 operands[2] = gen_lowpart (SImode, operands[2]);
1819 ;; The zero extend of operand 2 means we can just copy the high part of
1820 ;; operand1 into operand0.
1822 [(set (match_operand:DI 0 "s_register_operand" "")
1824 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1825 (match_operand:DI 1 "s_register_operand" "")))]
1826 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1827 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1828 (set (match_dup 3) (match_dup 4))]
1831 operands[4] = gen_highpart (SImode, operands[1]);
1832 operands[3] = gen_highpart (SImode, operands[0]);
1833 operands[0] = gen_lowpart (SImode, operands[0]);
1834 operands[1] = gen_lowpart (SImode, operands[1]);
1838 ;; The zero extend of operand 2 means we can just copy the high part of
1839 ;; operand1 into operand0.
1841 [(set (match_operand:DI 0 "s_register_operand" "")
1843 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1844 (match_operand:DI 1 "s_register_operand" "")))]
1845 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1846 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1847 (set (match_dup 3) (match_dup 4))]
1850 operands[4] = gen_highpart (SImode, operands[1]);
1851 operands[3] = gen_highpart (SImode, operands[0]);
1852 operands[0] = gen_lowpart (SImode, operands[0]);
1853 operands[1] = gen_lowpart (SImode, operands[1]);
1857 (define_insn "anddi3"
1858 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1859 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1860 (match_operand:DI 2 "s_register_operand" "r,r")))]
1861 "TARGET_32BIT && ! TARGET_IWMMXT"
1863 [(set_attr "length" "8")]
1866 (define_insn_and_split "*anddi_zesidi_di"
1867 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1868 (and:DI (zero_extend:DI
1869 (match_operand:SI 2 "s_register_operand" "r,r"))
1870 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1873 "TARGET_32BIT && reload_completed"
1874 ; The zero extend of operand 2 clears the high word of the output
1876 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1877 (set (match_dup 3) (const_int 0))]
1880 operands[3] = gen_highpart (SImode, operands[0]);
1881 operands[0] = gen_lowpart (SImode, operands[0]);
1882 operands[1] = gen_lowpart (SImode, operands[1]);
1884 [(set_attr "length" "8")]
1887 (define_insn "*anddi_sesdi_di"
1888 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1889 (and:DI (sign_extend:DI
1890 (match_operand:SI 2 "s_register_operand" "r,r"))
1891 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1894 [(set_attr "length" "8")]
;; Expander for 32-bit AND.  On ARM/Thumb-2 a constant operand is
;; synthesised via arm_split_constant (AND/BIC immediate sequences).
;; The Thumb-1 path has three constant tricks:
;;   1. if ~const fits in 8 bits, use BIC with the inverted constant;
;;   2. if const == (1 << i) - 1 (a low-bit mask), use extzv;
;;   3. if ~const == (1 << i) - 1, clear the low bits with a logical
;;      shift right followed by a shift left.
;; Otherwise the constant is forced into a register.
;; NOTE(review): several interior lines (conditions, DONE statements,
;; extzv argument) are missing from this extract.
1897 (define_expand "andsi3"
1898 [(set (match_operand:SI 0 "s_register_operand" "")
1899 (and:SI (match_operand:SI 1 "s_register_operand" "")
1900 (match_operand:SI 2 "reg_or_int_operand" "")))]
1905 if (GET_CODE (operands[2]) == CONST_INT)
1907 arm_split_constant (AND, SImode, NULL_RTX,
1908 INTVAL (operands[2]), operands[0],
1909 operands[1], optimize && can_create_pseudo_p ());
1914 else /* TARGET_THUMB1 */
1916 if (GET_CODE (operands[2]) != CONST_INT)
1917 operands[2] = force_reg (SImode, operands[2]);
1922 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1924 operands[2] = force_reg (SImode,
1925 GEN_INT (~INTVAL (operands[2])));
1927 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1932 for (i = 9; i <= 31; i++)
1934 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1936 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1940 else if ((((HOST_WIDE_INT) 1) << i) - 1
1941 == ~INTVAL (operands[2]))
1943 rtx shift = GEN_INT (i);
1944 rtx reg = gen_reg_rtx (SImode);
1946 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1947 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1953 operands[2] = force_reg (SImode, operands[2]);
1959 ; ??? Check split length for Thumb-2
1960 (define_insn_and_split "*arm_andsi3_insn"
1961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1962 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1963 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1967 bic%?\\t%0, %1, #%B2
1970 && GET_CODE (operands[2]) == CONST_INT
1971 && !(const_ok_for_arm (INTVAL (operands[2]))
1972 || const_ok_for_arm (~INTVAL (operands[2])))"
1973 [(clobber (const_int 0))]
1975 arm_split_constant (AND, SImode, curr_insn,
1976 INTVAL (operands[2]), operands[0], operands[1], 0);
1979 [(set_attr "length" "4,4,16")
1980 (set_attr "predicable" "yes")]
1983 (define_insn "*thumb1_andsi3_insn"
1984 [(set (match_operand:SI 0 "register_operand" "=l")
1985 (and:SI (match_operand:SI 1 "register_operand" "%0")
1986 (match_operand:SI 2 "register_operand" "l")))]
1989 [(set_attr "length" "2")]
1992 (define_insn "*andsi3_compare0"
1993 [(set (reg:CC_NOOV CC_REGNUM)
1995 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1996 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1998 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1999 (and:SI (match_dup 1) (match_dup 2)))]
2003 bic%.\\t%0, %1, #%B2"
2004 [(set_attr "conds" "set")]
2007 (define_insn "*andsi3_compare0_scratch"
2008 [(set (reg:CC_NOOV CC_REGNUM)
2010 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2011 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2013 (clobber (match_scratch:SI 2 "=X,r"))]
2017 bic%.\\t%2, %0, #%B1"
2018 [(set_attr "conds" "set")]
;; Compare a constant-position/width bitfield of operand 0 against zero.
;; At output time the (width, start) pair is folded into a single mask
;; constant and emitted as TST.  The condition limits width to <= 8 bits
;; (adjusted by the low bit of the start position) and the field to lie
;; entirely within the 32-bit word.
2021 (define_insn "*zeroextractsi_compare0_scratch"
2022 [(set (reg:CC_NOOV CC_REGNUM)
2023 (compare:CC_NOOV (zero_extract:SI
2024 (match_operand:SI 0 "s_register_operand" "r")
2025 (match_operand 1 "const_int_operand" "n")
2026 (match_operand 2 "const_int_operand" "n"))
2029 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2030 && INTVAL (operands[1]) > 0
2031 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2032 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2034 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2035 << INTVAL (operands[2]));
2036 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2039 [(set_attr "conds" "set")]
;; (ne (zero_extract ...) 0) as a value: split into an AND-with-mask that
;; sets the flags (and the destination), then overwrite the destination with
;; 1 when the flags say "not equal" (the if_then_else keeps it when EQ).
2042 (define_insn_and_split "*ne_zeroextractsi"
2043 [(set (match_operand:SI 0 "s_register_operand" "=r")
2044 (ne:SI (zero_extract:SI
2045 (match_operand:SI 1 "s_register_operand" "r")
2046 (match_operand:SI 2 "const_int_operand" "n")
2047 (match_operand:SI 3 "const_int_operand" "n"))
2049 (clobber (reg:CC CC_REGNUM))]
2051 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2052 && INTVAL (operands[2]) > 0
2053 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2054 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2057 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2058 && INTVAL (operands[2]) > 0
2059 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2060 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2061 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2062 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2064 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2066 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2067 (match_dup 0) (const_int 1)))]
2069 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2070 << INTVAL (operands[3]));
2072 [(set_attr "conds" "clob")
2073 (set (attr "length")
2074 (if_then_else (eq_attr "is_thumb" "yes")
;; Variant for a field tested via a left shift: the split rewrites the
;; extract as (ashift x (32 - width)), so the field occupies the top bits
;; and the shift itself sets the flags.
2079 (define_insn_and_split "*ne_zeroextractsi_shifted"
2080 [(set (match_operand:SI 0 "s_register_operand" "=r")
2081 (ne:SI (zero_extract:SI
2082 (match_operand:SI 1 "s_register_operand" "r")
2083 (match_operand:SI 2 "const_int_operand" "n")
2086 (clobber (reg:CC CC_REGNUM))]
2090 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2091 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2093 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2095 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2096 (match_dup 0) (const_int 1)))]
2098 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2100 [(set_attr "conds" "clob")
2101 (set_attr "length" "8")]
;; if_then_else on (ne (zero_extract ...) 0): same AND-with-mask trick as
;; *ne_zeroextractsi, but the "else" value is operand 4 rather than 1.
;; Operand 0 must not overlap operand 4 because operand 0 is written by the
;; flag-setting AND before the conditional move reads operand 4.
2104 (define_insn_and_split "*ite_ne_zeroextractsi"
2105 [(set (match_operand:SI 0 "s_register_operand" "=r")
2106 (if_then_else:SI (ne (zero_extract:SI
2107 (match_operand:SI 1 "s_register_operand" "r")
2108 (match_operand:SI 2 "const_int_operand" "n")
2109 (match_operand:SI 3 "const_int_operand" "n"))
2111 (match_operand:SI 4 "arm_not_operand" "rIK")
2113 (clobber (reg:CC CC_REGNUM))]
2115 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2116 && INTVAL (operands[2]) > 0
2117 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2118 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2119 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2122 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2123 && INTVAL (operands[2]) > 0
2124 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2125 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2126 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2127 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2128 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2130 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2132 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2133 (match_dup 0) (match_dup 4)))]
2135 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2136 << INTVAL (operands[3]));
2138 [(set_attr "conds" "clob")
2139 (set_attr "length" "8")]
;; Shifted variant of the above: the field test is done with a flag-setting
;; left shift by (32 - width); ARM-only.
2142 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2143 [(set (match_operand:SI 0 "s_register_operand" "=r")
2144 (if_then_else:SI (ne (zero_extract:SI
2145 (match_operand:SI 1 "s_register_operand" "r")
2146 (match_operand:SI 2 "const_int_operand" "n")
2149 (match_operand:SI 3 "arm_not_operand" "rIK")
2151 (clobber (reg:CC CC_REGNUM))]
2152 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2154 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2155 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2156 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2158 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2160 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2161 (match_dup 0) (match_dup 3)))]
2163 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2165 [(set_attr "conds" "clob")
2166 (set_attr "length" "8")]
;; Split a zero_extract into shift-left (to discard the bits above the
;; field) followed by logical shift-right (to right-justify it):
;;   tmp = x << (32 - width - start);  dst = tmp >> (32 - width).
2170 [(set (match_operand:SI 0 "s_register_operand" "")
2171 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2172 (match_operand:SI 2 "const_int_operand" "")
2173 (match_operand:SI 3 "const_int_operand" "")))
2174 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2176 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2177 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2179 HOST_WIDE_INT temp = INTVAL (operands[2]);
2181 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2182 operands[3] = GEN_INT (32 - temp);
2186 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
;; Split (shiftable_op (zero_extract ...) reg): do the left shift into the
;; scratch, then fold the right shift into the operation as a shifted
;; operand (lshiftrt for the unsigned extract).
2188 [(set (match_operand:SI 0 "s_register_operand" "")
2189 (match_operator:SI 1 "shiftable_operator"
2190 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2191 (match_operand:SI 3 "const_int_operand" "")
2192 (match_operand:SI 4 "const_int_operand" ""))
2193 (match_operand:SI 5 "s_register_operand" "")]))
2194 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2196 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2199 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2202 HOST_WIDE_INT temp = INTVAL (operands[3]);
2204 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2205 operands[4] = GEN_INT (32 - temp);
;; Split a sign_extract into shift-left then ARITHMETIC shift-right (the
;; arithmetic shift replicates the field's sign bit).
2210 [(set (match_operand:SI 0 "s_register_operand" "")
2211 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2212 (match_operand:SI 2 "const_int_operand" "")
2213 (match_operand:SI 3 "const_int_operand" "")))]
2215 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2216 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2218 HOST_WIDE_INT temp = INTVAL (operands[2]);
2220 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2221 operands[3] = GEN_INT (32 - temp);
;; As the first split above, but for sign_extract: the folded operand uses
;; ashiftrt instead of lshiftrt.
2226 [(set (match_operand:SI 0 "s_register_operand" "")
2227 (match_operator:SI 1 "shiftable_operator"
2228 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2229 (match_operand:SI 3 "const_int_operand" "")
2230 (match_operand:SI 4 "const_int_operand" ""))
2231 (match_operand:SI 5 "s_register_operand" "")]))
2232 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2234 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2237 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2240 HOST_WIDE_INT temp = INTVAL (operands[3]);
2242 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2243 operands[4] = GEN_INT (32 - temp);
2247 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2248 ;;; represented by the bitfield, then this will produce incorrect results.
2249 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2250 ;;; which have a real bit-field insert instruction, the truncation happens
2251 ;;; in the bit-field insert instruction itself. Since arm does not have a
2252 ;;; bit-field insert instruction, we would have to emit code here to truncate
2253 ;;; the value before we insert. This loses some of the advantage of having
2254 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; insv: insert operands[1] bits of operands[3] into operands[0], starting
;; at bit operands[2].  On Thumb-2 this can become a single BFC/BFI
;; (insv_zero / insv_t2); otherwise it expands to mask-and-or sequences,
;; with special cases chosen by whether the mask is a legal ARM immediate.
2256 (define_expand "insv"
2257 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2258 (match_operand:SI 1 "general_operand" "")
2259 (match_operand:SI 2 "general_operand" ""))
2260 (match_operand:SI 3 "reg_or_int_operand" ""))]
2261 "TARGET_ARM || arm_arch_thumb2"
2264 int start_bit = INTVAL (operands[2]);
2265 int width = INTVAL (operands[1]);
2266 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2267 rtx target, subtarget;
2269 if (arm_arch_thumb2)
2271 bool use_bfi = TRUE;
2273 if (GET_CODE (operands[3]) == CONST_INT)
2275 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
;; Inserting all-zero bits is a bitfield clear (BFC).
2279 emit_insn (gen_insv_zero (operands[0], operands[1],
2284 /* See if the set can be done with a single orr instruction.  */
2285 if (val == mask && const_ok_for_arm (val << start_bit))
;; Fall back to BFI, which needs the source in a register.
2291 if (GET_CODE (operands[3]) != REG)
2292 operands[3] = force_reg (SImode, operands[3]);
2294 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2300 target = copy_rtx (operands[0]);
2301 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2302 subreg as the final target.  */
2303 if (GET_CODE (target) == SUBREG)
2305 subtarget = gen_reg_rtx (SImode);
2306 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2307 < GET_MODE_SIZE (SImode))
2308 target = SUBREG_REG (target);
2313 if (GET_CODE (operands[3]) == CONST_INT)
2315 /* Since we are inserting a known constant, we may be able to
2316 reduce the number of bits that we have to clear so that
2317 the mask becomes simple.  */
2318 /* ??? This code does not check to see if the new mask is actually
2319 simpler.  It may not be.  */
2320 rtx op1 = gen_reg_rtx (SImode);
2321 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
2322 start of this pattern.  */
2323 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2324 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
;; Clear only the bits that will end up zero, then OR in the constant.
2326 emit_insn (gen_andsi3 (op1, operands[0],
2327 gen_int_mode (~mask2, SImode)));
2328 emit_insn (gen_iorsi3 (subtarget, op1,
2329 gen_int_mode (op3_value << start_bit, SImode)));
2331 else if (start_bit == 0
2332 && !(const_ok_for_arm (mask)
2333 || const_ok_for_arm (~mask)))
2335 /* A Trick, since we are setting the bottom bits in the word,
2336 we can shift operand[3] up, operand[0] down, OR them together
2337 and rotate the result back again.  This takes 3 insns, and
2338 the third might be mergeable into another op.  */
2339 /* The shift up copes with the possibility that operand[3] is
2340 wider than the bitfield.  */
2341 rtx op0 = gen_reg_rtx (SImode);
2342 rtx op1 = gen_reg_rtx (SImode);
2344 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2345 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2346 emit_insn (gen_iorsi3 (op1, op1, op0));
2347 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2349 else if ((width + start_bit == 32)
2350 && !(const_ok_for_arm (mask)
2351 || const_ok_for_arm (~mask)))
2353 /* Similar trick, but slightly less efficient.  */
2355 rtx op0 = gen_reg_rtx (SImode);
2356 rtx op1 = gen_reg_rtx (SImode);
2358 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2359 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2360 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2361 emit_insn (gen_iorsi3 (subtarget, op1, op0));
;; General case: AND out the field, AND the source to the field width,
;; shift it into place, and OR the two halves together.
2365 rtx op0 = gen_int_mode (mask, SImode);
2366 rtx op1 = gen_reg_rtx (SImode);
2367 rtx op2 = gen_reg_rtx (SImode);
2369 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2371 rtx tmp = gen_reg_rtx (SImode);
;; Mask constant is not encodable; materialize it in a register first.
2373 emit_insn (gen_movsi (tmp, op0));
2377 /* Mask out any bits in operand[3] that are not needed.  */
2378 emit_insn (gen_andsi3 (op1, operands[3], op0));
2380 if (GET_CODE (op0) == CONST_INT
2381 && (const_ok_for_arm (mask << start_bit)
2382 || const_ok_for_arm (~(mask << start_bit))))
2384 op0 = gen_int_mode (~(mask << start_bit), SImode);
2385 emit_insn (gen_andsi3 (op2, operands[0], op0));
2389 if (GET_CODE (op0) == CONST_INT)
2391 rtx tmp = gen_reg_rtx (SImode);
2393 emit_insn (gen_movsi (tmp, op0))
;; Shift the mask into position and clear the field with BIC.
2398 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2400 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2404 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2406 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2409 if (subtarget != target)
2411 /* If TARGET is still a SUBREG, then it must be wider than a word,
2412 so we must be careful only to set the subword we were asked to.  */
2413 if (GET_CODE (target) == SUBREG)
2414 emit_move_insn (target, subtarget);
2416 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a constant bitfield (insert of zero); single 4-byte predicable
;; instruction (Thumb-2 BFC -- NOTE(review): template line not visible in
;; this extract, inferred from the insv expander's use; confirm).
2423 (define_insn "insv_zero"
2424 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2425 (match_operand:SI 1 "const_int_operand" "M")
2426 (match_operand:SI 2 "const_int_operand" "M"))
2430 [(set_attr "length" "4")
2431 (set_attr "predicable" "yes")]
;; Thumb-2 bitfield insert: BFI dst, src, lsb (%2), width (%1).
2434 (define_insn "insv_t2"
2435 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2436 (match_operand:SI 1 "const_int_operand" "M")
2437 (match_operand:SI 2 "const_int_operand" "M"))
2438 (match_operand:SI 3 "s_register_operand" "r"))]
2440 "bfi%?\t%0, %3, %2, %1"
2441 [(set_attr "length" "4")
2442 (set_attr "predicable" "yes")]
2445 ; constants for op 2 will never be given to these patterns.
;; DImode AND-NOT: after reload, split into two SImode BIC-shaped sets on
;; the low and high halves (operands 3/4/5 become the highparts).
2446 (define_insn_and_split "*anddi_notdi_di"
2447 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2448 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2449 (match_operand:DI 2 "s_register_operand" "0,r")))]
2452 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2453 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2454 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2457 operands[3] = gen_highpart (SImode, operands[0]);
2458 operands[0] = gen_lowpart (SImode, operands[0]);
2459 operands[4] = gen_highpart (SImode, operands[1]);
2460 operands[1] = gen_lowpart (SImode, operands[1]);
2461 operands[5] = gen_highpart (SImode, operands[2]);
2462 operands[2] = gen_lowpart (SImode, operands[2]);
2464 [(set_attr "length" "8")
2465 (set_attr "predicable" "yes")]
;; DImode AND-NOT of a zero-extended SImode value: only the low word needs
;; a BIC; the high word is just copied (NOT of the zero-extension's zero
;; high half is all-ones, and x AND ~0 = x).
2468 (define_insn_and_split "*anddi_notzesidi_di"
2469 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2470 (and:DI (not:DI (zero_extend:DI
2471 (match_operand:SI 2 "s_register_operand" "r,r")))
2472 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2475 bic%?\\t%Q0, %Q1, %2
2477 ; (not (zero_extend ...)) allows us to just copy the high word from
2478 ; operand1 to operand0.
2481 && operands[0] != operands[1]"
2482 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2483 (set (match_dup 3) (match_dup 4))]
2486 operands[3] = gen_highpart (SImode, operands[0]);
2487 operands[0] = gen_lowpart (SImode, operands[0]);
2488 operands[4] = gen_highpart (SImode, operands[1]);
2489 operands[1] = gen_lowpart (SImode, operands[1]);
2491 [(set_attr "length" "4,8")
2492 (set_attr "predicable" "yes")]
;; DImode AND-NOT of a sign-extended SImode value: the high half of the
;; extension is the sign bit replicated, obtained with (ashiftrt %2 31).
2495 (define_insn_and_split "*anddi_notsesidi_di"
2496 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2497 (and:DI (not:DI (sign_extend:DI
2498 (match_operand:SI 2 "s_register_operand" "r,r")))
2499 (match_operand:DI 1 "s_register_operand" "0,r")))]
2502 "TARGET_32BIT && reload_completed"
2503 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2504 (set (match_dup 3) (and:SI (not:SI
2505 (ashiftrt:SI (match_dup 2) (const_int 31)))
2509 operands[3] = gen_highpart (SImode, operands[0]);
2510 operands[0] = gen_lowpart (SImode, operands[0]);
2511 operands[4] = gen_highpart (SImode, operands[1]);
2512 operands[1] = gen_lowpart (SImode, operands[1]);
2514 [(set_attr "length" "8")
2515 (set_attr "predicable" "yes")]
;; SImode AND-NOT = BIC.  Note the operand numbering: %2 is the inverted
;; (cleared-bits) operand, matching BIC's dst, src, mask order.
2518 (define_insn "andsi_notsi_si"
2519 [(set (match_operand:SI 0 "s_register_operand" "=r")
2520 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2521 (match_operand:SI 1 "s_register_operand" "r")))]
2523 "bic%?\\t%0, %1, %2"
2524 [(set_attr "predicable" "yes")]
;; Thumb-1 BIC: destination tied to the non-inverted operand ("0").
2527 (define_insn "bicsi3"
2528 [(set (match_operand:SI 0 "register_operand" "=l")
2529 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2530 (match_operand:SI 2 "register_operand" "0")))]
2533 [(set_attr "length" "2")]
;; BIC with a shifted second operand (%S4 prints the shift); the type
;; attribute distinguishes shift-by-constant from shift-by-register.
2536 (define_insn "andsi_not_shiftsi_si"
2537 [(set (match_operand:SI 0 "s_register_operand" "=r")
2538 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2539 [(match_operand:SI 2 "s_register_operand" "r")
2540 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2541 (match_operand:SI 1 "s_register_operand" "r")))]
2543 "bic%?\\t%0, %1, %2%S4"
2544 [(set_attr "predicable" "yes")
2545 (set_attr "shift" "2")
2546 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2547 (const_string "alu_shift")
2548 (const_string "alu_shift_reg")))]
;; Flag-setting BIC (BICS), result kept.
2551 (define_insn "*andsi_notsi_si_compare0"
2552 [(set (reg:CC_NOOV CC_REGNUM)
2554 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2555 (match_operand:SI 1 "s_register_operand" "r"))
2557 (set (match_operand:SI 0 "s_register_operand" "=r")
2558 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2560 "bic%.\\t%0, %1, %2"
2561 [(set_attr "conds" "set")]
;; Flag-setting BIC, result discarded into a scratch.
2564 (define_insn "*andsi_notsi_si_compare0_scratch"
2565 [(set (reg:CC_NOOV CC_REGNUM)
2567 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2568 (match_operand:SI 1 "s_register_operand" "r"))
2570 (clobber (match_scratch:SI 0 "=r"))]
2572 "bic%.\\t%0, %1, %2"
2573 [(set_attr "conds" "set")]
;; DImode inclusive OR: two 4-byte SImode ORRs (length 8); excluded when
;; iWMMXt provides its own DImode logical ops.
2576 (define_insn "iordi3"
2577 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2578 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2579 (match_operand:DI 2 "s_register_operand" "r,r")))]
2580 "TARGET_32BIT && ! TARGET_IWMMXT"
2582 [(set_attr "length" "8")
2583 (set_attr "predicable" "yes")]
;; OR with a zero-extended SImode operand: low word gets the ORR; high word
;; is unchanged (x | 0 = x), so the tied alternative needs only one insn.
2586 (define_insn "*iordi_zesidi_di"
2587 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2588 (ior:DI (zero_extend:DI
2589 (match_operand:SI 2 "s_register_operand" "r,r"))
2590 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2593 orr%?\\t%Q0, %Q1, %2
2595 [(set_attr "length" "4,8")
2596 (set_attr "predicable" "yes")]
;; OR with a sign-extended SImode operand: both words change (high word ORs
;; with the replicated sign), hence always length 8.
2599 (define_insn "*iordi_sesidi_di"
2600 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2601 (ior:DI (sign_extend:DI
2602 (match_operand:SI 2 "s_register_operand" "r,r"))
2603 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2606 [(set_attr "length" "8")
2607 (set_attr "predicable" "yes")]
;; iorsi3 expander: on 32-bit targets an awkward OR-immediate is split into
;; an encodable instruction sequence; Thumb-1 just forces constants into a
;; register.
2610 (define_expand "iorsi3"
2611 [(set (match_operand:SI 0 "s_register_operand" "")
2612 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2613 (match_operand:SI 2 "reg_or_int_operand" "")))]
2616 if (GET_CODE (operands[2]) == CONST_INT)
2620 arm_split_constant (IOR, SImode, NULL_RTX,
2621 INTVAL (operands[2]), operands[0], operands[1],
2622 optimize && can_create_pseudo_p ());
2625 else /* TARGET_THUMB1 */
2626 operands [2] = force_reg (SImode, operands [2]);
;; ARM OR: the "?n" alternative takes any constant and is split after
;; reload into up to four ORRs via arm_split_constant (length 16).
2631 (define_insn_and_split "*arm_iorsi3"
2632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2633 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2634 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2640 && GET_CODE (operands[2]) == CONST_INT
2641 && !const_ok_for_arm (INTVAL (operands[2]))"
2642 [(clobber (const_int 0))]
2644 arm_split_constant (IOR, SImode, curr_insn,
2645 INTVAL (operands[2]), operands[0], operands[1], 0);
2648 [(set_attr "length" "4,16")
2649 (set_attr "predicable" "yes")]
;; Thumb-1 OR: two-operand, low registers only.
2652 (define_insn "*thumb1_iorsi3"
2653 [(set (match_operand:SI 0 "register_operand" "=l")
2654 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2655 (match_operand:SI 2 "register_operand" "l")))]
2658 [(set_attr "length" "2")]
;; Peephole: when an OR immediate is not encodable but its complement is,
;; materialize the constant in a scratch and OR register-to-register.
2662 [(match_scratch:SI 3 "r")
2663 (set (match_operand:SI 0 "arm_general_register_operand" "")
2664 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2665 (match_operand:SI 2 "const_int_operand" "")))]
2667 && !const_ok_for_arm (INTVAL (operands[2]))
2668 && const_ok_for_arm (~INTVAL (operands[2]))"
2669 [(set (match_dup 3) (match_dup 2))
2670 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting ORR (ORRS), result kept.
2674 (define_insn "*iorsi3_compare0"
2675 [(set (reg:CC_NOOV CC_REGNUM)
2676 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2677 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2679 (set (match_operand:SI 0 "s_register_operand" "=r")
2680 (ior:SI (match_dup 1) (match_dup 2)))]
2682 "orr%.\\t%0, %1, %2"
2683 [(set_attr "conds" "set")]
;; Flag-setting ORR, result discarded into a scratch.
2686 (define_insn "*iorsi3_compare0_scratch"
2687 [(set (reg:CC_NOOV CC_REGNUM)
2688 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2689 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2691 (clobber (match_scratch:SI 0 "=r"))]
2693 "orr%.\\t%0, %1, %2"
2694 [(set_attr "conds" "set")]
;; DImode exclusive OR: two SImode EORs (length 8).
2697 (define_insn "xordi3"
2698 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2699 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2700 (match_operand:DI 2 "s_register_operand" "r,r")))]
2701 "TARGET_32BIT && !TARGET_IWMMXT"
2703 [(set_attr "length" "8")
2704 (set_attr "predicable" "yes")]
;; XOR with a zero-extended operand: only the low word needs an EOR
;; (x ^ 0 = x for the high half).
2707 (define_insn "*xordi_zesidi_di"
2708 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2709 (xor:DI (zero_extend:DI
2710 (match_operand:SI 2 "s_register_operand" "r,r"))
2711 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2714 eor%?\\t%Q0, %Q1, %2
2716 [(set_attr "length" "4,8")
2717 (set_attr "predicable" "yes")]
;; XOR with a sign-extended operand: both words change, always length 8.
2720 (define_insn "*xordi_sesidi_di"
2721 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2722 (xor:DI (sign_extend:DI
2723 (match_operand:SI 2 "s_register_operand" "r,r"))
2724 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2727 [(set_attr "length" "8")
2728 (set_attr "predicable" "yes")]
;; xorsi3 expander: constants that the predicate cannot accept are forced
;; into a register.
2731 (define_expand "xorsi3"
2732 [(set (match_operand:SI 0 "s_register_operand" "")
2733 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2734 (match_operand:SI 2 "arm_rhs_operand" "")))]
2737 if (GET_CODE (operands[2]) == CONST_INT)
2738 operands[2] = force_reg (SImode, operands[2]);
;; ARM EOR, register or encodable-immediate second operand.
2742 (define_insn "*arm_xorsi3"
2743 [(set (match_operand:SI 0 "s_register_operand" "=r")
2744 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2745 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2747 "eor%?\\t%0, %1, %2"
2748 [(set_attr "predicable" "yes")]
;; Thumb-1 EOR: two-operand, low registers only.
2751 (define_insn "*thumb1_xorsi3"
2752 [(set (match_operand:SI 0 "register_operand" "=l")
2753 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2754 (match_operand:SI 2 "register_operand" "l")))]
2757 [(set_attr "length" "2")]
;; Flag-setting EOR (EORS), result kept.
2760 (define_insn "*xorsi3_compare0"
2761 [(set (reg:CC_NOOV CC_REGNUM)
2762 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2763 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2765 (set (match_operand:SI 0 "s_register_operand" "=r")
2766 (xor:SI (match_dup 1) (match_dup 2)))]
2768 "eor%.\\t%0, %1, %2"
2769 [(set_attr "conds" "set")]
;; XOR compared against zero with no stored result (TEQ-style use).
2772 (define_insn "*xorsi3_compare0_scratch"
2773 [(set (reg:CC_NOOV CC_REGNUM)
2774 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2775 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2779 [(set_attr "conds" "set")]
2782 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2783 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; De Morgan split: (~A & ~B) | C  ==>  ~((A | B) & ~C), computed as
;; tmp = (A | B) & ~C; dst = ~tmp -- the trailing NOT may merge into a
;; following instruction.
2787 [(set (match_operand:SI 0 "s_register_operand" "")
2788 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2789 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2790 (match_operand:SI 3 "arm_rhs_operand" "")))
2791 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2793 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2794 (not:SI (match_dup 3))))
2795 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as a single pattern: ORR then BIC, two conditionalizable
;; instructions (ce_count 2).
2799 (define_insn "*andsi_iorsi3_notsi"
2800 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2801 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2802 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2803 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2805 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2806 [(set_attr "length" "8")
2807 (set_attr "ce_count" "2")
2808 (set_attr "predicable" "yes")]
2811 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2812 ; insns are available?
;; Four splits pairing a bitfield extract with a logical op whose other arm
;; is already a right shift.  Each requires both sides to use the same
;; operator (operands[1] == operands[9]) and the extract width to equal
;; 32 minus the other shift count, so the extract can be re-expressed as
;; ashift-into-scratch plus a shifted operand in the logical op.
;; Variant 1: zero_extract on the left, lshiftrt on the right.
2814 [(set (match_operand:SI 0 "s_register_operand" "")
2815 (match_operator:SI 1 "logical_binary_operator"
2816 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2817 (match_operand:SI 3 "const_int_operand" "")
2818 (match_operand:SI 4 "const_int_operand" ""))
2819 (match_operator:SI 9 "logical_binary_operator"
2820 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2821 (match_operand:SI 6 "const_int_operand" ""))
2822 (match_operand:SI 7 "s_register_operand" "")])]))
2823 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2825 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2826 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2829 [(ashift:SI (match_dup 2) (match_dup 4))
2833 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2836 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 2: same as variant 1 with the operator's arms swapped.
2840 [(set (match_operand:SI 0 "s_register_operand" "")
2841 (match_operator:SI 1 "logical_binary_operator"
2842 [(match_operator:SI 9 "logical_binary_operator"
2843 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2844 (match_operand:SI 6 "const_int_operand" ""))
2845 (match_operand:SI 7 "s_register_operand" "")])
2846 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2847 (match_operand:SI 3 "const_int_operand" "")
2848 (match_operand:SI 4 "const_int_operand" ""))]))
2849 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2851 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2852 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2855 [(ashift:SI (match_dup 2) (match_dup 4))
2859 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2862 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 3: sign_extract paired with ashiftrt (arithmetic shifts keep the
;; sign bit of the extracted field).
2866 [(set (match_operand:SI 0 "s_register_operand" "")
2867 (match_operator:SI 1 "logical_binary_operator"
2868 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2869 (match_operand:SI 3 "const_int_operand" "")
2870 (match_operand:SI 4 "const_int_operand" ""))
2871 (match_operator:SI 9 "logical_binary_operator"
2872 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2873 (match_operand:SI 6 "const_int_operand" ""))
2874 (match_operand:SI 7 "s_register_operand" "")])]))
2875 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2877 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2878 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2881 [(ashift:SI (match_dup 2) (match_dup 4))
2885 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2888 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant 4: variant 3 with the operator's arms swapped.
2892 [(set (match_operand:SI 0 "s_register_operand" "")
2893 (match_operator:SI 1 "logical_binary_operator"
2894 [(match_operator:SI 9 "logical_binary_operator"
2895 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2896 (match_operand:SI 6 "const_int_operand" ""))
2897 (match_operand:SI 7 "s_register_operand" "")])
2898 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2899 (match_operand:SI 3 "const_int_operand" "")
2900 (match_operand:SI 4 "const_int_operand" ""))]))
2901 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2903 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2904 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2907 [(ashift:SI (match_dup 2) (match_dup 4))
2911 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2914 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2918 ;; Minimum and maximum insns
;; smax expander: max against 0 or -1 has a branch-free single-instruction
;; form (see *smax_0 / *smax_m1 below), so the CC clobber is dropped for
;; those; otherwise the clobbering compare-and-conditional-move pattern
;; is used.
2920 (define_expand "smaxsi3"
2922 (set (match_operand:SI 0 "s_register_operand" "")
2923 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2924 (match_operand:SI 2 "arm_rhs_operand" "")))
2925 (clobber (reg:CC CC_REGNUM))])]
2928 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2930 /* No need for a clobber of the condition code register here.  */
2931 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2932 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when negative -- BIC with x's own sign
;; (x asr 31 is all-ones iff x < 0).
2938 (define_insn "*smax_0"
2939 [(set (match_operand:SI 0 "s_register_operand" "=r")
2940 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2943 "bic%?\\t%0, %1, %1, asr #31"
2944 [(set_attr "predicable" "yes")]
;; max(x, -1): set all bits when negative -- ORR with x's replicated sign.
2947 (define_insn "*smax_m1"
2948 [(set (match_operand:SI 0 "s_register_operand" "=r")
2949 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2952 "orr%?\\t%0, %1, %1, asr #31"
2953 [(set_attr "predicable" "yes")]
;; General signed max: CMP then one or two conditional MOVs (two when the
;; destination is tied to neither input).
2956 (define_insn "*arm_smax_insn"
2957 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2958 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2959 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2960 (clobber (reg:CC CC_REGNUM))]
2963 cmp\\t%1, %2\;movlt\\t%0, %2
2964 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2965 [(set_attr "conds" "clob")
2966 (set_attr "length" "8,12")]
;; smin expander: min against 0 has the branch-free AND form below.
2969 (define_expand "sminsi3"
2971 (set (match_operand:SI 0 "s_register_operand" "")
2972 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2973 (match_operand:SI 2 "arm_rhs_operand" "")))
2974 (clobber (reg:CC CC_REGNUM))])]
2977 if (operands[2] == const0_rtx)
2979 /* No need for a clobber of the condition code register here.  */
2980 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2981 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when negative -- AND with x's replicated sign.
2987 (define_insn "*smin_0"
2988 [(set (match_operand:SI 0 "s_register_operand" "=r")
2989 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2992 "and%?\\t%0, %1, %1, asr #31"
2993 [(set_attr "predicable" "yes")]
;; General signed min: CMP plus conditional MOVs (mirror of smax).
2996 (define_insn "*arm_smin_insn"
2997 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2998 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2999 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3000 (clobber (reg:CC CC_REGNUM))]
3003 cmp\\t%1, %2\;movge\\t%0, %2
3004 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3005 [(set_attr "conds" "clob")
3006 (set_attr "length" "8,12")]
;; Unsigned max expander (no constant special cases).
3009 (define_expand "umaxsi3"
3011 (set (match_operand:SI 0 "s_register_operand" "")
3012 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3013 (match_operand:SI 2 "arm_rhs_operand" "")))
3014 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: CMP with unsigned conditions (CC/CS instead of LT/GE).
3019 (define_insn "*arm_umaxsi3"
3020 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3021 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3022 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3023 (clobber (reg:CC CC_REGNUM))]
3026 cmp\\t%1, %2\;movcc\\t%0, %2
3027 cmp\\t%1, %2\;movcs\\t%0, %1
3028 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3029 [(set_attr "conds" "clob")
3030 (set_attr "length" "8,8,12")]
;; Unsigned min expander.
3033 (define_expand "uminsi3"
3035 (set (match_operand:SI 0 "s_register_operand" "")
3036 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3037 (match_operand:SI 2 "arm_rhs_operand" "")))
3038 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: mirror of *arm_umaxsi3 with CS/CC swapped.
3043 (define_insn "*arm_uminsi3"
3044 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3045 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3046 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3047 (clobber (reg:CC CC_REGNUM))]
3050 cmp\\t%1, %2\;movcs\\t%0, %2
3051 cmp\\t%1, %2\;movcc\\t%0, %1
3052 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3053 [(set_attr "conds" "clob")
3054 (set_attr "length" "8,8,12")]
;; Store min/max directly to memory: CMP, then two conditional stores (one
;; under %d3, the other under the inverse %D3); Thumb-2 needs an explicit
;; IT block ("ite") before the conditional pair.
3057 (define_insn "*store_minmaxsi"
3058 [(set (match_operand:SI 0 "memory_operand" "=m")
3059 (match_operator:SI 3 "minmax_operator"
3060 [(match_operand:SI 1 "s_register_operand" "r")
3061 (match_operand:SI 2 "s_register_operand" "r")]))
3062 (clobber (reg:CC CC_REGNUM))]
3065 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3066 operands[1], operands[2]);
3067 output_asm_insn (\"cmp\\t%1, %2\", operands);
3069 output_asm_insn (\"ite\t%d3\", operands);
3070 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3071 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3074 [(set_attr "conds" "clob")
3075 (set (attr "length")
3076 (if_then_else (eq_attr "is_thumb" "yes")
3079 (set_attr "type" "store1")]
; Reject the frame pointer in operand[1], since reloading this after
; it has been eliminated can cause carnage.
;; min/max combined with a shiftable ALU op: CMP, then the op executed
;; under each condition with the winning min/max input as the operand.
;; Alternative 0 with op3 == 0 and a commutative-friendly opcode can skip
;; one conditional instruction (the which_alternative check).
3084 (define_insn "*minmax_arithsi"
3085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3086 (match_operator:SI 4 "shiftable_operator"
3087 [(match_operator:SI 5 "minmax_operator"
3088 [(match_operand:SI 2 "s_register_operand" "r,r")
3089 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3090 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3091 (clobber (reg:CC CC_REGNUM))]
3092 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3095 enum rtx_code code = GET_CODE (operands[4]);
3098 if (which_alternative != 0 || operands[3] != const0_rtx
3099 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3104 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3105 operands[2], operands[3]);
3106 output_asm_insn (\"cmp\\t%2, %3\", operands);
3110 output_asm_insn (\"ite\\t%d5\", operands);
3112 output_asm_insn (\"it\\t%d5\", operands);
3114 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3116 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3119 [(set_attr "conds" "clob")
3120 (set (attr "length")
3121 (if_then_else (eq_attr "is_thumb" "yes")
3127 ;; Shift and rotation insns
;; 64-bit left shift.  A constant shift of exactly 1 expands to the cheap
;; two-instruction arm_ashldi3_1bit pattern; otherwise the generic path is
;; used (subject to the iWMMXt/Maverick restrictions noted below).
3129 (define_expand "ashldi3"
3130 [(set (match_operand:DI 0 "s_register_operand" "")
3131 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3132 (match_operand:SI 2 "reg_or_int_operand" "")))]
3135 if (GET_CODE (operands[2]) == CONST_INT)
3137 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3139 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3142 /* Ideally we shouldn't fail here if we could know that operands[1]
3143 ends up already living in an iwmmxt register. Otherwise it's
3144 cheaper to have the alternate code being generated than moving
3145 values to iwmmxt regs and back. */
3148 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
;; DImode shift-left-by-one: movs on the low word, then adc doubles the
;; high word while folding in the carried-out bit.
3153 (define_insn "arm_ashldi3_1bit"
3154 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3155 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3157 (clobber (reg:CC CC_REGNUM))]
3159 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3160 [(set_attr "conds" "clob")
3161 (set_attr "length" "8")]
;; 32-bit left shift; a constant amount greater than 31 degenerates to
;; moving zero into the destination.
3164 (define_expand "ashlsi3"
3165 [(set (match_operand:SI 0 "s_register_operand" "")
3166 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3167 (match_operand:SI 2 "arm_rhs_operand" "")))]
3170 if (GET_CODE (operands[2]) == CONST_INT
3171 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3173 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 left shift by immediate or register (output template elided here).
3179 (define_insn "*thumb1_ashlsi3"
3180 [(set (match_operand:SI 0 "register_operand" "=l,l")
3181 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3182 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3185 [(set_attr "length" "2")]
;; 64-bit arithmetic right shift; 1-bit constant shifts use the cheap
;; arm_ashrdi3_1bit pattern below.
3188 (define_expand "ashrdi3"
3189 [(set (match_operand:DI 0 "s_register_operand" "")
3190 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3191 (match_operand:SI 2 "reg_or_int_operand" "")))]
3194 if (GET_CODE (operands[2]) == CONST_INT)
3196 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3198 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3201 /* Ideally we shouldn't fail here if we could know that operands[1]
3202 ends up already living in an iwmmxt register. Otherwise it's
3203 cheaper to have the alternate code being generated than moving
3204 values to iwmmxt regs and back. */
3207 else if (!TARGET_REALLY_IWMMXT)
;; DImode asr #1: movs shifts the high word (capturing the shifted-out bit
;; in the carry), then rrx rotates it into the top of the low word.
3212 (define_insn "arm_ashrdi3_1bit"
3213 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3214 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3216 (clobber (reg:CC CC_REGNUM))]
3218 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3219 [(set_attr "conds" "clob")
3220 (set_attr "length" "8")]
;; 32-bit arithmetic right shift; constant amounts above 31 are clamped
;; to 31 (which yields all sign bits, matching the mathematical result).
3223 (define_expand "ashrsi3"
3224 [(set (match_operand:SI 0 "s_register_operand" "")
3225 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3226 (match_operand:SI 2 "arm_rhs_operand" "")))]
3229 if (GET_CODE (operands[2]) == CONST_INT
3230 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3231 operands[2] = GEN_INT (31);
;; Thumb-1 arithmetic right shift (output template elided here).
3235 (define_insn "*thumb1_ashrsi3"
3236 [(set (match_operand:SI 0 "register_operand" "=l,l")
3237 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3238 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3241 [(set_attr "length" "2")]
;; 64-bit logical right shift; 1-bit constant shifts use arm_lshrdi3_1bit.
3244 (define_expand "lshrdi3"
3245 [(set (match_operand:DI 0 "s_register_operand" "")
3246 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3247 (match_operand:SI 2 "reg_or_int_operand" "")))]
3250 if (GET_CODE (operands[2]) == CONST_INT)
3252 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3254 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3257 /* Ideally we shouldn't fail here if we could know that operands[1]
3258 ends up already living in an iwmmxt register. Otherwise it's
3259 cheaper to have the alternate code being generated than moving
3260 values to iwmmxt regs and back. */
3263 else if (!TARGET_REALLY_IWMMXT)
;; DImode lsr #1: same carry/rrx trick as arm_ashrdi3_1bit but with a
;; logical shift of the high word.
3268 (define_insn "arm_lshrdi3_1bit"
3269 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3270 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3272 (clobber (reg:CC CC_REGNUM))]
3274 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3275 [(set_attr "conds" "clob")
3276 (set_attr "length" "8")]
;; 32-bit logical right shift; constant amounts above 31 degenerate to
;; moving zero into the destination.
3279 (define_expand "lshrsi3"
3280 [(set (match_operand:SI 0 "s_register_operand" "")
3281 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3282 (match_operand:SI 2 "arm_rhs_operand" "")))]
3285 if (GET_CODE (operands[2]) == CONST_INT
3286 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3288 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Thumb-1 logical right shift (output template elided here).
3294 (define_insn "*thumb1_lshrsi3"
3295 [(set (match_operand:SI 0 "register_operand" "=l,l")
3296 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3297 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3300 [(set_attr "length" "2")]
;; Rotate left has no ARM instruction; implement it as rotate right by
;; (32 - n) % 32 for constants, or by a computed 32 - reg for registers.
3303 (define_expand "rotlsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "")
3305 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "reg_or_int_operand" "")))]
3309 if (GET_CODE (operands[2]) == CONST_INT)
3310 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3313 rtx reg = gen_reg_rtx (SImode);
3314 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right; constant amounts are reduced mod 32, and Thumb-1 forces
;; constant amounts into a register (it has no ror-by-immediate).
3320 (define_expand "rotrsi3"
3321 [(set (match_operand:SI 0 "s_register_operand" "")
3322 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3323 (match_operand:SI 2 "arm_rhs_operand" "")))]
3328 if (GET_CODE (operands[2]) == CONST_INT
3329 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3330 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3332 else /* TARGET_THUMB1 */
3334 if (GET_CODE (operands [2]) == CONST_INT)
3335 operands [2] = force_reg (SImode, operands[2]);
;; Thumb-1 register-amount rotate right (output template elided here).
3340 (define_insn "*thumb1_rotrsi3"
3341 [(set (match_operand:SI 0 "register_operand" "=l")
3342 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3343 (match_operand:SI 2 "register_operand" "l")))]
3346 [(set_attr "length" "2")]
;; Generic 32-bit shift by register or small immediate; the assembly is
;; produced by arm_output_shift.  The type attribute distinguishes the
;; immediate-shift form from the (slower) register-shift form.
3349 (define_insn "*arm_shiftsi3"
3350 [(set (match_operand:SI 0 "s_register_operand" "=r")
3351 (match_operator:SI 3 "shift_operator"
3352 [(match_operand:SI 1 "s_register_operand" "r")
3353 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3355 "* return arm_output_shift(operands, 0);"
3356 [(set_attr "predicable" "yes")
3357 (set_attr "shift" "1")
3358 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3359 (const_string "alu_shift")
3360 (const_string "alu_shift_reg")))]
;; Shift that also sets the condition codes (CC_NOOV compare of the result).
3363 (define_insn "*shiftsi3_compare0"
3364 [(set (reg:CC_NOOV CC_REGNUM)
3365 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3366 [(match_operand:SI 1 "s_register_operand" "r")
3367 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3369 (set (match_operand:SI 0 "s_register_operand" "=r")
3370 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3372 "* return arm_output_shift(operands, 1);"
3373 [(set_attr "conds" "set")
3374 (set_attr "shift" "1")
3375 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3376 (const_string "alu_shift")
3377 (const_string "alu_shift_reg")))]
;; As above, but only the flags are wanted; the shifted value itself goes
;; to a scratch register.
3380 (define_insn "*shiftsi3_compare0_scratch"
3381 [(set (reg:CC_NOOV CC_REGNUM)
3382 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3383 [(match_operand:SI 1 "s_register_operand" "r")
3384 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3386 (clobber (match_scratch:SI 0 "=r"))]
3388 "* return arm_output_shift(operands, 1);"
3389 [(set_attr "conds" "set")
3390 (set_attr "shift" "1")]
;; Bitwise NOT of a shifted operand (mvn with shifter operand).
3393 (define_insn "*arm_notsi_shiftsi"
3394 [(set (match_operand:SI 0 "s_register_operand" "=r")
3395 (not:SI (match_operator:SI 3 "shift_operator"
3396 [(match_operand:SI 1 "s_register_operand" "r")
3397 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3400 [(set_attr "predicable" "yes")
3401 (set_attr "shift" "1")
3402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3403 (const_string "alu_shift")
3404 (const_string "alu_shift_reg")))]
;; NOT of a shifted operand that also sets the condition codes.
3407 (define_insn "*arm_notsi_shiftsi_compare0"
3408 [(set (reg:CC_NOOV CC_REGNUM)
3409 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3410 [(match_operand:SI 1 "s_register_operand" "r")
3411 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3413 (set (match_operand:SI 0 "s_register_operand" "=r")
3414 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3417 [(set_attr "conds" "set")
3418 (set_attr "shift" "1")
3419 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3420 (const_string "alu_shift")
3421 (const_string "alu_shift_reg")))]
;; As above, flags only; the NOTed value goes to a scratch register.
3424 (define_insn "*arm_not_shiftsi_compare0_scratch"
3425 [(set (reg:CC_NOOV CC_REGNUM)
3426 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3427 [(match_operand:SI 1 "s_register_operand" "r")
3428 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3430 (clobber (match_scratch:SI 0 "=r"))]
3433 [(set_attr "conds" "set")
3434 (set_attr "shift" "1")
3435 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3436 (const_string "alu_shift")
3437 (const_string "alu_shift_reg")))]
3440 ;; We don't really have extzv, but defining this using shifts helps
3441 ;; to reduce register pressure later on.
;; Zero-extract implemented as shift-left then logical-shift-right;
;; on Thumb-2 it dispatches to the single-instruction ubfx (extzv_t2).
3443 (define_expand "extzv"
3445 (ashift:SI (match_operand:SI 1 "register_operand" "")
3446 (match_operand:SI 2 "const_int_operand" "")))
3447 (set (match_operand:SI 0 "register_operand" "")
3448 (lshiftrt:SI (match_dup 4)
3449 (match_operand:SI 3 "const_int_operand" "")))]
3450 "TARGET_THUMB1 || arm_arch_thumb2"
3453 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3454 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3456 if (arm_arch_thumb2)
3458 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3463 operands[3] = GEN_INT (rshift);
3467 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3471 operands[2] = GEN_INT (lshift);
3472 operands[4] = gen_reg_rtx (SImode);
;; Signed bit-field extract via sbfx.  NOTE(review): the (define_insn ...)
;; header line for this pattern appears to be missing from this extraction.
3477 [(set (match_operand:SI 0 "s_register_operand" "=r")
3478 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3479 (match_operand:SI 2 "const_int_operand" "M")
3480 (match_operand:SI 3 "const_int_operand" "M")))]
3482 "sbfx%?\t%0, %1, %3, %2"
3483 [(set_attr "length" "4")
3484 (set_attr "predicable" "yes")]
;; Unsigned bit-field extract via the Thumb-2/ARMv6T2 ubfx instruction.
3487 (define_insn "extzv_t2"
3488 [(set (match_operand:SI 0 "s_register_operand" "=r")
3489 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3490 (match_operand:SI 2 "const_int_operand" "M")
3491 (match_operand:SI 3 "const_int_operand" "M")))]
3493 "ubfx%?\t%0, %1, %3, %2"
3494 [(set_attr "length" "4")
3495 (set_attr "predicable" "yes")]
3499 ;; Unary arithmetic insns
;; 64-bit negate; forces a non-REG source into a register and clobbers CC.
3501 (define_expand "negdi2"
3503 [(set (match_operand:DI 0 "s_register_operand" "")
3504 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3505 (clobber (reg:CC CC_REGNUM))])]
3510 if (GET_CODE (operands[1]) != REG)
3511 operands[1] = force_reg (DImode, operands[1]);
3516 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3517 ;; The second alternative is to allow the common case of a *full* overlap.
;; ARM 64-bit negate: rsbs on the low word, rsc on the high word.
3518 (define_insn "*arm_negdi2"
3519 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3520 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3521 (clobber (reg:CC CC_REGNUM))]
3523 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3524 [(set_attr "conds" "clob")
3525 (set_attr "length" "8")]
;; Thumb-1 64-bit negate: zero the high word, negate the low word (setting
;; borrow), then subtract-with-carry the source high word.
3528 (define_insn "*thumb1_negdi2"
3529 [(set (match_operand:DI 0 "register_operand" "=&l")
3530 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3531 (clobber (reg:CC CC_REGNUM))]
3533 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3534 [(set_attr "length" "6")]
;; 32-bit negate.
3537 (define_expand "negsi2"
3538 [(set (match_operand:SI 0 "s_register_operand" "")
3539 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
;; ARM negate: reverse-subtract from zero; predicable.
3544 (define_insn "*arm_negsi2"
3545 [(set (match_operand:SI 0 "s_register_operand" "=r")
3546 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3548 "rsb%?\\t%0, %1, #0"
3549 [(set_attr "predicable" "yes")]
;; Thumb-1 negate (output template elided here).
3552 (define_insn "*thumb1_negsi2"
3553 [(set (match_operand:SI 0 "register_operand" "=l")
3554 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3557 [(set_attr "length" "2")]
;; Floating-point negate expanders; only enabled for FPA or VFP hard float.
3560 (define_expand "negsf2"
3561 [(set (match_operand:SF 0 "s_register_operand" "")
3562 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3563 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3567 (define_expand "negdf2"
3568 [(set (match_operand:DF 0 "s_register_operand" "")
3569 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3570 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3573 ;; abssi2 doesn't really clobber the condition codes if a different register
3574 ;; is being set. To keep things simple, assume during rtl manipulations that
3575 ;; it does, but tell the final scan operator the truth. Similarly for (neg (abs ...)).
;; 32-bit absolute value; operand 2 is either a SCRATCH or the CC register
;; depending on which clobber is really needed (lines choosing between the
;; two appear elided here).
3578 (define_expand "abssi2"
3580 [(set (match_operand:SI 0 "s_register_operand" "")
3581 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3582 (clobber (match_dup 2))])]
3586 operands[2] = gen_rtx_SCRATCH (SImode);
3588 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; ARM abs: either compare-and-conditionally-rsb (clobbers CC) or the
;; branch-free eor/sub with asr #31 sign-mask trick (CC untouched).
3591 (define_insn "*arm_abssi2"
3592 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3593 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3594 (clobber (reg:CC CC_REGNUM))]
3597 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3598 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3599 [(set_attr "conds" "clob,*")
3600 (set_attr "shift" "1")
3601 ;; predicable can't be set based on the variant, so left as no
3602 (set_attr "length" "8")]
;; Thumb-1 abs, split after reload into asr/add/xor (sign-mask sequence).
3605 (define_insn_and_split "*thumb1_abssi2"
3606 [(set (match_operand:SI 0 "s_register_operand" "=l")
3607 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3608 (clobber (match_scratch:SI 2 "=&l"))]
3611 "TARGET_THUMB1 && reload_completed"
3612 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3613 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3614 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3616 [(set_attr "length" "6")]
;; ARM -(abs x): mirror of *arm_abssi2 with the conditions/operations
;; negated (rsbgt, and rsb in the branch-free variant).
3619 (define_insn "*arm_neg_abssi2"
3620 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3621 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3622 (clobber (reg:CC CC_REGNUM))]
3625 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3626 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3627 [(set_attr "conds" "clob,*")
3628 (set_attr "shift" "1")
3629 ;; predicable can't be set based on the variant, so left as no
3630 (set_attr "length" "8")]
;; Thumb-1 -(abs x), split after reload into asr/minus/xor.
3633 (define_insn_and_split "*thumb1_neg_abssi2"
3634 [(set (match_operand:SI 0 "s_register_operand" "=l")
3635 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3636 (clobber (match_scratch:SI 2 "=&l"))]
3639 "TARGET_THUMB1 && reload_completed"
3640 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3641 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3642 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3644 [(set_attr "length" "6")]
;; Floating-point abs and sqrt expanders for the hard-float backends.
3647 (define_expand "abssf2"
3648 [(set (match_operand:SF 0 "s_register_operand" "")
3649 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3650 "TARGET_32BIT && TARGET_HARD_FLOAT"
3653 (define_expand "absdf2"
3654 [(set (match_operand:DF 0 "s_register_operand" "")
3655 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3656 "TARGET_32BIT && TARGET_HARD_FLOAT"
3659 (define_expand "sqrtsf2"
3660 [(set (match_operand:SF 0 "s_register_operand" "")
3661 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3662 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3665 (define_expand "sqrtdf2"
3666 [(set (match_operand:DF 0 "s_register_operand" "")
3667 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3668 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
;; 64-bit bitwise NOT; split after reload into two independent SImode
;; mvn operations on the low and high halves.
3671 (define_insn_and_split "one_cmpldi2"
3672 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3673 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3676 "TARGET_32BIT && reload_completed"
3677 [(set (match_dup 0) (not:SI (match_dup 1)))
3678 (set (match_dup 2) (not:SI (match_dup 3)))]
3681 operands[2] = gen_highpart (SImode, operands[0]);
3682 operands[0] = gen_lowpart (SImode, operands[0]);
3683 operands[3] = gen_highpart (SImode, operands[1]);
3684 operands[1] = gen_lowpart (SImode, operands[1]);
3686 [(set_attr "length" "8")
3687 (set_attr "predicable" "yes")]
;; 32-bit bitwise NOT.
3690 (define_expand "one_cmplsi2"
3691 [(set (match_operand:SI 0 "s_register_operand" "")
3692 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
;; ARM NOT (mvn); output template elided here.
3697 (define_insn "*arm_one_cmplsi2"
3698 [(set (match_operand:SI 0 "s_register_operand" "=r")
3699 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3702 [(set_attr "predicable" "yes")]
;; Thumb-1 NOT (output template elided here).
3705 (define_insn "*thumb1_one_cmplsi2"
3706 [(set (match_operand:SI 0 "register_operand" "=l")
3707 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3710 [(set_attr "length" "2")]
;; NOT that also sets the condition codes (mvns), keeping the result.
3713 (define_insn "*notsi_compare0"
3714 [(set (reg:CC_NOOV CC_REGNUM)
3715 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3717 (set (match_operand:SI 0 "s_register_operand" "=r")
3718 (not:SI (match_dup 1)))]
3721 [(set_attr "conds" "set")]
;; As above, flags only; the result goes to a scratch register.
3724 (define_insn "*notsi_compare0_scratch"
3725 [(set (reg:CC_NOOV CC_REGNUM)
3726 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3728 (clobber (match_scratch:SI 0 "=r"))]
3731 [(set_attr "conds" "set")]
3734 ;; Fixed <--> Floating conversion insns
;; int -> half-float goes through SFmode: expand the int->SF conversion,
;; then narrow SF -> HF.
3736 (define_expand "floatsihf2"
3737 [(set (match_operand:HF 0 "general_operand" "")
3738 (float:HF (match_operand:SI 1 "general_operand" "")))]
3742 rtx op1 = gen_reg_rtx (SFmode);
3743 expand_float (op1, operands[1], 0);
3744 op1 = convert_to_mode (HFmode, op1, 0);
3745 emit_move_insn (operands[0], op1);
;; DImode -> half-float, same two-step route through SFmode.
3750 (define_expand "floatdihf2"
3751 [(set (match_operand:HF 0 "general_operand" "")
3752 (float:HF (match_operand:DI 1 "general_operand" "")))]
3756 rtx op1 = gen_reg_rtx (SFmode);
3757 expand_float (op1, operands[1], 0);
3758 op1 = convert_to_mode (HFmode, op1, 0);
3759 emit_move_insn (operands[0], op1);
;; int -> SF; Maverick (Cirrus) targets use their dedicated pattern.
3764 (define_expand "floatsisf2"
3765 [(set (match_operand:SF 0 "s_register_operand" "")
3766 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3767 "TARGET_32BIT && TARGET_HARD_FLOAT"
3769 if (TARGET_MAVERICK)
3771 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
;; int -> DF; Maverick targets use their dedicated pattern.
3776 (define_expand "floatsidf2"
3777 [(set (match_operand:DF 0 "s_register_operand" "")
3778 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3779 "TARGET_32BIT && TARGET_HARD_FLOAT"
3781 if (TARGET_MAVERICK)
3783 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
;; half-float -> int truncations go through SFmode first.
3788 (define_expand "fix_trunchfsi2"
3789 [(set (match_operand:SI 0 "general_operand" "")
3790 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3794 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3795 expand_fix (operands[0], op1, 0);
3800 (define_expand "fix_trunchfdi2"
3801 [(set (match_operand:DI 0 "general_operand" "")
3802 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3806 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3807 expand_fix (operands[0], op1, 0);
;; SF -> int truncation; the Maverick path forces both operands into
;; Cirrus-acceptable registers first.
;; NOTE(review): below, `operands[1] = force_reg (SFmode, operands[0])`
;; copies operand 0 (the SImode destination) — almost certainly a
;; copy-paste bug for `force_reg (SFmode, operands[1])`.  Left as-is
;; because surrounding lines are elided; confirm against upstream arm.md.
3812 (define_expand "fix_truncsfsi2"
3813 [(set (match_operand:SI 0 "s_register_operand" "")
3814 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3815 "TARGET_32BIT && TARGET_HARD_FLOAT"
3817 if (TARGET_MAVERICK)
3819 if (!cirrus_fp_register (operands[0], SImode))
3820 operands[0] = force_reg (SImode, operands[0]);
3821 if (!cirrus_fp_register (operands[1], SFmode))
3822 operands[1] = force_reg (SFmode, operands[0]);
3823 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
;; DF -> int truncation.  NOTE(review): same apparent copy-paste bug below:
;; `force_reg (DFmode, operands[0])` should presumably read operands[1].
3828 (define_expand "fix_truncdfsi2"
3829 [(set (match_operand:SI 0 "s_register_operand" "")
3830 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3831 "TARGET_32BIT && TARGET_HARD_FLOAT"
3833 if (TARGET_MAVERICK)
3835 if (!cirrus_fp_register (operands[1], DFmode))
3836 operands[1] = force_reg (DFmode, operands[0]);
3837 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
;; DF -> SF narrowing.
3844 (define_expand "truncdfsf2"
3845 [(set (match_operand:SF 0 "s_register_operand" "")
3847 (match_operand:DF 1 "s_register_operand" "")))]
3848 "TARGET_32BIT && TARGET_HARD_FLOAT"
3852 /* DFmode -> HFmode conversions have to go through SFmode. */
3853 (define_expand "truncdfhf2"
3854 [(set (match_operand:HF 0 "general_operand" "")
3856 (match_operand:DF 1 "general_operand" "")))]
3861 op1 = convert_to_mode (SFmode, operands[1], 0);
3862 op1 = convert_to_mode (HFmode, op1, 0);
3863 emit_move_insn (operands[0], op1);
3868 ;; Zero and sign extension instructions.
;; SI -> DI zero extension.
3870 (define_expand "zero_extendsidi2"
3871 [(set (match_operand:DI 0 "s_register_operand" "")
3872 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
;; Copy the low word (skipped when source and destination-low already
;; coincide, endian-adjusted), then zero the high word.
3877 (define_insn "*arm_zero_extendsidi2"
3878 [(set (match_operand:DI 0 "s_register_operand" "=r")
3879 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3882 if (REGNO (operands[1])
3883 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3884 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3885 return \"mov%?\\t%R0, #0\";
3887 [(set_attr "length" "8")
3888 (set_attr "predicable" "yes")]
;; QI -> DI zero extension.
3891 (define_expand "zero_extendqidi2"
3892 [(set (match_operand:DI 0 "s_register_operand" "")
3893 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
;; Register source: mask with #255; memory source: ldrb.  Either way the
;; high word is then zeroed.
3898 (define_insn "*arm_zero_extendqidi2"
3899 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3900 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3903 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3904 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3905 [(set_attr "length" "8")
3906 (set_attr "predicable" "yes")
3907 (set_attr "type" "*,load_byte")
3908 (set_attr "pool_range" "*,4092")
3909 (set_attr "neg_pool_range" "*,4084")]
;; SI -> DI sign extension.
3912 (define_expand "extendsidi2"
3913 [(set (match_operand:DI 0 "s_register_operand" "")
3914 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
;; Copy the low word if needed, then replicate its sign bit into the high
;; word with asr #31.
3919 (define_insn "*arm_extendsidi2"
3920 [(set (match_operand:DI 0 "s_register_operand" "=r")
3921 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3924 if (REGNO (operands[1])
3925 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3926 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3927 return \"mov%?\\t%R0, %Q0, asr #31\";
3929 [(set_attr "length" "8")
3930 (set_attr "shift" "1")
3931 (set_attr "predicable" "yes")]
;; HI -> SI zero extension.  Memory sources use ldrh (v4+/Thumb-1) or the
;; byte-at-a-time movhi_bytes fallback on pre-v4 ARM; register sources on
;; older cores fall back to a shift-left-16 / shift-right-16 pair.
3934 (define_expand "zero_extendhisi2"
3936 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3938 (set (match_operand:SI 0 "s_register_operand" "")
3939 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3943 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3945 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3946 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3950 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3952 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3956 if (!s_register_operand (operands[1], HImode))
3957 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3961 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3962 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3966 operands[1] = gen_lowpart (SImode, operands[1]);
3967 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 ldrh from memory; includes a workaround that rewrites
;; SP-relative addresses which reload can (buggily) hand us.
3971 (define_insn "*thumb1_zero_extendhisi2"
3972 [(set (match_operand:SI 0 "register_operand" "=l")
3973 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3974 "TARGET_THUMB1 && !arm_arch6"
3976 rtx mem = XEXP (operands[1], 0);
3978 if (GET_CODE (mem) == CONST)
3979 mem = XEXP (mem, 0);
3981 if (GET_CODE (mem) == LABEL_REF)
3982 return \"ldr\\t%0, %1\";
3984 if (GET_CODE (mem) == PLUS)
3986 rtx a = XEXP (mem, 0);
3987 rtx b = XEXP (mem, 1);
3989 /* This can happen due to bugs in reload. */
3990 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3993 ops[0] = operands[0];
3996 output_asm_insn (\"mov %0, %1\", ops);
3998 XEXP (mem, 0) = operands[0];
4001 else if ( GET_CODE (a) == LABEL_REF
4002 && GET_CODE (b) == CONST_INT)
4003 return \"ldr\\t%0, %1\";
4006 return \"ldrh\\t%0, %1\";
4008 [(set_attr "length" "4")
4009 (set_attr "type" "load_byte")
4010 (set_attr "pool_range" "60")]
;; Thumb-1 v6 variant: register sources use the single uxth instruction;
;; memory sources reuse the ldrh logic (including the SP workaround) above.
4013 (define_insn "*thumb1_zero_extendhisi2_v6"
4014 [(set (match_operand:SI 0 "register_operand" "=l,l")
4015 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4016 "TARGET_THUMB1 && arm_arch6"
4020 if (which_alternative == 0)
4021 return \"uxth\\t%0, %1\";
4023 mem = XEXP (operands[1], 0);
4025 if (GET_CODE (mem) == CONST)
4026 mem = XEXP (mem, 0);
4028 if (GET_CODE (mem) == LABEL_REF)
4029 return \"ldr\\t%0, %1\";
4031 if (GET_CODE (mem) == PLUS)
4033 rtx a = XEXP (mem, 0);
4034 rtx b = XEXP (mem, 1);
4036 /* This can happen due to bugs in reload. */
4037 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4040 ops[0] = operands[0];
4043 output_asm_insn (\"mov %0, %1\", ops);
4045 XEXP (mem, 0) = operands[0];
4048 else if ( GET_CODE (a) == LABEL_REF
4049 && GET_CODE (b) == CONST_INT)
4050 return \"ldr\\t%0, %1\";
4053 return \"ldrh\\t%0, %1\";
4055 [(set_attr "length" "2,4")
4056 (set_attr "type" "alu_shift,load_byte")
4057 (set_attr "pool_range" "*,60")]
;; ARM v4 (pre-v6) ldrh from memory (output template elided here).
4060 (define_insn "*arm_zero_extendhisi2"
4061 [(set (match_operand:SI 0 "s_register_operand" "=r")
4062 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4063 "TARGET_ARM && arm_arch4 && !arm_arch6"
4065 [(set_attr "type" "load_byte")
4066 (set_attr "predicable" "yes")
4067 (set_attr "pool_range" "256")
4068 (set_attr "neg_pool_range" "244")]
;; ARM v6: uxth for registers, ldrh for memory (templates elided here).
4071 (define_insn "*arm_zero_extendhisi2_v6"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4073 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4074 "TARGET_ARM && arm_arch6"
4078 [(set_attr "type" "alu_shift,load_byte")
4079 (set_attr "predicable" "yes")
4080 (set_attr "pool_range" "*,256")
4081 (set_attr "neg_pool_range" "*,244")]
;; Fused zero-extend-halfword plus add, via the v6 uxtah instruction.
4084 (define_insn "*arm_zero_extendhisi2addsi"
4085 [(set (match_operand:SI 0 "s_register_operand" "=r")
4086 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4087 (match_operand:SI 2 "s_register_operand" "r")))]
4089 "uxtah%?\\t%0, %2, %1"
4090 [(set_attr "type" "alu_shift")
4091 (set_attr "predicable" "yes")]
;; QI -> SI zero extension.  Pre-v6 register sources are handled by ARM
;; with an AND against 0xff, and by Thumb with a shift-left-24 /
;; shift-right-24 pair through a temporary.
4094 (define_expand "zero_extendqisi2"
4095 [(set (match_operand:SI 0 "s_register_operand" "")
4096 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4099 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4103 emit_insn (gen_andsi3 (operands[0],
4104 gen_lowpart (SImode, operands[1]),
4107 else /* TARGET_THUMB */
4109 rtx temp = gen_reg_rtx (SImode);
4112 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4113 operands[1] = gen_lowpart (SImode, operands[1]);
4116 ops[1] = operands[1];
4117 ops[2] = GEN_INT (24);
4119 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4120 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4122 ops[0] = operands[0];
4124 ops[2] = GEN_INT (24);
4126 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4127 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
;; Thumb-1 ldrb from memory (output template elided here).
4134 (define_insn "*thumb1_zero_extendqisi2"
4135 [(set (match_operand:SI 0 "register_operand" "=l")
4136 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4137 "TARGET_THUMB1 && !arm_arch6"
4139 [(set_attr "length" "2")
4140 (set_attr "type" "load_byte")
4141 (set_attr "pool_range" "32")]
;; Thumb-1 v6: uxtb for registers, ldrb for memory (templates elided here).
4144 (define_insn "*thumb1_zero_extendqisi2_v6"
4145 [(set (match_operand:SI 0 "register_operand" "=l,l")
4146 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4147 "TARGET_THUMB1 && arm_arch6"
4151 [(set_attr "length" "2,2")
4152 (set_attr "type" "alu_shift,load_byte")
4153 (set_attr "pool_range" "*,32")]
;; ARM pre-v6 ldrb; the %@ comment tags the instruction in the asm output.
4156 (define_insn "*arm_zero_extendqisi2"
4157 [(set (match_operand:SI 0 "s_register_operand" "=r")
4158 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4159 "TARGET_ARM && !arm_arch6"
4160 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4161 [(set_attr "type" "load_byte")
4162 (set_attr "predicable" "yes")
4163 (set_attr "pool_range" "4096")
4164 (set_attr "neg_pool_range" "4084")]
;; ARM v6: uxtb for registers (template line elided), ldrb for memory.
4167 (define_insn "*arm_zero_extendqisi2_v6"
4168 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4169 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4170 "TARGET_ARM && arm_arch6"
4173 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4174 [(set_attr "type" "alu_shift,load_byte")
4175 (set_attr "predicable" "yes")
4176 (set_attr "pool_range" "*,4096")
4177 (set_attr "neg_pool_range" "*,4084")]
;; Fused zero-extend-byte plus add, via the v6 uxtab instruction.
4180 (define_insn "*arm_zero_extendqisi2addsi"
4181 [(set (match_operand:SI 0 "s_register_operand" "=r")
4182 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4183 (match_operand:SI 2 "s_register_operand" "r")))]
4185 "uxtab%?\\t%0, %2, %1"
4186 [(set_attr "predicable" "yes")
4187 (set_attr "insn" "xtab")
4188 (set_attr "type" "alu_shift")]
;; Split: zero-extend of the low byte of a register (little-endian
;; subreg byte 0) becomes a copy plus an AND with 255.
;; NOTE(review): the (define_split) header line appears missing here.
4192 [(set (match_operand:SI 0 "s_register_operand" "")
4193 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4194 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4195 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4196 [(set (match_dup 2) (match_dup 1))
4197 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian twin of the split above (low byte is subreg byte 3).
;; NOTE(review): its (define_split) header line also appears missing.
4202 [(set (match_operand:SI 0 "s_register_operand" "")
4203 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4204 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4205 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4206 [(set (match_dup 2) (match_dup 1))
4207 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Code iterator covering IOR and XOR so the next split matches both.
4211 (define_code_iterator ior_xor [ior xor])
;; Split (x << n) masked and combined (ior/xor) with a low-part value into
;; the combine followed by a zero-extend, when the mask exactly covers the
;; bits the shifted low part can occupy.
4214 [(set (match_operand:SI 0 "s_register_operand" "")
4215 (ior_xor:SI (and:SI (ashift:SI
4216 (match_operand:SI 1 "s_register_operand" "")
4217 (match_operand:SI 2 "const_int_operand" ""))
4218 (match_operand:SI 3 "const_int_operand" ""))
4220 (match_operator 5 "subreg_lowpart_operator"
4221 [(match_operand:SI 4 "s_register_operand" "")]))))]
4223 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4224 == (GET_MODE_MASK (GET_MODE (operands[5]))
4225 & (GET_MODE_MASK (GET_MODE (operands[5]))
4226 << (INTVAL (operands[2])))))"
4227 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4229 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4230 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag.
4233 (define_insn "*compareqi_eq0"
4234 [(set (reg:CC_Z CC_REGNUM)
4235 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4239 [(set_attr "conds" "set")]
;; HI -> SI sign extension.  Memory sources use thumb1_extendhisi2 /
;; extendhisi2_mem as appropriate; older register paths fall back to a
;; shift-left-16 / arithmetic-shift-right pair.
4242 (define_expand "extendhisi2"
4244 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4246 (set (match_operand:SI 0 "s_register_operand" "")
4247 (ashiftrt:SI (match_dup 2)
4252 if (GET_CODE (operands[1]) == MEM)
4256 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4261 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4262 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4267 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4269 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4273 if (!s_register_operand (operands[1], HImode))
4274 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4279 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4281 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4282 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4287 operands[1] = gen_lowpart (SImode, operands[1]);
4288 operands[2] = gen_reg_rtx (SImode);
;; Thumb-1 ldrsh.  Thumb-1 ldrsh only takes [reg, reg] addressing, so
;; constant offsets are materialised into the scratch register first.
4292 (define_insn "thumb1_extendhisi2"
4293 [(set (match_operand:SI 0 "register_operand" "=l")
4294 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4295 (clobber (match_scratch:SI 2 "=&l"))]
4296 "TARGET_THUMB1 && !arm_arch6"
4300 rtx mem = XEXP (operands[1], 0);
4302 /* This code used to try to use 'V', and fix the address only if it was
4303 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4304 range of QImode offsets, and offsettable_address_p does a QImode
4307 if (GET_CODE (mem) == CONST)
4308 mem = XEXP (mem, 0);
4310 if (GET_CODE (mem) == LABEL_REF)
4311 return \"ldr\\t%0, %1\";
4313 if (GET_CODE (mem) == PLUS)
4315 rtx a = XEXP (mem, 0);
4316 rtx b = XEXP (mem, 1);
4318 if (GET_CODE (a) == LABEL_REF
4319 && GET_CODE (b) == CONST_INT)
4320 return \"ldr\\t%0, %1\";
4322 if (GET_CODE (b) == REG)
4323 return \"ldrsh\\t%0, %1\";
4331 ops[2] = const0_rtx;
4334 gcc_assert (GET_CODE (ops[1]) == REG);
4336 ops[0] = operands[0];
4337 ops[3] = operands[2];
4338 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4341 [(set_attr "length" "4")
4342 (set_attr "type" "load_byte")
4343 (set_attr "pool_range" "1020")]
4346 ;; We used to have an early-clobber on the scratch register here.
4347 ;; However, there's a bug somewhere in reload which means that this
4348 ;; can be partially ignored during spill allocation if the memory
4349 ;; address also needs reloading; this causes us to die later on when
4350 ;; we try to verify the operands. Fortunately, we don't really need
4351 ;; the early-clobber: we can always use operand 0 if operand 2
4352 ;; overlaps the address.
4353 (define_insn "*thumb1_extendhisi2_insn_v6"
4354 [(set (match_operand:SI 0 "register_operand" "=l,l")
4355 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4356 (clobber (match_scratch:SI 2 "=X,l"))]
4357 "TARGET_THUMB1 && arm_arch6"
4363 if (which_alternative == 0)
4364 return \"sxth\\t%0, %1\";
4366 mem = XEXP (operands[1], 0);
4368 /* This code used to try to use 'V', and fix the address only if it was
4369 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4370 range of QImode offsets, and offsettable_address_p does a QImode
4373 if (GET_CODE (mem) == CONST)
4374 mem = XEXP (mem, 0);
4376 if (GET_CODE (mem) == LABEL_REF)
4377 return \"ldr\\t%0, %1\";
4379 if (GET_CODE (mem) == PLUS)
4381 rtx a = XEXP (mem, 0);
4382 rtx b = XEXP (mem, 1);
4384 if (GET_CODE (a) == LABEL_REF
4385 && GET_CODE (b) == CONST_INT)
4386 return \"ldr\\t%0, %1\";
4388 if (GET_CODE (b) == REG)
4389 return \"ldrsh\\t%0, %1\";
4397 ops[2] = const0_rtx;
4400 gcc_assert (GET_CODE (ops[1]) == REG);
4402 ops[0] = operands[0];
4403 if (reg_mentioned_p (operands[2], ops[1]))
4406 ops[3] = operands[2];
4407 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4410 [(set_attr "length" "2,4")
4411 (set_attr "type" "alu_shift,load_byte")
4412 (set_attr "pool_range" "*,1020")]
4415 ;; This pattern will only be used when ldsh is not available
;; Sign-extend a halfword from memory byte-by-byte (pre-ARMv4, no
;; ldrsh): load the two bytes separately, shift the sign byte up to
;; bit 31 and arithmetic-shift back down, OR-ing in the other byte.
;; operands[4]/[5] select which byte is the sign byte per endianness.
4416 (define_expand "extendhisi2_mem"
4417 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4419 (zero_extend:SI (match_dup 7)))
4420 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4421 (set (match_operand:SI 0 "" "")
4422 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4427 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4429 mem1 = change_address (operands[1], QImode, addr);
4430 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4431 operands[0] = gen_lowpart (SImode, operands[0]);
4433 operands[2] = gen_reg_rtx (SImode);
4434 operands[3] = gen_reg_rtx (SImode);
4435 operands[6] = gen_reg_rtx (SImode);
4438 if (BYTES_BIG_ENDIAN)
4440 operands[4] = operands[2];
4441 operands[5] = operands[3];
4445 operands[4] = operands[3];
4446 operands[5] = operands[2];
;; ARM-mode (v4, pre-v6) sign-extending halfword load: single ldrsh.
4451 (define_insn "*arm_extendhisi2"
4452 [(set (match_operand:SI 0 "s_register_operand" "=r")
4453 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4454 "TARGET_ARM && arm_arch4 && !arm_arch6"
4455 "ldr%(sh%)\\t%0, %1"
4456 [(set_attr "type" "load_byte")
4457 (set_attr "predicable" "yes")
4458 (set_attr "pool_range" "256")
4459 (set_attr "neg_pool_range" "244")]
4462 ;; ??? Check Thumb-2 pool range
;; v6 variant covering both 32-bit ISAs: register source (sxth path)
;; or memory source (ldrsh path).  Output template elided here.
4463 (define_insn "*arm_extendhisi2_v6"
4464 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4465 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4466 "TARGET_32BIT && arm_arch6"
4470 [(set_attr "type" "alu_shift,load_byte")
4471 (set_attr "predicable" "yes")
4472 (set_attr "pool_range" "*,256")
4473 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-halfword-and-add: sxtah rd, rn, rm.
;; The insn condition line is elided in this excerpt.
4476 (define_insn "*arm_extendhisi2addsi"
4477 [(set (match_operand:SI 0 "s_register_operand" "=r")
4478 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4479 (match_operand:SI 2 "s_register_operand" "r")))]
4481 "sxtah%?\\t%0, %2, %1"
;; QImode -> HImode sign extension: direct ldrsb for memory on v4+,
;; otherwise done in SImode via the shift pair on lowparts.
4484 (define_expand "extendqihi2"
4486 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4488 (set (match_operand:HI 0 "s_register_operand" "")
4489 (ashiftrt:SI (match_dup 2)
4494 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4496 emit_insn (gen_rtx_SET (VOIDmode,
4498 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4501 if (!s_register_operand (operands[1], QImode))
4502 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4503 operands[0] = gen_lowpart (SImode, operands[0]);
4504 operands[1] = gen_lowpart (SImode, operands[1]);
4505 operands[2] = gen_reg_rtx (SImode);
;; ARM-mode sign-extending byte load into an HImode register (ldrsb);
;; "Uq" restricts the address to ldrsb-compatible forms.
4509 (define_insn "*arm_extendqihi_insn"
4510 [(set (match_operand:HI 0 "s_register_operand" "=r")
4511 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4512 "TARGET_ARM && arm_arch4"
4513 "ldr%(sb%)\\t%0, %1"
4514 [(set_attr "type" "load_byte")
4515 (set_attr "predicable" "yes")
4516 (set_attr "pool_range" "256")
4517 (set_attr "neg_pool_range" "244")]
;; QImode -> SImode sign extension expander, same strategy as
;; extendhisi2: direct sign_extend where ldrsb/sxtb exist, otherwise
;; the shift-pair fallback on SImode lowparts.
4520 (define_expand "extendqisi2"
4522 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4524 (set (match_operand:SI 0 "s_register_operand" "")
4525 (ashiftrt:SI (match_dup 2)
4530 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4532 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4533 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4537 if (!s_register_operand (operands[1], QImode))
4538 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4542 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4543 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4547 operands[1] = gen_lowpart (SImode, operands[1]);
4548 operands[2] = gen_reg_rtx (SImode);
;; ARM-mode (v4, pre-v6) sign-extending byte load: single ldrsb.
4552 (define_insn "*arm_extendqisi"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r")
4554 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4555 "TARGET_ARM && arm_arch4 && !arm_arch6"
4556 "ldr%(sb%)\\t%0, %1"
4557 [(set_attr "type" "load_byte")
4558 (set_attr "predicable" "yes")
4559 (set_attr "pool_range" "256")
4560 (set_attr "neg_pool_range" "244")]
;; v6 variant: register source (sxtb) or memory source (ldrsb).
;; The sign_extend line and output template are elided here.
4563 (define_insn "*arm_extendqisi_v6"
4564 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4566 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4567 "TARGET_ARM && arm_arch6"
4571 [(set_attr "type" "alu_shift,load_byte")
4572 (set_attr "predicable" "yes")
4573 (set_attr "pool_range" "*,256")
4574 (set_attr "neg_pool_range" "*,244")]
;; Fused sign-extend-byte-and-add: sxtab rd, rn, rm.
;; The insn condition line is elided in this excerpt.
4577 (define_insn "*arm_extendqisi2addsi"
4578 [(set (match_operand:SI 0 "s_register_operand" "=r")
4579 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4580 (match_operand:SI 2 "s_register_operand" "r")))]
4582 "sxtab%?\\t%0, %2, %1"
4583 [(set_attr "type" "alu_shift")
4584 (set_attr "insn" "xtab")
4585 (set_attr "predicable" "yes")]
;; Thumb-1 (pre-v6) sign-extending byte load.  ldrsb only takes a
;; [reg, reg] address; label refs use a plain ldr, and when the
;; destination overlaps the address a ldrb + lsl #24 + asr #24
;; sequence performs the extension in-register instead.
4588 (define_insn "*thumb1_extendqisi2"
4589 [(set (match_operand:SI 0 "register_operand" "=l,l")
4590 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4591 "TARGET_THUMB1 && !arm_arch6"
4595 rtx mem = XEXP (operands[1], 0);
4597 if (GET_CODE (mem) == CONST)
4598 mem = XEXP (mem, 0);
4600 if (GET_CODE (mem) == LABEL_REF)
4601 return \"ldr\\t%0, %1\";
4603 if (GET_CODE (mem) == PLUS
4604 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4605 return \"ldr\\t%0, %1\";
4607 if (which_alternative == 0)
4608 return \"ldrsb\\t%0, %1\";
4610 ops[0] = operands[0];
4612 if (GET_CODE (mem) == PLUS)
4614 rtx a = XEXP (mem, 0);
4615 rtx b = XEXP (mem, 1);
4620 if (GET_CODE (a) == REG)
4622 if (GET_CODE (b) == REG)
4623 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4624 else if (REGNO (a) == REGNO (ops[0]))
4626 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4627 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4628 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4631 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4635 gcc_assert (GET_CODE (b) == REG);
4636 if (REGNO (b) == REGNO (ops[0]))
4638 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4639 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4640 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4643 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4646 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4648 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4649 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4650 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4655 ops[2] = const0_rtx;
4657 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4661 [(set_attr "length" "2,6")
4662 (set_attr "type" "load_byte,load_byte")
4663 (set_attr "pool_range" "32,32")]
;; Thumb-1 v6 variant: alternative 0 uses sxtb for a register source;
;; memory alternatives mirror the pre-v6 pattern but replace the
;; lsl/asr pair with a single sxtb after the ldrb.
4666 (define_insn "*thumb1_extendqisi2_v6"
4667 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4668 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4669 "TARGET_THUMB1 && arm_arch6"
4675 if (which_alternative == 0)
4676 return \"sxtb\\t%0, %1\";
4678 mem = XEXP (operands[1], 0);
4680 if (GET_CODE (mem) == CONST)
4681 mem = XEXP (mem, 0);
4683 if (GET_CODE (mem) == LABEL_REF)
4684 return \"ldr\\t%0, %1\";
4686 if (GET_CODE (mem) == PLUS
4687 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4688 return \"ldr\\t%0, %1\";
4690 if (which_alternative == 0)
4691 return \"ldrsb\\t%0, %1\";
4693 ops[0] = operands[0];
4695 if (GET_CODE (mem) == PLUS)
4697 rtx a = XEXP (mem, 0);
4698 rtx b = XEXP (mem, 1);
4703 if (GET_CODE (a) == REG)
4705 if (GET_CODE (b) == REG)
4706 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4707 else if (REGNO (a) == REGNO (ops[0]))
4709 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4710 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4713 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4717 gcc_assert (GET_CODE (b) == REG);
4718 if (REGNO (b) == REGNO (ops[0]))
4720 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4721 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4724 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4727 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4729 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4730 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4735 ops[2] = const0_rtx;
4737 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4741 [(set_attr "length" "2,2,4")
4742 (set_attr "type" "alu_shift,load_byte,load_byte")
4743 (set_attr "pool_range" "*,32,32")]
;; SFmode -> DFmode float extension; requires 32-bit ISA plus hardware
;; floating point.  The expander body is elided in this excerpt.
4746 (define_expand "extendsfdf2"
4747 [(set (match_operand:DF 0 "s_register_operand" "")
4748 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4749 "TARGET_32BIT && TARGET_HARD_FLOAT"
4753 /* HFmode -> DFmode conversions have to go through SFmode. */
;; Widen half-float to double via an intermediate SFmode conversion,
;; then move the DFmode result into operand 0.
4754 (define_expand "extendhfdf2"
4755 [(set (match_operand:DF 0 "general_operand" "")
4756 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4761 op1 = convert_to_mode (SFmode, operands[1], 0);
4762 op1 = convert_to_mode (DFmode, op1, 0);
4763 emit_insn (gen_movdf (operands[0], op1));
4768 ;; Move insns (including loads and stores)
4770 ;; XXX Just some ideas about movti.
4771 ;; I don't think these are a good idea on the arm, there just aren't enough
4773 ;;(define_expand "loadti"
4774 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4775 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4778 ;;(define_expand "storeti"
4779 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4780 ;; (match_operand:TI 1 "s_register_operand" ""))]
4783 ;;(define_expand "movti"
4784 ;; [(set (match_operand:TI 0 "general_operand" "")
4785 ;; (match_operand:TI 1 "general_operand" ""))]
4791 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4792 ;; operands[1] = copy_to_reg (operands[1]);
4793 ;; if (GET_CODE (operands[0]) == MEM)
4794 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4795 ;; else if (GET_CODE (operands[1]) == MEM)
4796 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4800 ;; emit_insn (insn);
4804 ;; Recognize garbage generated above.
4807 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4808 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4812 ;; register mem = (which_alternative < 3);
4813 ;; register const char *template;
4815 ;; operands[mem] = XEXP (operands[mem], 0);
4816 ;; switch (which_alternative)
4818 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4819 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4820 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4821 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4822 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4823 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4825 ;; output_asm_insn (template, operands);
;; DImode move expander: while pseudos can still be created, force the
;; source into a register unless the destination is already a REG.
4829 (define_expand "movdi"
4830 [(set (match_operand:DI 0 "general_operand" "")
4831 (match_operand:DI 1 "general_operand" ""))]
4834 if (can_create_pseudo_p ())
4836 if (GET_CODE (operands[0]) != REG)
4837 operands[1] = force_reg (DImode, operands[1]);
;; ARM-mode DImode move; register/constant alternatives go through
;; output_move_double.  Excluded when VFP/Maverick or IWMMXT handle
;; DImode moves (per the visible condition fragment).
4842 (define_insn "*arm_movdi"
4843 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4844 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4846 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4848 && ( register_operand (operands[0], DImode)
4849 || register_operand (operands[1], DImode))"
4851 switch (which_alternative)
4858 return output_move_double (operands);
4861 [(set_attr "length" "8,12,16,8,8")
4862 (set_attr "type" "*,*,*,load2,store2")
4863 (set_attr "pool_range" "*,*,*,1020,*")
4864 (set_attr "neg_pool_range" "*,*,*,1008,*")]
;; Split a 64-bit constant move into two arm_split_constant calls on
;; the low and high SImode halves, when inlining the constant is cheap
;; enough.  NOTE(review): the define_split header line is elided here.
4868 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4869 (match_operand:ANY64 1 "const_double_operand" ""))]
4872 && (arm_const_double_inline_cost (operands[1])
4873 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4876 arm_split_constant (SET, SImode, curr_insn,
4877 INTVAL (gen_lowpart (SImode, operands[1])),
4878 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4879 arm_split_constant (SET, SImode, curr_insn,
4880 INTVAL (gen_highpart_mode (SImode,
4881 GET_MODE (operands[0]),
4883 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4888 ; If optimizing for size, or if we have load delay slots, then
4889 ; we want to split the constant into two separate operations.
4890 ; In both cases this may split a trivial part into a single data op
4891 ; leaving a single complex constant to load. We can also get longer
4892 ; offsets in a LDR which means we get better chances of sharing the pool
4893 ; entries. Finally, we can normally do a better job of scheduling
4894 ; LDR instructions than we can with LDM.
4895 ; This pattern will only match if the one above did not.
;; Fallback 64-bit constant split into two word moves (low then high).
;; NOTE(review): the define_split header line is elided here.
4897 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4898 (match_operand:ANY64 1 "const_double_operand" ""))]
4899 "TARGET_ARM && reload_completed
4900 && arm_const_double_by_parts (operands[1])"
4901 [(set (match_dup 0) (match_dup 1))
4902 (set (match_dup 2) (match_dup 3))]
4904 operands[2] = gen_highpart (SImode, operands[0]);
4905 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4907 operands[0] = gen_lowpart (SImode, operands[0]);
4908 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit register-to-register move into two word moves after
;; reload, swapping the order when the halves partially overlap so the
;; second move does not clobber its own source.
4913 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4914 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4915 "TARGET_EITHER && reload_completed"
4916 [(set (match_dup 0) (match_dup 1))
4917 (set (match_dup 2) (match_dup 3))]
4919 operands[2] = gen_highpart (SImode, operands[0]);
4920 operands[3] = gen_highpart (SImode, operands[1]);
4921 operands[0] = gen_lowpart (SImode, operands[0]);
4922 operands[1] = gen_lowpart (SImode, operands[1]);
4924 /* Handle a partial overlap. */
4925 if (rtx_equal_p (operands[0], operands[3]))
4927 rtx tmp0 = operands[0];
4928 rtx tmp1 = operands[1];
4930 operands[0] = operands[2];
4931 operands[1] = operands[3];
4938 ;; We can't actually do base+index doubleword loads if the index and
4939 ;; destination overlap. Split here so that we at least have chance to
;; Split an overlapping base+index DImode load: first add the two
;; address registers into the destination's low word, then load the
;; doubleword through that computed address.
4942 [(set (match_operand:DI 0 "s_register_operand" "")
4943 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4944 (match_operand:SI 2 "s_register_operand" ""))))]
4946 && reg_overlap_mentioned_p (operands[0], operands[1])
4947 && reg_overlap_mentioned_p (operands[0], operands[2])"
4949 (plus:SI (match_dup 1)
4952 (mem:DI (match_dup 4)))]
4954 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4958 ;;; ??? This should have alternatives for constants.
4959 ;;; ??? This was originally identical to the movdf_insn pattern.
4960 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4961 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DImode move.  Register-register copies pick the mov/add
;; ordering by which half overlaps; constants use mov+neg/asr
;; sequences; memory uses ldmia/stmia or split str pairs.
4962 (define_insn "*thumb1_movdi_insn"
4963 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4964 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4966 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4967 && ( register_operand (operands[0], DImode)
4968 || register_operand (operands[1], DImode))"
4971 switch (which_alternative)
4975 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4976 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4977 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4979 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4981 operands[1] = GEN_INT (- INTVAL (operands[1]));
4982 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4984 return \"ldmia\\t%1, {%0, %H0}\";
4986 return \"stmia\\t%0, {%1, %H1}\";
4988 return thumb_load_double_from_address (operands);
4990 operands[2] = gen_rtx_MEM (SImode,
4991 plus_constant (XEXP (operands[0], 0), 4));
4992 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4995 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4996 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4997 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5000 [(set_attr "length" "4,4,6,2,2,6,4,4")
5001 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5002 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
;; SImode move expander.  Handles: forcing mem=const/mem=mem into a
;; register; splitting awkward constants via arm_split_constant;
;; movw/movt pairs for symbols (TARGET_USE_MOVT); section-anchor
;; offsets; TLS symbol legitimization; and PIC address legitimization.
5005 (define_expand "movsi"
5006 [(set (match_operand:SI 0 "general_operand" "")
5007 (match_operand:SI 1 "general_operand" ""))]
5011 rtx base, offset, tmp;
5015 /* Everything except mem = const or mem = mem can be done easily. */
5016 if (GET_CODE (operands[0]) == MEM)
5017 operands[1] = force_reg (SImode, operands[1]);
5018 if (arm_general_register_operand (operands[0], SImode)
5019 && GET_CODE (operands[1]) == CONST_INT
5020 && !(const_ok_for_arm (INTVAL (operands[1]))
5021 || const_ok_for_arm (~INTVAL (operands[1]))))
5023 arm_split_constant (SET, SImode, NULL_RTX,
5024 INTVAL (operands[1]), operands[0], NULL_RTX,
5025 optimize && can_create_pseudo_p ());
5029 if (TARGET_USE_MOVT && !target_word_relocations
5030 && GET_CODE (operands[1]) == SYMBOL_REF
5031 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5033 arm_emit_movpair (operands[0], operands[1]);
5037 else /* TARGET_THUMB1... */
5039 if (can_create_pseudo_p ())
5041 if (GET_CODE (operands[0]) != REG)
5042 operands[1] = force_reg (SImode, operands[1]);
5046 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5048 split_const (operands[1], &base, &offset);
5049 if (GET_CODE (base) == SYMBOL_REF
5050 && !offset_within_block_p (base, INTVAL (offset)))
5052 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5053 emit_move_insn (tmp, base);
5054 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5059 /* Recognize the case where operand[1] is a reference to thread-local
5060 data and load its address to a register. */
5061 if (arm_tls_referenced_p (operands[1]))
5063 rtx tmp = operands[1];
5066 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5068 addend = XEXP (XEXP (tmp, 0), 1);
5069 tmp = XEXP (XEXP (tmp, 0), 0);
5072 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5073 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5075 tmp = legitimize_tls_address (tmp,
5076 !can_create_pseudo_p () ? operands[0] : 0);
5079 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5080 tmp = force_operand (tmp, operands[0]);
5085 && (CONSTANT_P (operands[1])
5086 || symbol_mentioned_p (operands[1])
5087 || label_mentioned_p (operands[1])))
5088 operands[1] = legitimize_pic_address (operands[1], SImode,
5089 (!can_create_pseudo_p ()
5096 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5097 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5098 ;; so this does not matter.
;; movt: write the upper 16 bits of a symbol address, keeping the low
;; half already in the (tied) input register.
5099 (define_insn "*arm_movt"
5100 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5101 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5102 (match_operand:SI 2 "general_operand" "i")))]
5104 "movt%?\t%0, #:upper16:%c2"
5105 [(set_attr "predicable" "yes")
5106 (set_attr "length" "4")]
;; ARM-mode SImode move (register/immediate/memory alternatives).
;; Output templates are elided in this excerpt.
5109 (define_insn "*arm_movsi_insn"
5110 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5111 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5112 "TARGET_ARM && ! TARGET_IWMMXT
5113 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5114 && ( register_operand (operands[0], SImode)
5115 || register_operand (operands[1], SImode))"
5123 [(set_attr "type" "*,*,*,*,load1,store1")
5124 (set_attr "predicable" "yes")
5125 (set_attr "pool_range" "*,*,*,*,4096,*")
5126 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split an SImode constant that neither the constant nor its
;; complement can encode directly; arm_split_constant emits the
;; replacement sequence itself, hence the (clobber (const_int 0)) stub.
;; NOTE(review): the define_split header line is elided here.
5130 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5131 (match_operand:SI 1 "const_int_operand" ""))]
5133 && (!(const_ok_for_arm (INTVAL (operands[1]))
5134 || const_ok_for_arm (~INTVAL (operands[1]))))"
5135 [(clobber (const_int 0))]
5137 arm_split_constant (SET, SImode, NULL_RTX,
5138 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
;; Thumb-1 SImode move.  Output templates are elided in this excerpt.
5143 (define_insn "*thumb1_movsi_insn"
5144 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5145 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5147 && ( register_operand (operands[0], SImode)
5148 || register_operand (operands[1], SImode))"
5159 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5160 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5161 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
;; Thumb-1 split for "J" constants (negatable immediates): load the
;; negated value, then negate in place.
;; NOTE(review): the define_split header line is elided here.
5165 [(set (match_operand:SI 0 "register_operand" "")
5166 (match_operand:SI 1 "const_int_operand" ""))]
5167 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5168 [(set (match_dup 0) (match_dup 1))
5169 (set (match_dup 0) (neg:SI (match_dup 0)))]
5170 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
;; Thumb-1 split for "K" constants (a shifted 8-bit value): find the
;; smallest left shift that reproduces the constant from a byte, then
;; emit mov + lsl.  The loop bound 25 covers byte positions 0..24.
5174 [(set (match_operand:SI 0 "register_operand" "")
5175 (match_operand:SI 1 "const_int_operand" ""))]
5176 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5177 [(set (match_dup 0) (match_dup 1))
5178 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5181 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5182 unsigned HOST_WIDE_INT mask = 0xff;
5185 for (i = 0; i < 25; i++)
5186 if ((val & (mask << i)) == val)
5189 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5193 operands[1] = GEN_INT (val >> i);
5194 operands[2] = GEN_INT (i);
5198 ;; When generating pic, we need to load the symbol offset into a register.
5199 ;; So that the optimizer does not confuse this with a normal symbol load
5200 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5201 ;; since that is the only type of relocation we can use.
5203 ;; The rather odd constraints on the following are to force reload to leave
5204 ;; the insn alone, and to force the minipool generation pass to then move
5205 ;; the GOT symbol to memory.
;; Load a PIC symbol offset from the constant pool (ARM mode).
5207 (define_insn "pic_load_addr_arm"
5208 [(set (match_operand:SI 0 "s_register_operand" "=r")
5209 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5210 "TARGET_ARM && flag_pic"
5212 [(set_attr "type" "load1")
5213 (set (attr "pool_range") (const_int 4096))
5214 (set (attr "neg_pool_range") (const_int 4084))]
;; Thumb-1 counterpart of pic_load_addr_arm, with the smaller pool range.
5217 (define_insn "pic_load_addr_thumb1"
5218 [(set (match_operand:SI 0 "s_register_operand" "=l")
5219 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5220 "TARGET_THUMB1 && flag_pic"
5222 [(set_attr "type" "load1")
5223 (set (attr "pool_range") (const_int 1024))]
;; Emit the "LPICn:" label and add the PC to finish a PIC address
;; computation (Thumb form: pc reads as . + 4).
5226 (define_insn "pic_add_dot_plus_four"
5227 [(set (match_operand:SI 0 "register_operand" "=r")
5228 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5230 (match_operand 2 "" "")]
5234 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5235 INTVAL (operands[2]));
5236 return \"add\\t%0, %|pc\";
5238 [(set_attr "length" "2")]
;; ARM-mode form: pc reads as . + 8, hence the separate pattern.
5241 (define_insn "pic_add_dot_plus_eight"
5242 [(set (match_operand:SI 0 "register_operand" "=r")
5243 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5245 (match_operand 2 "" "")]
5249 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5250 INTVAL (operands[2]));
5251 return \"add%?\\t%0, %|pc, %1\";
5253 [(set_attr "predicable" "yes")]
;; Fused form of pic_add_dot_plus_eight followed by a load, produced by
;; the peephole below: ldr rd, [pc, rn] at the LPIC label.
5256 (define_insn "tls_load_dot_plus_eight"
5257 [(set (match_operand:SI 0 "register_operand" "+r")
5258 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5260 (match_operand 2 "" "")]
5264 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5265 INTVAL (operands[2]));
5266 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5268 [(set_attr "predicable" "yes")]
5271 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5272 ;; followed by a load. These sequences can be crunched down to
5273 ;; tls_load_dot_plus_eight by a peephole.
;; The peephole itself: fires only when the intermediate address
;; register dies after the load.
5276 [(set (match_operand:SI 0 "register_operand" "")
5277 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5279 (match_operand 1 "" "")]
5281 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5282 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5284 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP: load a PIC offset through the GOT base register.
5291 (define_insn "pic_offset_arm"
5292 [(set (match_operand:SI 0 "register_operand" "=r")
5293 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5294 (unspec:SI [(match_operand:SI 2 "" "X")]
5295 UNSPEC_PIC_OFFSET))))]
5296 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5297 "ldr%?\\t%0, [%1,%2]"
5298 [(set_attr "type" "load1")]
;; Re-establish the PIC register after a longjmp; r3 is known-dead
;; (clobbered by set/longjmp) so it serves as the scratch.
5301 (define_expand "builtin_setjmp_receiver"
5302 [(label_ref (match_operand 0 "" ""))]
5306 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5308 if (arm_pic_register != INVALID_REGNUM)
5309 arm_load_pic_register (1UL << 3);
5313 ;; If copying one reg to another we can set the condition codes according to
5314 ;; its value. Such a move is common after a return from subroutine and the
5315 ;; result is being tested against zero.
;; Combined move + compare-with-zero, setting the CC register.
;; The compare's second arm and output templates are elided here.
5317 (define_insn "*movsi_compare0"
5318 [(set (reg:CC CC_REGNUM)
5319 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5321 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5327 [(set_attr "conds" "set")]
5330 ;; Subroutine to store a half word from a register into memory.
5331 ;; Operand 0 is the source register (HImode)
5332 ;; Operand 1 is the destination address in a register (SImode)
5334 ;; In both this routine and the next, we must be careful not to spill
5335 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5336 ;; can generate unrecognizable rtl.
;; Little-endian byte-wise halfword store: low byte first, then shift
;; the value right 8 and store the high byte at address + 1.
5338 (define_expand "storehi"
5339 [;; store the low byte
5340 (set (match_operand 1 "" "") (match_dup 3))
5341 ;; extract the high byte
5343 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5344 ;; store the high byte
5345 (set (match_dup 4) (match_dup 5))]
5349 rtx op1 = operands[1];
5350 rtx addr = XEXP (op1, 0);
5351 enum rtx_code code = GET_CODE (addr);
5353 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5355 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5357 operands[4] = adjust_address (op1, QImode, 1);
5358 operands[1] = adjust_address (operands[1], QImode, 0);
5359 operands[3] = gen_lowpart (QImode, operands[0]);
5360 operands[0] = gen_lowpart (SImode, operands[0]);
5361 operands[2] = gen_reg_rtx (SImode);
5362 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian counterpart of storehi: byte order of the two stores is
;; reversed relative to the little-endian expander above.
5366 (define_expand "storehi_bigend"
5367 [(set (match_dup 4) (match_dup 3))
5369 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5370 (set (match_operand 1 "" "") (match_dup 5))]
5374 rtx op1 = operands[1];
5375 rtx addr = XEXP (op1, 0);
5376 enum rtx_code code = GET_CODE (addr);
5378 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5380 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5382 operands[4] = adjust_address (op1, QImode, 1);
5383 operands[1] = adjust_address (operands[1], QImode, 0);
5384 operands[3] = gen_lowpart (QImode, operands[0]);
5385 operands[0] = gen_lowpart (SImode, operands[0]);
5386 operands[2] = gen_reg_rtx (SImode);
5387 operands[5] = gen_lowpart (QImode, operands[2]);
5391 ;; Subroutine to store a half word integer constant into memory.
;; Store a constant halfword as two byte stores, materializing each
;; byte in a register first and sharing one register when both bytes
;; are equal; byte order follows BYTES_BIG_ENDIAN.
5392 (define_expand "storeinthi"
5393 [(set (match_operand 0 "" "")
5394 (match_operand 1 "" ""))
5395 (set (match_dup 3) (match_dup 2))]
5399 HOST_WIDE_INT value = INTVAL (operands[1]);
5400 rtx addr = XEXP (operands[0], 0);
5401 rtx op0 = operands[0];
5402 enum rtx_code code = GET_CODE (addr);
5404 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5406 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5408 operands[1] = gen_reg_rtx (SImode);
5409 if (BYTES_BIG_ENDIAN)
5411 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5412 if ((value & 255) == ((value >> 8) & 255))
5413 operands[2] = operands[1];
5416 operands[2] = gen_reg_rtx (SImode);
5417 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5422 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5423 if ((value & 255) == ((value >> 8) & 255))
5424 operands[2] = operands[1];
5427 operands[2] = gen_reg_rtx (SImode);
5428 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5432 operands[3] = adjust_address (op0, QImode, 1);
5433 operands[0] = adjust_address (operands[0], QImode, 0);
5434 operands[2] = gen_lowpart (QImode, operands[2]);
5435 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction halfword store (strh exists on v4+); just force
;; the source into a register.
5439 (define_expand "storehi_single_op"
5440 [(set (match_operand:HI 0 "memory_operand" "")
5441 (match_operand:HI 1 "general_operand" ""))]
5442 "TARGET_32BIT && arm_arch4"
5444 if (!s_register_operand (operands[1], HImode))
5445 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; General HImode move expander.  Visible cases handled in the C body:
;;  - mem destination: dispatch to storehi_single_op (ARMv4+),
;;    storeinthi (constant source), or storehi/storehi_bigend;
;;  - constant source: sign/zero-massage the 16-bit value into an
;;    SImode pseudo so const_ok_for_arm succeeds, then take lowpart;
;;  - mem source on ARMv4+: load via zero_extendhisi2 into SImode reg;
;;  - pre-ARMv4: emulate halfword load with an aligned SImode load and
;;    shift (alignment proved via REGNO_POINTER_ALIGN), else movhi_bytes;
;;  - reload of a large constant into a register via an SImode subreg;
;;  - separate TARGET_THUMB2 and TARGET_THUMB1 paths, including
;;    fix-ups for invalid SP/virtual-register relative addresses.
;; NOTE(review): many interior lines (closing braces, DONE statements)
;; are elided in this extracted chunk.
5449 (define_expand "movhi"
5450 [(set (match_operand:HI 0 "general_operand" "")
5451 (match_operand:HI 1 "general_operand" ""))]
5456 if (can_create_pseudo_p ())
5458 if (GET_CODE (operands[0]) == MEM)
5462 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5465 if (GET_CODE (operands[1]) == CONST_INT)
5466 emit_insn (gen_storeinthi (operands[0], operands[1]));
5469 if (GET_CODE (operands[1]) == MEM)
5470 operands[1] = force_reg (HImode, operands[1]);
5471 if (BYTES_BIG_ENDIAN)
5472 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5474 emit_insn (gen_storehi (operands[1], operands[0]));
5478 /* Sign extend a constant, and keep it in an SImode reg.  */
5479 else if (GET_CODE (operands[1]) == CONST_INT)
5481 rtx reg = gen_reg_rtx (SImode);
5482 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5484 /* If the constant is already valid, leave it alone.  */
5485 if (!const_ok_for_arm (val))
5487 /* If setting all the top bits will make the constant
5488 loadable in a single instruction, then set them.
5489 Otherwise, sign extend the number.  */
5491 if (const_ok_for_arm (~(val | ~0xffff)))
5493 else if (val & 0x8000)
5497 emit_insn (gen_movsi (reg, GEN_INT (val)));
5498 operands[1] = gen_lowpart (HImode, reg);
5500 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5501 && GET_CODE (operands[1]) == MEM)
5503 rtx reg = gen_reg_rtx (SImode);
5505 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5506 operands[1] = gen_lowpart (HImode, reg);
5508 else if (!arm_arch4)
5510 if (GET_CODE (operands[1]) == MEM)
5513 rtx offset = const0_rtx;
5514 rtx reg = gen_reg_rtx (SImode);
5516 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5517 || (GET_CODE (base) == PLUS
5518 && (GET_CODE (offset = XEXP (base, 1))
5520 && ((INTVAL(offset) & 1) != 1)
5521 && GET_CODE (base = XEXP (base, 0)) == REG))
5522 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5526 new_rtx = widen_memory_access (operands[1], SImode,
5527 ((INTVAL (offset) & ~3)
5528 - INTVAL (offset)));
5529 emit_insn (gen_movsi (reg, new_rtx));
5530 if (((INTVAL (offset) & 2) != 0)
5531 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5533 rtx reg2 = gen_reg_rtx (SImode);
5535 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5540 emit_insn (gen_movhi_bytes (reg, operands[1]));
5542 operands[1] = gen_lowpart (HImode, reg);
5546 /* Handle loading a large integer during reload.  */
5547 else if (GET_CODE (operands[1]) == CONST_INT
5548 && !const_ok_for_arm (INTVAL (operands[1]))
5549 && !const_ok_for_arm (~INTVAL (operands[1])))
5551 /* Writing a constant to memory needs a scratch, which should
5552 be handled with SECONDARY_RELOADs.  */
5553 gcc_assert (GET_CODE (operands[0]) == REG);
5555 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5556 emit_insn (gen_movsi (operands[0], operands[1]));
5560 else if (TARGET_THUMB2)
5562 /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
5563 if (can_create_pseudo_p ())
5565 if (GET_CODE (operands[0]) != REG)
5566 operands[1] = force_reg (HImode, operands[1]);
5567 /* Zero extend a constant, and keep it in an SImode reg.  */
5568 else if (GET_CODE (operands[1]) == CONST_INT)
5570 rtx reg = gen_reg_rtx (SImode);
5571 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5573 emit_insn (gen_movsi (reg, GEN_INT (val)));
5574 operands[1] = gen_lowpart (HImode, reg);
5578 else /* TARGET_THUMB1 */
5580 if (can_create_pseudo_p ())
5582 if (GET_CODE (operands[1]) == CONST_INT)
5584 rtx reg = gen_reg_rtx (SImode);
5586 emit_insn (gen_movsi (reg, operands[1]));
5587 operands[1] = gen_lowpart (HImode, reg);
5590 /* ??? We shouldn't really get invalid addresses here, but this can
5591 happen if we are passed a SP (never OK for HImode/QImode) or
5592 virtual register (also rejected as illegitimate for HImode/QImode)
5593 relative address.  */
5594 /* ??? This should perhaps be fixed elsewhere, for instance, in
5595 fixup_stack_1, by checking for other kinds of invalid addresses,
5596 e.g. a bare reference to a virtual register.  This may confuse the
5597 alpha though, which must handle this case differently.  */
5598 if (GET_CODE (operands[0]) == MEM
5599 && !memory_address_p (GET_MODE (operands[0]),
5600 XEXP (operands[0], 0)))
5602 = replace_equiv_address (operands[0],
5603 copy_to_reg (XEXP (operands[0], 0)));
5605 if (GET_CODE (operands[1]) == MEM
5606 && !memory_address_p (GET_MODE (operands[1]),
5607 XEXP (operands[1], 0)))
5609 = replace_equiv_address (operands[1],
5610 copy_to_reg (XEXP (operands[1], 0)));
5612 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5614 rtx reg = gen_reg_rtx (SImode);
5616 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5617 operands[1] = gen_lowpart (HImode, reg);
5620 if (GET_CODE (operands[0]) == MEM)
5621 operands[1] = force_reg (HImode, operands[1]);
5623 else if (GET_CODE (operands[1]) == CONST_INT
5624 && !satisfies_constraint_I (operands[1]))
5626 /* Handle loading a large integer during reload.  */
5628 /* Writing a constant to memory needs a scratch, which should
5629 be handled with SECONDARY_RELOADs.  */
5630 gcc_assert (GET_CODE (operands[0]) == REG);
5632 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5633 emit_insn (gen_movsi (operands[0], operands[1]));
;; Thumb-1 HImode move.  Alternatives: reg-reg (via add #0), ldrh from
;; memory, strh to memory, hi/lo register moves, and an immediate move.
;; The memory-load case specially handles an SP-based index address by
;; first copying SP into the destination register (SP cannot be an
;; index register for ldrh).
5640 (define_insn "*thumb1_movhi_insn"
5641 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5642 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5644 && ( register_operand (operands[0], HImode)
5645 || register_operand (operands[1], HImode))"
5647 switch (which_alternative)
5649 case 0: return \"add %0, %1, #0\";
5650 case 2: return \"strh %1, %0\";
5651 case 3: return \"mov %0, %1\";
5652 case 4: return \"mov %0, %1\";
5653 case 5: return \"mov %0, %1\";
5654 default: gcc_unreachable ();
5656 /* The stack pointer can end up being taken as an index register.
5657 Catch this case here and deal with it.  */
5658 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5659 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5660 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5663 ops[0] = operands[0];
5664 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5666 output_asm_insn (\"mov %0, %1\", ops);
5668 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5671 return \"ldrh %0, %1\";
5673 [(set_attr "length" "2,4,2,2,2,2")
5674 (set_attr "type" "*,load1,store1,*,*,*")]
;; Load a HImode value as two QImode loads plus shift/or, for targets
;; without a halfword load.  Operands 2/3 are the two byte temporaries;
;; 4/5 select which byte is the high part according to endianness.
5678 (define_expand "movhi_bytes"
5679 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5681 (zero_extend:SI (match_dup 6)))
5682 (set (match_operand:SI 0 "" "")
5683 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5688 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5690 mem1 = change_address (operands[1], QImode, addr);
5691 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5692 operands[0] = gen_lowpart (SImode, operands[0]);
5694 operands[2] = gen_reg_rtx (SImode);
5695 operands[3] = gen_reg_rtx (SImode);
5698 if (BYTES_BIG_ENDIAN)
5700 operands[4] = operands[2];
5701 operands[5] = operands[3];
5705 operands[4] = operands[3];
5706 operands[5] = operands[2];
;; Big-endian HImode load: rotate the containing word then arithmetic
;; shift right by 16 to place the halfword, taking the low part.
5711 (define_expand "movhi_bigend"
5713 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5716 (ashiftrt:SI (match_dup 2) (const_int 16)))
5717 (set (match_operand:HI 0 "s_register_operand" "")
5721 operands[2] = gen_reg_rtx (SImode);
5722 operands[3] = gen_reg_rtx (SImode);
5723 operands[4] = gen_lowpart (HImode, operands[3]);
;; ARMv4+ HImode move insn: mov/mvn immediate forms plus strh/ldrh.
;; The condition rejects constants not loadable by a single mov or mvn.
5727 ;; Pattern to recognize insn generated default case above
5728 (define_insn "*movhi_insn_arch4"
5729 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5730 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5733 && (GET_CODE (operands[1]) != CONST_INT
5734 || const_ok_for_arm (INTVAL (operands[1]))
5735 || const_ok_for_arm (~INTVAL (operands[1])))"
5737 mov%?\\t%0, %1\\t%@ movhi
5738 mvn%?\\t%0, #%B1\\t%@ movhi
5739 str%(h%)\\t%1, %0\\t%@ movhi
5740 ldr%(h%)\\t%0, %1\\t%@ movhi"
5741 [(set_attr "type" "*,*,store1,load1")
5742 (set_attr "predicable" "yes")
5743 (set_attr "pool_range" "*,*,*,256")
5744 (set_attr "neg_pool_range" "*,*,*,244")]
;; Register-only HImode move (mov or mvn of an immediate) used by the
;; byte-at-a-time path; no memory alternatives.
5747 (define_insn "*movhi_bytes"
5748 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5749 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5752 mov%?\\t%0, %1\\t%@ movhi
5753 mvn%?\\t%0, #%B1\\t%@ movhi"
5754 [(set_attr "predicable" "yes")]
;; Thumb HImode store with a DImode scratch clobber.  Only the easy
;; case (valid strict address, low source register) is implemented;
;; other cases remain unhandled (see the XXX note).
5757 (define_expand "thumb_movhi_clobber"
5758 [(set (match_operand:HI 0 "memory_operand" "")
5759 (match_operand:HI 1 "register_operand" ""))
5760 (clobber (match_operand:DI 2 "register_operand" ""))]
5763 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5764 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5766 emit_insn (gen_movhi (operands[0], operands[1]));
5769 /* XXX Fixme, need to handle other cases here as well.  */
;; Secondary reload patterns for HImode.  Each takes the memory
;; operand, the register, and a DImode scratch (two words, in case the
;; address also needs reloading), and defers to the arm_/thumb_
;; reload helpers in arm.c.  Note both directions call
;; thumb_reload_out_hi on Thumb — that helper handles both in the
;; arm.c implementation.
5774 ;; We use a DImode scratch because we may occasionally need an additional
5775 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5776 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5777 (define_expand "reload_outhi"
5778 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5779 (match_operand:HI 1 "s_register_operand" "r")
5780 (match_operand:DI 2 "s_register_operand" "=&l")])]
5783 arm_reload_out_hi (operands);
5785 thumb_reload_out_hi (operands);
5790 (define_expand "reload_inhi"
5791 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5792 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5793 (match_operand:DI 2 "s_register_operand" "=&r")])]
5797 arm_reload_in_hi (operands);
5799 thumb_reload_out_hi (operands);
;; General QImode move expander.  Constants are first loaded into an
;; SImode pseudo (then lowpart-ed); Thumb additionally fixes up
;; invalid SP/virtual-register addresses, widens memory loads through
;; zero_extendqisi2 when optimizing, forces register sources for
;; memory destinations, and handles large-constant reloads via an
;; SImode subreg.
5803 (define_expand "movqi"
5804 [(set (match_operand:QI 0 "general_operand" "")
5805 (match_operand:QI 1 "general_operand" ""))]
5808 /* Everything except mem = const or mem = mem can be done easily */
5810 if (can_create_pseudo_p ())
5812 if (GET_CODE (operands[1]) == CONST_INT)
5814 rtx reg = gen_reg_rtx (SImode);
5816 emit_insn (gen_movsi (reg, operands[1]));
5817 operands[1] = gen_lowpart (QImode, reg);
5822 /* ??? We shouldn't really get invalid addresses here, but this can
5823 happen if we are passed a SP (never OK for HImode/QImode) or
5824 virtual register (also rejected as illegitimate for HImode/QImode)
5825 relative address.  */
5826 /* ??? This should perhaps be fixed elsewhere, for instance, in
5827 fixup_stack_1, by checking for other kinds of invalid addresses,
5828 e.g. a bare reference to a virtual register.  This may confuse the
5829 alpha though, which must handle this case differently.  */
5830 if (GET_CODE (operands[0]) == MEM
5831 && !memory_address_p (GET_MODE (operands[0]),
5832 XEXP (operands[0], 0)))
5834 = replace_equiv_address (operands[0],
5835 copy_to_reg (XEXP (operands[0], 0)));
5836 if (GET_CODE (operands[1]) == MEM
5837 && !memory_address_p (GET_MODE (operands[1]),
5838 XEXP (operands[1], 0)))
5840 = replace_equiv_address (operands[1],
5841 copy_to_reg (XEXP (operands[1], 0)));
5844 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5846 rtx reg = gen_reg_rtx (SImode);
5848 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5849 operands[1] = gen_lowpart (QImode, reg);
5852 if (GET_CODE (operands[0]) == MEM)
5853 operands[1] = force_reg (QImode, operands[1]);
5855 else if (TARGET_THUMB
5856 && GET_CODE (operands[1]) == CONST_INT
5857 && !satisfies_constraint_I (operands[1]))
5859 /* Handle loading a large integer during reload.  */
5861 /* Writing a constant to memory needs a scratch, which should
5862 be handled with SECONDARY_RELOADs.  */
5863 gcc_assert (GET_CODE (operands[0]) == REG);
5865 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5866 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insns.  The ARM variant allows mov/mvn immediates plus
;; ldrb/strb; the Thumb-1 variant mirrors *thumb1_movhi_insn's
;; alternative layout (low regs, memory, hi/lo moves, immediate).
5873 (define_insn "*arm_movqi_insn"
5874 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5875 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5877 && ( register_operand (operands[0], QImode)
5878 || register_operand (operands[1], QImode))"
5884 [(set_attr "type" "*,*,load1,store1")
5885 (set_attr "predicable" "yes")]
5888 (define_insn "*thumb1_movqi_insn"
5889 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5890 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5892 && ( register_operand (operands[0], QImode)
5893 || register_operand (operands[1], QImode))"
5901 [(set_attr "length" "2")
5902 (set_attr "type" "*,load1,store1,*,*,*")
5903 (set_attr "pool_range" "*,32,*,*,*,*")]
;; HFmode (__fp16) move expander: force the source into a register
;; whenever the destination is memory (or, on Thumb-1, any non-reg).
5907 (define_expand "movhf"
5908 [(set (match_operand:HF 0 "general_operand" "")
5909 (match_operand:HF 1 "general_operand" ""))]
5914 if (GET_CODE (operands[0]) == MEM)
5915 operands[1] = force_reg (HFmode, operands[1]);
5917 else /* TARGET_THUMB1 */
5919 if (can_create_pseudo_p ())
5921 if (GET_CODE (operands[0]) != REG)
5922 operands[1] = force_reg (HFmode, operands[1]);
;; __fp16 move for 32-bit cores without the NEON fp16 extension.
;; Alternatives: ldrh load, strh store, register move, and constant
;; materialization — the constant's 16-bit image is built either with
;; a single movw (Thumb-2 capable cores) or a mov of the high byte
;; or-ed with the low byte.
5928 (define_insn "*arm32_movhf"
5929 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5930 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5931 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_NEON_FP16)
5932 && ( s_register_operand (operands[0], HFmode)
5933 || s_register_operand (operands[1], HFmode))"
5935 switch (which_alternative)
5937 case 0: /* ARM register from memory */
5938 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5939 case 1: /* memory from ARM register */
5940 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5941 case 2: /* ARM register from ARM register */
5942 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5943 case 3: /* ARM register from constant */
5949 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5950 bits = real_to_target (NULL, &r, HFmode);
5951 ops[0] = operands[0];
5952 ops[1] = GEN_INT (bits);
5953 ops[2] = GEN_INT (bits & 0xff00);
5954 ops[3] = GEN_INT (bits & 0x00ff);
5956 if (arm_arch_thumb2)
5957 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5959 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5966 [(set_attr "conds" "unconditional")
5967 (set_attr "type" "load1,store1,*,*")
5968 (set_attr "length" "4,4,4,8")
5969 (set_attr "predicable" "yes")
;; Thumb-1 __fp16 move.  The memory-load alternative distinguishes a
;; constant-pool reference (label or label+offset address), which must
;; use a word ldr, from an ordinary halfword ldrh.
5973 (define_insn "*thumb1_movhf"
5974 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
5975 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
5977 && ( s_register_operand (operands[0], HFmode)
5978 || s_register_operand (operands[1], HFmode))"
5980 switch (which_alternative)
5985 gcc_assert (GET_CODE(operands[1]) == MEM);
5986 addr = XEXP (operands[1], 0);
5987 if (GET_CODE (addr) == LABEL_REF
5988 || (GET_CODE (addr) == CONST
5989 && GET_CODE (XEXP (addr, 0)) == PLUS
5990 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
5991 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
5993 /* Constant pool entry.  */
5994 return \"ldr\\t%0, %1\";
5996 return \"ldrh\\t%0, %1\";
5998 case 2: return \"strh\\t%1, %0\";
5999 default: return \"mov\\t%0, %1\";
6002 [(set_attr "length" "2")
6003 (set_attr "type" "*,load1,store1,*,*")
6004 (set_attr "pool_range" "*,1020,*,*,*")]
;; SFmode move expander plus a split that turns an SF constant moved
;; into a core register into the equivalent SImode move (bit pattern
;; via gen_lowpart), avoiding a constant-pool load.
6007 (define_expand "movsf"
6008 [(set (match_operand:SF 0 "general_operand" "")
6009 (match_operand:SF 1 "general_operand" ""))]
6014 if (GET_CODE (operands[0]) == MEM)
6015 operands[1] = force_reg (SFmode, operands[1]);
6017 else /* TARGET_THUMB1 */
6019 if (can_create_pseudo_p ())
6021 if (GET_CODE (operands[0]) != REG)
6022 operands[1] = force_reg (SFmode, operands[1]);
6028 ;; Transform a floating-point move of a constant into a core register into
6029 ;; an SImode operation.
6031 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6032 (match_operand:SF 1 "immediate_operand" ""))]
6035 && GET_CODE (operands[1]) == CONST_DOUBLE"
6036 [(set (match_dup 2) (match_dup 3))]
6038 operands[2] = gen_lowpart (SImode, operands[0]);
6039 operands[3] = gen_lowpart (SImode, operands[1]);
6040 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode moves.  The ARM variant is plain mov/ldr/str on
;; core registers; the Thumb-1 variant adds post-increment (">")
;; load/store alternatives and a 1020-byte literal-pool range.
6045 (define_insn "*arm_movsf_soft_insn"
6046 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6047 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6049 && TARGET_SOFT_FLOAT
6050 && (GET_CODE (operands[0]) != MEM
6051 || register_operand (operands[1], SFmode))"
6054 ldr%?\\t%0, %1\\t%@ float
6055 str%?\\t%1, %0\\t%@ float"
6056 [(set_attr "length" "4,4,4")
6057 (set_attr "predicable" "yes")
6058 (set_attr "type" "*,load1,store1")
6059 (set_attr "pool_range" "*,4096,*")
6060 (set_attr "neg_pool_range" "*,4084,*")]
6063 ;;; ??? This should have alternatives for constants.
6064 (define_insn "*thumb1_movsf_insn"
6065 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6066 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6068 && ( register_operand (operands[0], SFmode)
6069 || register_operand (operands[1], SFmode))"
6078 [(set_attr "length" "2")
6079 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6080 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
;; DFmode move expander: force the source into a register whenever the
;; destination is memory (or, on Thumb, any non-reg).
6083 (define_expand "movdf"
6084 [(set (match_operand:DF 0 "general_operand" "")
6085 (match_operand:DF 1 "general_operand" ""))]
6090 if (GET_CODE (operands[0]) == MEM)
6091 operands[1] = force_reg (DFmode, operands[1]);
6093 else /* TARGET_THUMB */
6095 if (can_create_pseudo_p ())
6097 if (GET_CODE (operands[0]) != REG)
6098 operands[1] = force_reg (DFmode, operands[1]);
;; Secondary reload for storing a DFmode value held in core registers.
;; The SImode scratch (operand 2) holds a computed address for the
;; addressing modes the store patterns cannot take directly:
;;  - POST_INC/PRE_DEC are re-expressed as a DImode move;
;;  - PRE_INC pre-adjusts the base by 8;
;;  - POST_DEC stores then subtracts 8 afterwards;
;;  - other reg+offset forms compute base+offset into the scratch.
6104 ;; Reloading a df mode value stored in integer regs to memory can require a
6106 (define_expand "reload_outdf"
6107 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6108 (match_operand:DF 1 "s_register_operand" "r")
6109 (match_operand:SI 2 "s_register_operand" "=&r")]
6113 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6116 operands[2] = XEXP (operands[0], 0);
6117 else if (code == POST_INC || code == PRE_DEC)
6119 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6120 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6121 emit_insn (gen_movdi (operands[0], operands[1]));
6124 else if (code == PRE_INC)
6126 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6128 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6131 else if (code == POST_DEC)
6132 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6134 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6135 XEXP (XEXP (operands[0], 0), 1)));
6137 emit_insn (gen_rtx_SET (VOIDmode,
6138 replace_equiv_address (operands[0], operands[2]),
6141 if (code == POST_DEC)
6142 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move on ARM: register pairs and double-word
;; memory, emitted via output_move_double; Da/Db/Dc distinguish
;; constants by how many instructions they need (lengths 8/12/16).
6148 (define_insn "*movdf_soft_insn"
6149 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6150 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6151 "TARGET_ARM && TARGET_SOFT_FLOAT
6152 && ( register_operand (operands[0], DFmode)
6153 || register_operand (operands[1], DFmode))"
6155 switch (which_alternative)
6162 return output_move_double (operands);
6165 [(set_attr "length" "8,12,16,8,8")
6166 (set_attr "type" "*,*,*,load2,store2")
6167 (set_attr "pool_range" "1020")
6168 (set_attr "neg_pool_range" "1008")]
;; Thumb-1 DFmode move.  Register-pair moves order the two word moves
;; so an overlapping destination is not clobbered (%0/%H0 are the low
;; and high words); memory forms use ldmia/stmia or split str pairs.
6171 ;;; ??? This should have alternatives for constants.
6172 ;;; ??? This was originally identical to the movdi_insn pattern.
6173 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6174 ;;; thumb_reorg with a memory reference.
6175 (define_insn "*thumb_movdf_insn"
6176 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6177 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6179 && ( register_operand (operands[0], DFmode)
6180 || register_operand (operands[1], DFmode))"
6182 switch (which_alternative)
6186 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6187 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6188 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6190 return \"ldmia\\t%1, {%0, %H0}\";
6192 return \"stmia\\t%0, {%1, %H1}\";
6194 return thumb_load_double_from_address (operands);
6196 operands[2] = gen_rtx_MEM (SImode,
6197 plus_constant (XEXP (operands[0], 0), 4));
6198 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6201 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6202 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6203 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6206 [(set_attr "length" "4,2,2,6,4,4")
6207 (set_attr "type" "*,load2,store2,load2,store2,*")
6208 (set_attr "pool_range" "*,*,*,1020,*,*")]
;; XFmode (FPA extended-precision) move: hard-float FPA targets only;
;; memory destinations force the source into a register.
6211 (define_expand "movxf"
6212 [(set (match_operand:XF 0 "general_operand" "")
6213 (match_operand:XF 1 "general_operand" ""))]
6214 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6216 if (GET_CODE (operands[0]) == MEM
6217 operands[1] = force_reg (XFmode, operands[1]);
;; load_multiple expander: only consecutive fixed core registers
;; (count 2..14, fitting below LAST_ARM_REGNUM) are supported;
;; anything else must FAIL so the generic code falls back.  The actual
;; parallel is built by arm_gen_load_multiple.
6223 ;; load- and store-multiple insns
6224 ;; The arm can load/store any set of registers, provided that they are in
6225 ;; ascending order; but that is beyond GCC so stick with what it knows.
6227 (define_expand "load_multiple"
6228 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6229 (match_operand:SI 1 "" ""))
6230 (use (match_operand:SI 2 "" ""))])]
6233 HOST_WIDE_INT offset = 0;
6235 /* Support only fixed point registers.  */
6236 if (GET_CODE (operands[2]) != CONST_INT
6237 || INTVAL (operands[2]) > 14
6238 || INTVAL (operands[2]) < 2
6239 || GET_CODE (operands[1]) != MEM
6240 || GET_CODE (operands[0]) != REG
6241 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6242 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM
6246 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6247 force_reg (SImode, XEXP (operands[1], 0)),
6248 TRUE, FALSE, operands[1], &offset);
;; Write-back load-multiple insns: ldm rN!, {...} for 4, 3 and 2
;; registers, plus a Thumb-1 variant of the 4-register form.  The
;; match_parallel's first element updates the base register by the
;; transfer size; XVECLEN checks pin the element count.
6251 ;; Load multiple with write-back
6253 (define_insn "*ldmsi_postinc4"
6254 [(match_parallel 0 "load_multiple_operation"
6255 [(set (match_operand:SI 1 "s_register_operand" "=r")
6256 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6258 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6259 (mem:SI (match_dup 2)))
6260 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6261 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6262 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6263 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6264 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6265 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6266 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6267 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6268 [(set_attr "type" "load4")
6269 (set_attr "predicable" "yes")]
6272 (define_insn "*ldmsi_postinc4_thumb1"
6273 [(match_parallel 0 "load_multiple_operation"
6274 [(set (match_operand:SI 1 "s_register_operand" "=l")
6275 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6277 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6278 (mem:SI (match_dup 2)))
6279 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6280 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6281 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6282 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6283 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6284 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6285 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6286 "ldmia\\t%1!, {%3, %4, %5, %6}"
6287 [(set_attr "type" "load4")]
6290 (define_insn "*ldmsi_postinc3"
6291 [(match_parallel 0 "load_multiple_operation"
6292 [(set (match_operand:SI 1 "s_register_operand" "=r")
6293 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6295 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6296 (mem:SI (match_dup 2)))
6297 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6298 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6299 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6300 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6301 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6302 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6303 [(set_attr "type" "load3")
6304 (set_attr "predicable" "yes")]
6307 (define_insn "*ldmsi_postinc2"
6308 [(match_parallel 0 "load_multiple_operation"
6309 [(set (match_operand:SI 1 "s_register_operand" "=r")
6310 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6312 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6313 (mem:SI (match_dup 2)))
6314 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6315 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6316 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6317 "ldm%(ia%)\\t%1!, {%3, %4}"
6318 [(set_attr "type" "load2")
6319 (set_attr "predicable" "yes")]
;; Plain (no write-back) load-multiple insns for 4, 3 and 2
;; consecutive word loads from a base register: ldm rN, {...}.
6322 ;; Ordinary load multiple
6324 (define_insn "*ldmsi4"
6325 [(match_parallel 0 "load_multiple_operation"
6326 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6327 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6328 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6329 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6330 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6331 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6332 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6333 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6334 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6335 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6336 [(set_attr "type" "load4")
6337 (set_attr "predicable" "yes")]
6340 (define_insn "*ldmsi3"
6341 [(match_parallel 0 "load_multiple_operation"
6342 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6343 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6344 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6345 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6346 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6347 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6348 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6349 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6350 [(set_attr "type" "load3")
6351 (set_attr "predicable" "yes")]
6354 (define_insn "*ldmsi2"
6355 [(match_parallel 0 "load_multiple_operation"
6356 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6357 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6358 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6359 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6360 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6361 "ldm%(ia%)\\t%1, {%2, %3}"
6362 [(set_attr "type" "load2")
6363 (set_attr "predicable" "yes")]
;; store_multiple expander: mirror of load_multiple with source and
;; destination roles swapped; same fixed-register restrictions, built
;; by arm_gen_store_multiple.
6366 (define_expand "store_multiple"
6367 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6368 (match_operand:SI 1 "" ""))
6369 (use (match_operand:SI 2 "" ""))])]
6372 HOST_WIDE_INT offset = 0;
6374 /* Support only fixed point registers.  */
6375 if (GET_CODE (operands[2]) != CONST_INT
6376 || INTVAL (operands[2]) > 14
6377 || INTVAL (operands[2]) < 2
6378 || GET_CODE (operands[1]) != REG
6379 || GET_CODE (operands[0]) != MEM
6380 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6381 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM
6385 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6386 force_reg (SImode, XEXP (operands[0], 0)),
6387 TRUE, FALSE, operands[0], &offset);
;; Write-back store-multiple insns: stm rN!, {...} for 4, 3 and 2
;; registers, plus a Thumb-1 variant of the 4-register form.
6390 ;; Store multiple with write-back
6392 (define_insn "*stmsi_postinc4"
6393 [(match_parallel 0 "store_multiple_operation"
6394 [(set (match_operand:SI 1 "s_register_operand" "=r")
6395 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6397 (set (mem:SI (match_dup 2))
6398 (match_operand:SI 3 "arm_hard_register_operand" ""))
6399 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6400 (match_operand:SI 4 "arm_hard_register_operand" ""))
6401 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6402 (match_operand:SI 5 "arm_hard_register_operand" ""))
6403 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6404 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6405 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6406 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6407 [(set_attr "predicable" "yes")
6408 (set_attr "type" "store4")]
6411 (define_insn "*stmsi_postinc4_thumb1"
6412 [(match_parallel 0 "store_multiple_operation"
6413 [(set (match_operand:SI 1 "s_register_operand" "=l")
6414 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6416 (set (mem:SI (match_dup 2))
6417 (match_operand:SI 3 "arm_hard_register_operand" ""))
6418 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6419 (match_operand:SI 4 "arm_hard_register_operand" ""))
6420 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6421 (match_operand:SI 5 "arm_hard_register_operand" ""))
6422 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6423 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6424 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6425 "stmia\\t%1!, {%3, %4, %5, %6}"
6426 [(set_attr "type" "store4")]
6429 (define_insn "*stmsi_postinc3"
6430 [(match_parallel 0 "store_multiple_operation"
6431 [(set (match_operand:SI 1 "s_register_operand" "=r")
6432 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6434 (set (mem:SI (match_dup 2))
6435 (match_operand:SI 3 "arm_hard_register_operand" ""))
6436 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6437 (match_operand:SI 4 "arm_hard_register_operand" ""))
6438 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6439 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6440 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6441 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6442 [(set_attr "predicable" "yes")
6443 (set_attr "type" "store3")]
6446 (define_insn "*stmsi_postinc2"
6447 [(match_parallel 0 "store_multiple_operation"
6448 [(set (match_operand:SI 1 "s_register_operand" "=r")
6449 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6451 (set (mem:SI (match_dup 2))
6452 (match_operand:SI 3 "arm_hard_register_operand" ""))
6453 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6454 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6455 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6456 "stm%(ia%)\\t%1!, {%3, %4}"
6457 [(set_attr "predicable" "yes")
6458 (set_attr "type" "store2")]
;; Plain (no write-back) store-multiple insns for 4, 3 and 2
;; consecutive word stores from a base register: stm rN, {...}.
6461 ;; Ordinary store multiple
6463 (define_insn "*stmsi4"
6464 [(match_parallel 0 "store_multiple_operation"
6465 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6466 (match_operand:SI 2 "arm_hard_register_operand" ""))
6467 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6468 (match_operand:SI 3 "arm_hard_register_operand" ""))
6469 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6470 (match_operand:SI 4 "arm_hard_register_operand" ""))
6471 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6472 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6473 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6474 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6475 [(set_attr "predicable" "yes")
6476 (set_attr "type" "store4")]
6479 (define_insn "*stmsi3"
6480 [(match_parallel 0 "store_multiple_operation"
6481 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6482 (match_operand:SI 2 "arm_hard_register_operand" ""))
6483 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6484 (match_operand:SI 3 "arm_hard_register_operand" ""))
6485 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6486 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6487 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6488 "stm%(ia%)\\t%1, {%2, %3, %4}"
6489 [(set_attr "predicable" "yes")
6490 (set_attr "type" "store3")]
6493 (define_insn "*stmsi2"
6494 [(match_parallel 0 "store_multiple_operation"
6495 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6496 (match_operand:SI 2 "arm_hard_register_operand" ""))
6497 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6498 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6499 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6500 "stm%(ia%)\\t%1, {%2, %3}"
6501 [(set_attr "predicable" "yes")
6502 (set_attr "type" "store2")]
;; Block-move (memcpy) expander.  Operand 2 is the byte count,
;; operand 3 the alignment.  32-bit cores use arm_gen_movmemqi;
;; Thumb-1 accepts only 4-byte-aligned copies of at most 48 bytes
;; and otherwise must fall back.
6505 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6506 ;; We could let this apply for blocks of less than this, but it clobbers so
6507 ;; many registers that there is then probably a better way.
6509 (define_expand "movmemqi"
6510 [(match_operand:BLK 0 "general_operand" "")
6511 (match_operand:BLK 1 "general_operand" "")
6512 (match_operand:SI 2 "const_int_operand" "")
6513 (match_operand:SI 3 "const_int_operand" "")]
6518 if (arm_gen_movmemqi (operands))
6522 else /* TARGET_THUMB1 */
6524 if ( INTVAL (operands[3]) != 4
6525 || INTVAL (operands[2]) > 48)
6528 thumb_expand_movmemqi (operands);
6534 ;; Thumb block-move insns
6536 (define_insn "movmem12b"
6537 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6538 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6539 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6540 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6541 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6542 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6543 (set (match_operand:SI 0 "register_operand" "=l")
6544 (plus:SI (match_dup 2) (const_int 12)))
6545 (set (match_operand:SI 1 "register_operand" "=l")
6546 (plus:SI (match_dup 3) (const_int 12)))
6547 (clobber (match_scratch:SI 4 "=&l"))
6548 (clobber (match_scratch:SI 5 "=&l"))
6549 (clobber (match_scratch:SI 6 "=&l"))]
6551 "* return thumb_output_move_mem_multiple (3, operands);"
6552 [(set_attr "length" "4")
6553 ; This isn't entirely accurate... It loads as well, but in terms of
6554 ; scheduling the following insn it is better to consider it as a store
6555 (set_attr "type" "store3")]
6558 (define_insn "movmem8b"
6559 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6560 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6561 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6562 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6563 (set (match_operand:SI 0 "register_operand" "=l")
6564 (plus:SI (match_dup 2) (const_int 8)))
6565 (set (match_operand:SI 1 "register_operand" "=l")
6566 (plus:SI (match_dup 3) (const_int 8)))
6567 (clobber (match_scratch:SI 4 "=&l"))
6568 (clobber (match_scratch:SI 5 "=&l"))]
6570 "* return thumb_output_move_mem_multiple (2, operands);"
6571 [(set_attr "length" "4")
6572 ; This isn't entirely accurate... It loads as well, but in terms of
6573 ; scheduling the following insn it is better to consider it as a store
6574 (set_attr "type" "store2")]
6579 ;; Compare & branch insns
6580 ;; The range calculations are based as follows:
6581 ;; For forward branches, the address calculation returns the address of
6582 ;; the next instruction. This is 2 beyond the branch instruction.
6583 ;; For backward branches, the address calculation returns the address of
6584 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6585 ;; instruction for the shortest sequence, and 4 before the branch instruction
6586 ;; if we have to jump around an unconditional branch.
6587 ;; To the basic branch range the PC offset must be added (this is +4).
6588 ;; So for forward branches we have
6589 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6590 ;; And for backward branches we have
6591 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6593 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6594 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Expand SImode compare-and-branch.  For 32-bit targets the visible code
;; legitimizes op2 (force_reg unless arm_add_operand) and emits cbranch_cc;
;; on the Thumb-1 path a negatable constant goes through cbranchsi4_scratch,
;; otherwise op2 is forced into a register for a plain compare.
;; NOTE(review): some lines of this expander are absent from this extract,
;; so the branch structure between the two paths is partly inferred.
6596 (define_expand "cbranchsi4"
6597 [(set (pc) (if_then_else
6598 (match_operator 0 "arm_comparison_operator"
6599 [(match_operand:SI 1 "s_register_operand" "")
6600 (match_operand:SI 2 "nonmemory_operand" "")])
6601 (label_ref (match_operand 3 "" ""))
6603 "TARGET_THUMB1 || TARGET_32BIT"
6607 if (!arm_add_operand (operands[2], SImode))
6608 operands[2] = force_reg (SImode, operands[2]);
6609 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6613 if (thumb1_cmpneg_operand (operands[2], SImode))
6615 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6616 operands[3], operands[0]));
6619 if (!thumb1_cmp_operand (operands[2], SImode))
6620 operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch for 32-bit hard-float targets: delegates the
;; whole job to cbranch_cc, which materializes the CC-register compare.
6623 (define_expand "cbranchsf4"
6624 [(set (pc) (if_then_else
6625 (match_operator 0 "arm_comparison_operator"
6626 [(match_operand:SF 1 "s_register_operand" "")
6627 (match_operand:SF 2 "arm_float_compare_operand" "")])
6628 (label_ref (match_operand 3 "" ""))
6630 "TARGET_32BIT && TARGET_HARD_FLOAT"
6631 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6632 operands[3])); DONE;"
;; DFmode compare-and-branch; identical strategy to cbranchsf4 above —
;; everything is funnelled through cbranch_cc.
6635 (define_expand "cbranchdf4"
6636 [(set (pc) (if_then_else
6637 (match_operator 0 "arm_comparison_operator"
6638 [(match_operand:DF 1 "s_register_operand" "")
6639 (match_operand:DF 2 "arm_float_compare_operand" "")])
6640 (label_ref (match_operand 3 "" ""))
6642 "TARGET_32BIT && TARGET_HARD_FLOAT"
6643 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6644 operands[3])); DONE;"
6647 ;; this uses the Cirrus DI compare instruction
;; DImode compare-and-branch, only for Cirrus Maverick hard-float: both
;; operands must already live in Cirrus FP registers; lowering again goes
;; through cbranch_cc.
6648 (define_expand "cbranchdi4"
6649 [(set (pc) (if_then_else
6650 (match_operator 0 "arm_comparison_operator"
6651 [(match_operand:DI 1 "cirrus_fp_register" "")
6652 (match_operand:DI 2 "cirrus_fp_register" "")])
6653 (label_ref (match_operand 3 "" ""))
6655 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6656 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6657 operands[3])); DONE;"
;; Thumb-1 cmp + conditional branch.  The asm selected depends on the
;; computed "length" attribute: 4 = short b<cond>, 6 = inverted-condition
;; branch around a plain 'b' (long jump), otherwise branch around 'bl'
;; (far jump; clobbers lr).  Ranges below match the block comment preceding
;; these patterns (-250..256 short, -2040..2048 long).
6660 (define_insn "*cbranchsi4_insn"
6661 [(set (pc) (if_then_else
6662 (match_operator 0 "arm_comparison_operator"
6663 [(match_operand:SI 1 "s_register_operand" "l,*h")
6664 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6665 (label_ref (match_operand 3 "" ""))
6669 output_asm_insn (\"cmp\\t%1, %2\", operands);
6671 switch (get_attr_length (insn))
6673 case 4: return \"b%d0\\t%l3\";
6674 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6675 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6678 [(set (attr "far_jump")
6680 (eq_attr "length" "8")
6681 (const_string "yes")
6682 (const_string "no")))
6683 (set (attr "length")
6685 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6686 (le (minus (match_dup 3) (pc)) (const_int 256)))
6689 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6690 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Thumb-1 compare-and-branch against a negatable constant: instead of cmp,
;; performs "add scratch, %1, #-op2" (op2 matches thumb1_cmpneg_operand, so
;; %n2 is encodable) to set the flags, then branches as in *cbranchsi4_insn.
;; op0 is only a flag-setting scratch and is clobbered.
6695 (define_insn "cbranchsi4_scratch"
6696 [(set (pc) (if_then_else
6697 (match_operator 4 "arm_comparison_operator"
6698 [(match_operand:SI 1 "s_register_operand" "l,0")
6699 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6700 (label_ref (match_operand 3 "" ""))
6702 (clobber (match_scratch:SI 0 "=l,l"))]
6705 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6707 switch (get_attr_length (insn))
6709 case 4: return \"b%d4\\t%l3\";
6710 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6711 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6714 [(set (attr "far_jump")
6716 (eq_attr "length" "8")
6717 (const_string "yes")
6718 (const_string "no")))
6719 (set (attr "length")
6721 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6722 (le (minus (match_dup 3) (pc)) (const_int 256)))
6725 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6726 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Combined move + compare-against-zero + branch for Thumb-1.  Alternatives:
;; 0 = value already in dest (cmp #0), 1 = "sub %0,%1,#0" moves and sets
;; flags in one go, 2 = cmp then mov to a hi reg, 3 = cmp then str to memory.
;; Alternatives 2/3 cost one extra insn, hence the "- 2" length adjustment
;; in the switch and the offset branch ranges (-248/-2038) further down.
6730 (define_insn "*movsi_cbranchsi4"
6733 (match_operator 3 "arm_comparison_operator"
6734 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6736 (label_ref (match_operand 2 "" ""))
6738 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6742 if (which_alternative == 0)
6743 output_asm_insn (\"cmp\t%0, #0\", operands);
6744 else if (which_alternative == 1)
6745 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6748 output_asm_insn (\"cmp\t%1, #0\", operands);
6749 if (which_alternative == 2)
6750 output_asm_insn (\"mov\t%0, %1\", operands);
6752 output_asm_insn (\"str\t%1, %0\", operands);
6754 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6756 case 4: return \"b%d3\\t%l2\";
6757 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6758 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6761 [(set (attr "far_jump")
6763 (ior (and (gt (symbol_ref ("which_alternative"))
6765 (eq_attr "length" "8"))
6766 (eq_attr "length" "10"))
6767 (const_string "yes")
6768 (const_string "no")))
6769 (set (attr "length")
6771 (le (symbol_ref ("which_alternative"))
6774 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6775 (le (minus (match_dup 2) (pc)) (const_int 256)))
6778 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6779 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6783 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6784 (le (minus (match_dup 2) (pc)) (const_int 256)))
6787 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6788 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; Equality branch of %1 against -(%2): uses cmn (compare-negated) so the
;; negation needs no extra insn.  Restricted to equality_operator because
;; cmn's overflow/carry behaviour only matches cmp-with-negation for eq/ne.
6793 (define_insn "*negated_cbranchsi4"
6796 (match_operator 0 "equality_operator"
6797 [(match_operand:SI 1 "s_register_operand" "l")
6798 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6799 (label_ref (match_operand 3 "" ""))
6803 output_asm_insn (\"cmn\\t%1, %2\", operands);
6804 switch (get_attr_length (insn))
6806 case 4: return \"b%d0\\t%l3\";
6807 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6808 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6811 [(set (attr "far_jump")
6813 (eq_attr "length" "8")
6814 (const_string "yes")
6815 (const_string "no")))
6816 (set (attr "length")
6818 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6819 (le (minus (match_dup 3) (pc)) (const_int 256)))
6822 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6823 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on a single bit of %1: shifts the tested bit (position op2) into
;; the sign position with "lsl scratch, %1, #(31 - op2)", which sets N/Z, and
;; branches on the equality condition.  Scratch op4 holds the shifted value.
6828 (define_insn "*tbit_cbranch"
6831 (match_operator 0 "equality_operator"
6832 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6834 (match_operand:SI 2 "const_int_operand" "i"))
6836 (label_ref (match_operand 3 "" ""))
6838 (clobber (match_scratch:SI 4 "=l"))]
6843 op[0] = operands[4];
6844 op[1] = operands[1];
6845 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6847 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6848 switch (get_attr_length (insn))
6850 case 4: return \"b%d0\\t%l3\";
6851 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6852 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6855 [(set (attr "far_jump")
6857 (eq_attr "length" "8")
6858 (const_string "yes")
6859 (const_string "no")))
6860 (set (attr "length")
6862 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6863 (le (minus (match_dup 3) (pc)) (const_int 256)))
6866 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6867 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on the low op2 bits of %1 being (non)zero: discards the upper bits
;; with "lsl scratch, %1, #(32 - op2)" — the flag-setting shift leaves Z set
;; exactly when all tested low bits are zero.  Scratch op4 is clobbered.
6872 (define_insn "*tlobits_cbranch"
6875 (match_operator 0 "equality_operator"
6876 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6877 (match_operand:SI 2 "const_int_operand" "i")
6880 (label_ref (match_operand 3 "" ""))
6882 (clobber (match_scratch:SI 4 "=l"))]
6887 op[0] = operands[4];
6888 op[1] = operands[1];
6889 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6891 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6892 switch (get_attr_length (insn))
6894 case 4: return \"b%d0\\t%l3\";
6895 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6896 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6899 [(set (attr "far_jump")
6901 (eq_attr "length" "8")
6902 (const_string "yes")
6903 (const_string "no")))
6904 (set (attr "length")
6906 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6907 (le (minus (match_dup 3) (pc)) (const_int 256)))
6910 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6911 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; Branch on (%0 AND %1) being (non)zero, using tst so neither source
;; register is modified; "%l" marks the AND as commutative.
6916 (define_insn "*tstsi3_cbranch"
6919 (match_operator 3 "equality_operator"
6920 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6921 (match_operand:SI 1 "s_register_operand" "l"))
6923 (label_ref (match_operand 2 "" ""))
6928 output_asm_insn (\"tst\\t%0, %1\", operands);
6929 switch (get_attr_length (insn))
6931 case 4: return \"b%d3\\t%l2\";
6932 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6933 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6936 [(set (attr "far_jump")
6938 (eq_attr "length" "8")
6939 (const_string "yes")
6940 (const_string "no")))
6941 (set (attr "length")
6943 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6944 (le (minus (match_dup 2) (pc)) (const_int 256)))
6947 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6948 (le (minus (match_dup 2) (pc)) (const_int 2048)))
;; AND + store-result + equality branch.  Alternative 0 ands in place into a
;; lo-reg dest; alternatives 1-3 and into the lo scratch op1 then mov to a hi
;; reg or str to memory (one extra insn — see the "- 2" length adjustment and
;; the narrowed branch ranges for the non-zero alternatives).
6953 (define_insn "*andsi3_cbranch"
6956 (match_operator 5 "equality_operator"
6957 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6958 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6960 (label_ref (match_operand 4 "" ""))
6962 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6963 (and:SI (match_dup 2) (match_dup 3)))
6964 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6968 if (which_alternative == 0)
6969 output_asm_insn (\"and\\t%0, %3\", operands);
6970 else if (which_alternative == 1)
6972 output_asm_insn (\"and\\t%1, %3\", operands);
6973 output_asm_insn (\"mov\\t%0, %1\", operands);
6977 output_asm_insn (\"and\\t%1, %3\", operands);
6978 output_asm_insn (\"str\\t%1, %0\", operands);
6981 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6983 case 4: return \"b%d5\\t%l4\";
6984 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6985 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6988 [(set (attr "far_jump")
6990 (ior (and (eq (symbol_ref ("which_alternative"))
6992 (eq_attr "length" "8"))
6993 (eq_attr "length" "10"))
6994 (const_string "yes")
6995 (const_string "no")))
6996 (set (attr "length")
6998 (eq (symbol_ref ("which_alternative"))
7001 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7002 (le (minus (match_dup 4) (pc)) (const_int 256)))
7005 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7006 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7010 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7011 (le (minus (match_dup 4) (pc)) (const_int 256)))
7014 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7015 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; OR + equality branch where the OR result is dead: orr into the clobbered
;; scratch (tied to op1) purely for its flag-setting effect, then branch.
7020 (define_insn "*orrsi3_cbranch_scratch"
7023 (match_operator 4 "equality_operator"
7024 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7025 (match_operand:SI 2 "s_register_operand" "l"))
7027 (label_ref (match_operand 3 "" ""))
7029 (clobber (match_scratch:SI 0 "=l"))]
7033 output_asm_insn (\"orr\\t%0, %2\", operands);
7034 switch (get_attr_length (insn))
7036 case 4: return \"b%d4\\t%l3\";
7037 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7038 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7041 [(set (attr "far_jump")
7043 (eq_attr "length" "8")
7044 (const_string "yes")
7045 (const_string "no")))
7046 (set (attr "length")
7048 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7049 (le (minus (match_dup 3) (pc)) (const_int 256)))
7052 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7053 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; OR + store-result + equality branch; structurally identical to
;; *andsi3_cbranch above with orr in place of and (alt 0 in-place lo reg,
;; alts 1-3 via scratch then mov-to-hi or str-to-mem, costing 2 extra bytes).
7058 (define_insn "*orrsi3_cbranch"
7061 (match_operator 5 "equality_operator"
7062 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7063 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7065 (label_ref (match_operand 4 "" ""))
7067 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7068 (ior:SI (match_dup 2) (match_dup 3)))
7069 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7073 if (which_alternative == 0)
7074 output_asm_insn (\"orr\\t%0, %3\", operands);
7075 else if (which_alternative == 1)
7077 output_asm_insn (\"orr\\t%1, %3\", operands);
7078 output_asm_insn (\"mov\\t%0, %1\", operands);
7082 output_asm_insn (\"orr\\t%1, %3\", operands);
7083 output_asm_insn (\"str\\t%1, %0\", operands);
7086 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7088 case 4: return \"b%d5\\t%l4\";
7089 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7090 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7093 [(set (attr "far_jump")
7095 (ior (and (eq (symbol_ref ("which_alternative"))
7097 (eq_attr "length" "8"))
7098 (eq_attr "length" "10"))
7099 (const_string "yes")
7100 (const_string "no")))
7101 (set (attr "length")
7103 (eq (symbol_ref ("which_alternative"))
7106 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7107 (le (minus (match_dup 4) (pc)) (const_int 256)))
7110 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7111 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7115 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7116 (le (minus (match_dup 4) (pc)) (const_int 256)))
7119 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7120 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; XOR + equality branch with dead result: eor into the clobbered scratch
;; (tied to op1) just to set the flags, then branch.
7125 (define_insn "*xorsi3_cbranch_scratch"
7128 (match_operator 4 "equality_operator"
7129 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7130 (match_operand:SI 2 "s_register_operand" "l"))
7132 (label_ref (match_operand 3 "" ""))
7134 (clobber (match_scratch:SI 0 "=l"))]
7138 output_asm_insn (\"eor\\t%0, %2\", operands);
7139 switch (get_attr_length (insn))
7141 case 4: return \"b%d4\\t%l3\";
7142 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7143 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7146 [(set (attr "far_jump")
7148 (eq_attr "length" "8")
7149 (const_string "yes")
7150 (const_string "no")))
7151 (set (attr "length")
7153 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7154 (le (minus (match_dup 3) (pc)) (const_int 256)))
7157 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7158 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; XOR + store-result + equality branch; same alternative layout as the
;; and/orr variants above, with eor as the flag-setting op.
7163 (define_insn "*xorsi3_cbranch"
7166 (match_operator 5 "equality_operator"
7167 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7168 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7170 (label_ref (match_operand 4 "" ""))
7172 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7173 (xor:SI (match_dup 2) (match_dup 3)))
7174 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7178 if (which_alternative == 0)
7179 output_asm_insn (\"eor\\t%0, %3\", operands);
7180 else if (which_alternative == 1)
7182 output_asm_insn (\"eor\\t%1, %3\", operands);
7183 output_asm_insn (\"mov\\t%0, %1\", operands);
7187 output_asm_insn (\"eor\\t%1, %3\", operands);
7188 output_asm_insn (\"str\\t%1, %0\", operands);
7191 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7193 case 4: return \"b%d5\\t%l4\";
7194 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7195 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7198 [(set (attr "far_jump")
7200 (ior (and (eq (symbol_ref ("which_alternative"))
7202 (eq_attr "length" "8"))
7203 (eq_attr "length" "10"))
7204 (const_string "yes")
7205 (const_string "no")))
7206 (set (attr "length")
7208 (eq (symbol_ref ("which_alternative"))
7211 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7212 (le (minus (match_dup 4) (pc)) (const_int 256)))
7215 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7216 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7220 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7221 (le (minus (match_dup 4) (pc)) (const_int 256)))
7224 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7225 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; BIC (and-not) + equality branch with dead result: "bic scratch, %2" sets
;; the flags for the branch; the scratch (tied to op1) is clobbered.
7230 (define_insn "*bicsi3_cbranch_scratch"
7233 (match_operator 4 "equality_operator"
7234 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7235 (match_operand:SI 1 "s_register_operand" "0"))
7237 (label_ref (match_operand 3 "" ""))
7239 (clobber (match_scratch:SI 0 "=l"))]
7243 output_asm_insn (\"bic\\t%0, %2\", operands);
7244 switch (get_attr_length (insn))
7246 case 4: return \"b%d4\\t%l3\";
7247 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7248 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7251 [(set (attr "far_jump")
7253 (eq_attr "length" "8")
7254 (const_string "yes")
7255 (const_string "no")))
7256 (set (attr "length")
7258 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7259 (le (minus (match_dup 3) (pc)) (const_int 256)))
7262 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7263 (le (minus (match_dup 3) (pc)) (const_int 2048)))
;; BIC + store-result + equality branch.  Five alternatives: 0 in-place lo
;; reg; 1-2 via scratch then mov (ok even into a lo reg — the mov re-sets
;; flags but equality is preserved, as the inline comment notes); 3-4 via
;; scratch then str to memory.  Non-zero alternatives cost 2 extra bytes.
7268 (define_insn "*bicsi3_cbranch"
7271 (match_operator 5 "equality_operator"
7272 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7273 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7275 (label_ref (match_operand 4 "" ""))
7277 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7278 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7279 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7283 if (which_alternative == 0)
7284 output_asm_insn (\"bic\\t%0, %3\", operands);
7285 else if (which_alternative <= 2)
7287 output_asm_insn (\"bic\\t%1, %3\", operands);
7288 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7289 conditions again, since we're only testing for equality. */
7290 output_asm_insn (\"mov\\t%0, %1\", operands);
7294 output_asm_insn (\"bic\\t%1, %3\", operands);
7295 output_asm_insn (\"str\\t%1, %0\", operands);
7298 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7300 case 4: return \"b%d5\\t%l4\";
7301 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7302 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7305 [(set (attr "far_jump")
7307 (ior (and (eq (symbol_ref ("which_alternative"))
7309 (eq_attr "length" "8"))
7310 (eq_attr "length" "10"))
7311 (const_string "yes")
7312 (const_string "no")))
7313 (set (attr "length")
7315 (eq (symbol_ref ("which_alternative"))
7318 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7319 (le (minus (match_dup 4) (pc)) (const_int 256)))
7322 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7323 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7327 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7328 (le (minus (match_dup 4) (pc)) (const_int 256)))
7331 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7332 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Decrement-and-branch: op0 = op2 - 1, branching on op2's (in)equality
;; with a value implied by the matched condition.  The handwritten cond[]
;; rewrites the operator into a compare of op2 against 1 so the flags set by
;; the sub (or the cmp semantics) can drive b%d0/b%D0.  Alternatives follow
;; the usual pattern: lo-reg dest, hi-reg via mov, or memory via str (the
;; inline comments explain why reload needs the hi-reg/memory alternatives).
7337 (define_insn "*cbranchne_decr1"
7339 (if_then_else (match_operator 3 "equality_operator"
7340 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7342 (label_ref (match_operand 4 "" ""))
7344 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7345 (plus:SI (match_dup 2) (const_int -1)))
7346 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7351 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7353 VOIDmode, operands[2], const1_rtx);
7354 cond[1] = operands[4];
7356 if (which_alternative == 0)
7357 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7358 else if (which_alternative == 1)
7360 /* We must provide an alternative for a hi reg because reload
7361 cannot handle output reloads on a jump instruction, but we
7362 can't subtract into that. Fortunately a mov from lo to hi
7363 does not clobber the condition codes. */
7364 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7365 output_asm_insn (\"mov\\t%0, %1\", operands);
7369 /* Similarly, but the target is memory. */
7370 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7371 output_asm_insn (\"str\\t%1, %0\", operands);
7374 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7377 output_asm_insn (\"b%d0\\t%l1\", cond);
7380 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7381 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7383 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7384 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7388 [(set (attr "far_jump")
7390 (ior (and (eq (symbol_ref ("which_alternative"))
7392 (eq_attr "length" "8"))
7393 (eq_attr "length" "10"))
7394 (const_string "yes")
7395 (const_string "no")))
7396 (set_attr_alternative "length"
7400 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7401 (le (minus (match_dup 4) (pc)) (const_int 256)))
7404 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7405 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7410 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7411 (le (minus (match_dup 4) (pc)) (const_int 256)))
7414 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7415 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7420 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7421 (le (minus (match_dup 4) (pc)) (const_int 256)))
7424 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7425 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7430 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7431 (le (minus (match_dup 4) (pc)) (const_int 256)))
7434 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7435 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Add + store-result + branch, restricted to EQ/NE/GE/LT (the conditions a
;; flag-setting add/sub can answer directly without carry/overflow subtlety).
;; A negative constant addend is emitted as "sub ..., #%n2".  Alternatives
;; 0-2 add straight into the dest; 3 adds into the scratch then movs to a hi
;; reg; 4-5 add into the scratch then str to memory (extra insn => "- 2"
;; length adjustment and the narrowed -248/-2038 ranges).
7440 (define_insn "*addsi3_cbranch"
7443 (match_operator 4 "arm_comparison_operator"
7445 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7446 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7448 (label_ref (match_operand 5 "" ""))
7451 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7452 (plus:SI (match_dup 2) (match_dup 3)))
7453 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7455 && (GET_CODE (operands[4]) == EQ
7456 || GET_CODE (operands[4]) == NE
7457 || GET_CODE (operands[4]) == GE
7458 || GET_CODE (operands[4]) == LT)"
7464 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7465 cond[1] = operands[2];
7466 cond[2] = operands[3];
7468 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7469 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7471 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7473 if (which_alternative >= 3
7474 && which_alternative < 4)
7475 output_asm_insn (\"mov\\t%0, %1\", operands);
7476 else if (which_alternative >= 4)
7477 output_asm_insn (\"str\\t%1, %0\", operands);
7479 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7482 return \"b%d4\\t%l5\";
7484 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7486 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7490 [(set (attr "far_jump")
7492 (ior (and (lt (symbol_ref ("which_alternative"))
7494 (eq_attr "length" "8"))
7495 (eq_attr "length" "10"))
7496 (const_string "yes")
7497 (const_string "no")))
7498 (set (attr "length")
7500 (lt (symbol_ref ("which_alternative"))
7503 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7504 (le (minus (match_dup 5) (pc)) (const_int 256)))
7507 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7508 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7512 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7513 (le (minus (match_dup 5) (pc)) (const_int 256)))
7516 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7517 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Add-and-branch with dead sum, EQ/NE/GE/LT only.  Per alternative: 0 uses
;; "cmp %1, #-op2" (J constant), 1 uses cmn for a register addend, 2-3
;; actually add/sub into the scratch just to set the flags.
7522 (define_insn "*addsi3_cbranch_scratch"
7525 (match_operator 3 "arm_comparison_operator"
7527 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7528 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7530 (label_ref (match_operand 4 "" ""))
7532 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7534 && (GET_CODE (operands[3]) == EQ
7535 || GET_CODE (operands[3]) == NE
7536 || GET_CODE (operands[3]) == GE
7537 || GET_CODE (operands[3]) == LT)"
7540 switch (which_alternative)
7543 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7546 output_asm_insn (\"cmn\t%1, %2\", operands);
7549 if (INTVAL (operands[2]) < 0)
7550 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7552 output_asm_insn (\"add\t%0, %1, %2\", operands);
7555 if (INTVAL (operands[2]) < 0)
7556 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7558 output_asm_insn (\"add\t%0, %0, %2\", operands);
7562 switch (get_attr_length (insn))
7565 return \"b%d3\\t%l4\";
7567 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7569 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7573 [(set (attr "far_jump")
7575 (eq_attr "length" "8")
7576 (const_string "yes")
7577 (const_string "no")))
7578 (set (attr "length")
7580 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7581 (le (minus (match_dup 4) (pc)) (const_int 256)))
7584 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7585 (le (minus (match_dup 4) (pc)) (const_int 2048)))
;; Subtract + store-result + branch, EQ/NE/GE/LT only.  Alternative 0 subs
;; straight into the lo-reg dest; 1 goes via the scratch then mov to a hi reg
;; (safe — the inline comment notes mov lo->hi does not clobber flags);
;; 2-3 go via the scratch then str to memory.  Non-zero alternatives add an
;; insn, hence the "- 2" in the length switch.
7590 (define_insn "*subsi3_cbranch"
7593 (match_operator 4 "arm_comparison_operator"
7595 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7596 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7598 (label_ref (match_operand 5 "" ""))
7600 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7601 (minus:SI (match_dup 2) (match_dup 3)))
7602 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7604 && (GET_CODE (operands[4]) == EQ
7605 || GET_CODE (operands[4]) == NE
7606 || GET_CODE (operands[4]) == GE
7607 || GET_CODE (operands[4]) == LT)"
7610 if (which_alternative == 0)
7611 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7612 else if (which_alternative == 1)
7614 /* We must provide an alternative for a hi reg because reload
7615 cannot handle output reloads on a jump instruction, but we
7616 can't subtract into that. Fortunately a mov from lo to hi
7617 does not clobber the condition codes. */
7618 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7619 output_asm_insn (\"mov\\t%0, %1\", operands);
7623 /* Similarly, but the target is memory. */
7624 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7625 output_asm_insn (\"str\\t%1, %0\", operands);
7628 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7631 return \"b%d4\\t%l5\";
7633 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7635 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7639 [(set (attr "far_jump")
7641 (ior (and (eq (symbol_ref ("which_alternative"))
7643 (eq_attr "length" "8"))
7644 (eq_attr "length" "10"))
7645 (const_string "yes")
7646 (const_string "no")))
7647 (set (attr "length")
7649 (eq (symbol_ref ("which_alternative"))
7652 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7653 (le (minus (match_dup 5) (pc)) (const_int 256)))
7656 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7657 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7661 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7662 (le (minus (match_dup 5) (pc)) (const_int 256)))
7665 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7666 (le (minus (match_dup 5) (pc)) (const_int 2048)))
;; Subtract-and-branch where the difference is dead: a plain cmp suffices.
;; Restricted to EQ/NE/GE/LT, the conditions for which cmp flags and a real
;; subtract agree without needing the result value.
7671 (define_insn "*subsi3_cbranch_scratch"
7674 (match_operator 0 "arm_comparison_operator"
7675 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7676 (match_operand:SI 2 "nonmemory_operand" "l"))
7678 (label_ref (match_operand 3 "" ""))
7681 && (GET_CODE (operands[0]) == EQ
7682 || GET_CODE (operands[0]) == NE
7683 || GET_CODE (operands[0]) == GE
7684 || GET_CODE (operands[0]) == LT)"
7686 output_asm_insn (\"cmp\\t%1, %2\", operands);
7687 switch (get_attr_length (insn))
7689 case 4: return \"b%d0\\t%l3\";
7690 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7691 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7694 [(set (attr "far_jump")
7696 (eq_attr "length" "8")
7697 (const_string "yes")
7698 (const_string "no")))
7699 (set (attr "length")
7701 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7702 (le (minus (match_dup 3) (pc)) (const_int 256)))
7705 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7706 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7711 ;; Comparison and test insns
;; ARM SImode compare writing the CC register: alternative 0 takes a
;; register or I-class immediate (cmp), alternative 1 an L-class negated
;; immediate.  "conds" is "set" so conditional execution knows flags change.
7713 (define_insn "*arm_cmpsi_insn"
7714 [(set (reg:CC CC_REGNUM)
7715 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7716 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7721 [(set_attr "conds" "set")]
;; Compare of %0 against a shifted register (op1 shifted by op2 via shift
;; operator op3).  Type is alu_shift for an immediate shift amount,
;; alu_shift_reg for a register shift amount.
7724 (define_insn "*arm_cmpsi_shiftsi"
7725 [(set (reg:CC CC_REGNUM)
7726 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7727 (match_operator:SI 3 "shift_operator"
7728 [(match_operand:SI 1 "s_register_operand" "r")
7729 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7732 [(set_attr "conds" "set")
7733 (set_attr "shift" "1")
7734 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7735 (const_string "alu_shift")
7736 (const_string "alu_shift_reg")))]
;; Same compare but with the shifted register on the left; uses CC_SWP mode
;; to record that the operands are swapped relative to the emitted cmp.
7739 (define_insn "*arm_cmpsi_shiftsi_swp"
7740 [(set (reg:CC_SWP CC_REGNUM)
7741 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7742 [(match_operand:SI 1 "s_register_operand" "r")
7743 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7744 (match_operand:SI 0 "s_register_operand" "r")))]
7747 [(set_attr "conds" "set")
7748 (set_attr "shift" "1")
7749 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7750 (const_string "alu_shift")
7751 (const_string "alu_shift_reg")))]
;; Compare of a negated shifted register against %0, valid only for the
;; Z flag (CC_Z mode), since negation changes the other flag meanings.
7754 (define_insn "*arm_cmpsi_negshiftsi_si"
7755 [(set (reg:CC_Z CC_REGNUM)
7757 (neg:SI (match_operator:SI 1 "shift_operator"
7758 [(match_operand:SI 2 "s_register_operand" "r")
7759 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7760 (match_operand:SI 0 "s_register_operand" "r")))]
7763 [(set_attr "conds" "set")
7764 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7765 (const_string "alu_shift")
7766 (const_string "alu_shift_reg")))]
7769 ;; Cirrus SF compare instruction
;; Cirrus Maverick SFmode compare: cfcmps with r15 as the destination
;; routes the result into the ARM condition flags.
7770 (define_insn "*cirrus_cmpsf"
7771 [(set (reg:CCFP CC_REGNUM)
7772 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7773 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7774 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7775 "cfcmps%?\\tr15, %V0, %V1"
7776 [(set_attr "type" "mav_farith")
7777 (set_attr "cirrus" "compare")]
7780 ;; Cirrus DF compare instruction
;; Cirrus Maverick DFmode compare; same r15/flag mechanism as *cirrus_cmpsf.
7781 (define_insn "*cirrus_cmpdf"
7782 [(set (reg:CCFP CC_REGNUM)
7783 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7784 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7785 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7786 "cfcmpd%?\\tr15, %V0, %V1"
7787 [(set_attr "type" "mav_farith")
7788 (set_attr "cirrus" "compare")]
;; Cirrus Maverick 64-bit integer compare (cfcmp64), flags via r15; backs
;; the cbranchdi4 expander earlier in this file.
7791 (define_insn "*cirrus_cmpdi"
7792 [(set (reg:CC CC_REGNUM)
7793 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7794 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7795 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7796 "cfcmp64%?\\tr15, %V0, %V1"
7797 [(set_attr "type" "mav_farith")
7798 (set_attr "cirrus" "compare")]
7801 ; This insn allows redundant compares to be removed by cse, nothing should
7802 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7803 ; is deleted later on. The match_dup will match the mode here, so that
7804 ; mode changes of the condition codes aren't lost by this even though we don't
7805 ; specify what they are.
;; Emits only an assembler comment and has length 0, so it never affects
;; the generated code even if it survives to output.
7807 (define_insn "*deleted_compare"
7808 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7810 "\\t%@ deleted compare"
7811 [(set_attr "conds" "set")
7812 (set_attr "length" "0")]
7816 ;; Conditional branch insns
;; Expander: rewrite a two-operand comparison into a compare against the
;; CC register (via arm_gen_compare_reg) plus a branch on (CC cmp 0).
7818 (define_expand "cbranch_cc"
7820 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7821 (match_operand 2 "" "")])
7822 (label_ref (match_operand 3 "" ""))
7825 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7826 operands[1], operands[2]);
7827 operands[2] = const0_rtx;"
7831 ;; Patterns to match conditional branch insns.
;; Branch when the condition holds; b%d1 emits the branch with the
;; condition code taken directly from operator 1.  The arm_ccfsm_state
;; bookkeeping supports the conditional-execution state machine in arm.c.
7834 (define_insn "*arm_cond_branch"
7836 (if_then_else (match_operator 1 "arm_comparison_operator"
7837 [(match_operand 2 "cc_register" "") (const_int 0)])
7838 (label_ref (match_operand 0 "" ""))
7842 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7844 arm_ccfsm_state += 2;
7847 return \"b%d1\\t%l0\";
7849 [(set_attr "conds" "use")
7850 (set_attr "type" "branch")]
;; As above but with the branch taken when the condition does NOT hold
;; (%D1 emits the reversed condition).
7853 (define_insn "*arm_cond_branch_reversed"
7855 (if_then_else (match_operator 1 "arm_comparison_operator"
7856 [(match_operand 2 "cc_register" "") (const_int 0)])
7858 (label_ref (match_operand 0 "" ""))))]
7861 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7863 arm_ccfsm_state += 2;
7866 return \"b%D1\\t%l0\";
7868 [(set_attr "conds" "use")
7869 (set_attr "type" "branch")]
;; Expander: store the result of a comparison in a register.  The raw
;; comparison of operands 2 and 3 is converted into a compare against the
;; CC register followed by one of the *mov_*scc patterns below.
7876 (define_expand "cstore_cc"
7877 [(set (match_operand:SI 0 "s_register_operand" "")
7878 (match_operator:SI 1 "" [(match_operand 2 "" "")
7879 (match_operand 3 "" "")]))]
7881 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7882 operands[2], operands[3]);
7883 operands[3] = const0_rtx;"
;; reg = (cond) ? 1 : 0 via two conditional moves (8 bytes).
7886 (define_insn "*mov_scc"
7887 [(set (match_operand:SI 0 "s_register_operand" "=r")
7888 (match_operator:SI 1 "arm_comparison_operator"
7889 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7891 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7892 [(set_attr "conds" "use")
7893 (set_attr "length" "8")]
;; reg = (cond) ? -1 : 0 -- mvn #0 gives all-ones when the condition holds.
7896 (define_insn "*mov_negscc"
7897 [(set (match_operand:SI 0 "s_register_operand" "=r")
7898 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7899 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7901 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7902 [(set_attr "conds" "use")
7903 (set_attr "length" "8")]
;; reg = (cond) ? ~1 : 0 -- the one's complement of the scc value.
7906 (define_insn "*mov_notscc"
7907 [(set (match_operand:SI 0 "s_register_operand" "=r")
7908 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7909 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7911 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7912 [(set_attr "conds" "use")
7913 (set_attr "length" "8")]
;; Expander for SImode compare-and-store.  On 32-bit targets it defers to
;; cstore_cc; on Thumb-1 (no usable conditional execution for this) it
;; open-codes each comparison with add/ior/shift/adc sequences.
;; NOTE(review): the if/else and switch-case label lines of the C fragment
;; are elided in this excerpt -- the surviving statements are the per-case
;; expansion bodies; confirm structure against the full arm.md.
7916 (define_expand "cstoresi4"
7917 [(set (match_operand:SI 0 "s_register_operand" "")
7918 (match_operator:SI 1 "arm_comparison_operator"
7919 [(match_operand:SI 2 "s_register_operand" "")
7920 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7921 "TARGET_32BIT || TARGET_THUMB1"
7923 rtx op3, scratch, scratch2;
7927 if (!arm_add_operand (operands[3], SImode))
7928 operands[3] = force_reg (SImode, operands[3]);
7929 emit_insn (gen_cstore_cc (operands[0], operands[1],
7930 operands[2], operands[3]));
/* Thumb-1, comparing against zero: special-cased sequences.  */
7934 if (operands[3] == const0_rtx)
7936 switch (GET_CODE (operands[1]))
7939 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7943 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
/* (x != 0 form): op0 = (x | (x - 1)) >> 31, i.e. sign bit of x|(x-1).  */
7947 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7948 NULL_RTX, 0, OPTAB_WIDEN);
7949 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7950 NULL_RTX, 0, OPTAB_WIDEN);
7951 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7952 operands[0], 1, OPTAB_WIDEN);
/* op0 = (~x) >> 31 -- true when x is non-negative.  */
7956 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7958 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7959 NULL_RTX, 1, OPTAB_WIDEN);
/* op0 = ((x >> 31) - x) >> 31 (logical) -- positivity test.  */
7963 scratch = expand_binop (SImode, ashr_optab, operands[2],
7964 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7965 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7966 NULL_RTX, 0, OPTAB_WIDEN);
7967 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7971 /* LT is handled by generic code. No need for unsigned with 0. */
/* Thumb-1, general (non-zero) right-hand side.  */
7978 switch (GET_CODE (operands[1]))
7981 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7982 NULL_RTX, 0, OPTAB_WIDEN);
7983 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7987 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7988 NULL_RTX, 0, OPTAB_WIDEN);
7989 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
/* Signed ordering via sign bits plus an add-with-carry of the GEU test.  */
7993 op3 = force_reg (SImode, operands[3]);
7995 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7996 NULL_RTX, 1, OPTAB_WIDEN);
7997 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7998 NULL_RTX, 0, OPTAB_WIDEN);
7999 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8005 if (!thumb1_cmp_operand (op3, SImode))
8006 op3 = force_reg (SImode, op3);
8007 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8008 NULL_RTX, 0, OPTAB_WIDEN);
8009 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8010 NULL_RTX, 1, OPTAB_WIDEN);
8011 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
/* Unsigned GEU/LEU: add-with-carry of zero captures the carry flag.  */
8016 op3 = force_reg (SImode, operands[3]);
8017 scratch = force_reg (SImode, const0_rtx);
8018 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8024 if (!thumb1_cmp_operand (op3, SImode))
8025 op3 = force_reg (SImode, op3);
8026 scratch = force_reg (SImode, const0_rtx);
8027 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
/* LTU/GTU: -(a < b) from cstoresi_nltu_thumb1, then negate to get 0/1.  */
8033 if (!thumb1_cmp_operand (op3, SImode))
8034 op3 = force_reg (SImode, op3);
8035 scratch = gen_reg_rtx (SImode);
8036 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8037 emit_insn (gen_negsi2 (operands[0], scratch));
8041 op3 = force_reg (SImode, operands[3]);
8042 scratch = gen_reg_rtx (SImode);
8043 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8044 emit_insn (gen_negsi2 (operands[0], scratch));
8047 /* No good sequences for GT, LT. */
;; Floating-point and Cirrus-DI compare-and-store expanders.  Each simply
;; forwards to cstore_cc, which materializes the CC-register compare and
;; the conditional move of 0/1.
8054 (define_expand "cstoresf4"
8055 [(set (match_operand:SI 0 "s_register_operand" "")
8056 (match_operator:SI 1 "arm_comparison_operator"
8057 [(match_operand:SF 2 "s_register_operand" "")
8058 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8059 "TARGET_32BIT && TARGET_HARD_FLOAT"
8060 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8061 operands[2], operands[3])); DONE;"
8064 (define_expand "cstoredf4"
8065 [(set (match_operand:SI 0 "s_register_operand" "")
8066 (match_operator:SI 1 "arm_comparison_operator"
8067 [(match_operand:DF 2 "s_register_operand" "")
8068 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8069 "TARGET_32BIT && TARGET_HARD_FLOAT"
8070 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8071 operands[2], operands[3])); DONE;"
8074 ;; this uses the Cirrus DI compare instruction
8075 (define_expand "cstoredi4"
8076 [(set (match_operand:SI 0 "s_register_operand" "")
8077 (match_operator:SI 1 "arm_comparison_operator"
8078 [(match_operand:DI 2 "cirrus_fp_register" "")
8079 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8080 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8081 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8082 operands[2], operands[3])); DONE;"
;; Thumb-1 helper patterns for compare-and-store; used by cstoresi4 above.
;; Expander allocating the scratch register for the eq-0 sequence.
8086 (define_expand "cstoresi_eq0_thumb1"
8088 [(set (match_operand:SI 0 "s_register_operand" "")
8089 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8091 (clobber (match_dup:SI 2))])]
8093 "operands[2] = gen_reg_rtx (SImode);"
;; Same, for the ne-0 sequence.
8096 (define_expand "cstoresi_ne0_thumb1"
8098 [(set (match_operand:SI 0 "s_register_operand" "")
8099 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8101 (clobber (match_dup:SI 2))])]
8103 "operands[2] = gen_reg_rtx (SImode);"
;; op0 = (op1 == 0): neg sets carry iff op1 == 0, then adc accumulates it.
;; Two alternatives depending on whether op0 can overlap op1.
8106 (define_insn "*cstoresi_eq0_thumb1_insn"
8107 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8108 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8110 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8113 neg\\t%0, %1\;adc\\t%0, %0, %1
8114 neg\\t%2, %1\;adc\\t%0, %1, %2"
8115 [(set_attr "length" "4")]
;; op0 = (op1 != 0): sub #1 borrows iff op1 == 0, sbc extracts that.
8118 (define_insn "*cstoresi_ne0_thumb1_insn"
8119 [(set (match_operand:SI 0 "s_register_operand" "=l")
8120 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8122 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8124 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8125 [(set_attr "length" "4")]
;; op0 = -(op1 <u op2): cmp sets carry, sbc of a register with itself
;; yields 0 or -1 from the borrow.
8128 (define_insn "cstoresi_nltu_thumb1"
8129 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8130 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8131 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8133 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8134 [(set_attr "length" "4")]
8137 ;; Used as part of the expansion of thumb les sequence.
;; op0 = op1 + op2 + (op3 >=u op4), implemented as cmp (sets carry for
;; GEU) followed by adc.
8138 (define_insn "thumb1_addsi3_addgeu"
8139 [(set (match_operand:SI 0 "s_register_operand" "=l")
8140 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8141 (match_operand:SI 2 "s_register_operand" "l"))
8142 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8143 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8145 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8146 [(set_attr "length" "4")]
8150 ;; Conditional move insns
;; Expander: SImode conditional move.  Rewrites the comparison into a CC
;; register test; UNEQ/LTGT are rejected (the code checks for them first).
8152 (define_expand "movsicc"
8153 [(set (match_operand:SI 0 "s_register_operand" "")
8154 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8155 (match_operand:SI 2 "arm_not_operand" "")
8156 (match_operand:SI 3 "arm_not_operand" "")))]
8160 enum rtx_code code = GET_CODE (operands[1]);
8163 if (code == UNEQ || code == LTGT)
8166 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8167 XEXP (operands[1], 1));
8168 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move; additionally forces operand 3 into a register
;; when it is not a valid FPA add operand.
8172 (define_expand "movsfcc"
8173 [(set (match_operand:SF 0 "s_register_operand" "")
8174 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8175 (match_operand:SF 2 "s_register_operand" "")
8176 (match_operand:SF 3 "nonmemory_operand" "")))]
8177 "TARGET_32BIT && TARGET_HARD_FLOAT"
8180 enum rtx_code code = GET_CODE (operands[1]);
8183 if (code == UNEQ || code == LTGT)
8186 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8187 Otherwise, ensure it is a valid FP add operand */
8188 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8189 || (!arm_float_add_operand (operands[3], SFmode)))
8190 operands[3] = force_reg (SFmode, operands[3]);
8192 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8193 XEXP (operands[1], 1));
8194 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move (FPA or VFP only).
8198 (define_expand "movdfcc"
8199 [(set (match_operand:DF 0 "s_register_operand" "")
8200 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8201 (match_operand:DF 2 "s_register_operand" "")
8202 (match_operand:DF 3 "arm_float_add_operand" "")))]
8203 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8206 enum rtx_code code = GET_CODE (operands[1]);
8209 if (code == UNEQ || code == LTGT)
8212 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8213 XEXP (operands[1], 1));
8214 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SImode conditional-move insn: eight alternatives covering the cases
;; where either arm matches the destination (single conditional insn,
;; length 4) or both arms need a mov/mvn (length 8).  %B prints the
;; bitwise complement of the constant for the mvn forms.
;; NOTE(review): the first output-template alternatives appear elided in
;; this excerpt -- confirm against the full arm.md.
8218 (define_insn "*movsicc_insn"
8219 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8221 (match_operator 3 "arm_comparison_operator"
8222 [(match_operand 4 "cc_register" "") (const_int 0)])
8223 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8224 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8231 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8232 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8233 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8234 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8235 [(set_attr "length" "4,4,4,4,8,8,8,8")
8236 (set_attr "conds" "use")]
;; Soft-float SFmode conditional move: the value lives in core registers,
;; so a conditional core-register mov suffices.
8239 (define_insn "*movsfcc_soft_insn"
8240 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8241 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8242 [(match_operand 4 "cc_register" "") (const_int 0)])
8243 (match_operand:SF 1 "s_register_operand" "0,r")
8244 (match_operand:SF 2 "s_register_operand" "r,0")))]
8245 "TARGET_ARM && TARGET_SOFT_FLOAT"
8249 [(set_attr "conds" "use")]
8253 ;; Jump and linkage insns
;; Generic unconditional jump expander; the target-specific insns below
;; provide the actual templates.
8255 (define_expand "jump"
8257 (label_ref (match_operand 0 "" "")))]
;; ARM-state unconditional branch; cooperates with the ccfsm conditional
;; execution state machine.
8262 (define_insn "*arm_jump"
8264 (label_ref (match_operand 0 "" "")))]
8268 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8270 arm_ccfsm_state += 2;
8273 return \"b%?\\t%l0\";
8276 [(set_attr "predicable" "yes")]
;; Thumb unconditional branch.  A short "b" reaches roughly -2044..2048
;; bytes; beyond that a "bl" is used as a far jump (far_jump attr = yes,
;; length 4).
8279 (define_insn "*thumb_jump"
8281 (label_ref (match_operand 0 "" "")))]
8284 if (get_attr_length (insn) == 2)
8286 return \"bl\\t%l0\\t%@ far jump\";
8288 [(set (attr "far_jump")
8290 (eq_attr "length" "4")
8291 (const_string "yes")
8292 (const_string "no")))
8293 (set (attr "length")
8295 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8296 (le (minus (match_dup 0) (pc)) (const_int 2048)))
;; Call expander.  Forces long calls through a register (when
;; arm_is_long_call_p says so, or the callee is not a SYMBOL_REF reachable
;; directly) and then emits call_internal via arm_emit_call_insn.
8301 (define_expand "call"
8302 [(parallel [(call (match_operand 0 "memory_operand" "")
8303 (match_operand 1 "general_operand" ""))
8304 (use (match_operand 2 "" ""))
8305 (clobber (reg:SI LR_REGNUM))])]
8311 /* In an untyped call, we can get NULL for operand 2. */
8312 if (operands[2] == NULL_RTX)
8313 operands[2] = const0_rtx;
8315 /* Decide if we should generate indirect calls by loading the
8316 32-bit address of the callee into a register before performing the
8318 callee = XEXP (operands[0], 0);
8319 if (GET_CODE (callee) == SYMBOL_REF
8320 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8322 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8324 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8325 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Pass-through expander matched by the *call_* insns below.
8330 (define_expand "call_internal"
8331 [(parallel [(call (match_operand 0 "memory_operand" "")
8332 (match_operand 1 "general_operand" ""))
8333 (use (match_operand 2 "" ""))
8334 (clobber (reg:SI LR_REGNUM))])])
;; ARMv5+: call through a register (blx).
8336 (define_insn "*call_reg_armv5"
8337 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8338 (match_operand 1 "" ""))
8339 (use (match_operand 2 "" ""))
8340 (clobber (reg:SI LR_REGNUM))]
8341 "TARGET_ARM && arm_arch5"
8343 [(set_attr "type" "call")]
;; Pre-v5 ARM register call; output_call emits the mov lr,pc sequence.
;; Length 12 is the worst case.
8346 (define_insn "*call_reg_arm"
8347 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8348 (match_operand 1 "" ""))
8349 (use (match_operand 2 "" ""))
8350 (clobber (reg:SI LR_REGNUM))]
8351 "TARGET_ARM && !arm_arch5"
8353 return output_call (operands);
8355 ;; length is worst case, normally it is only two
8356 [(set_attr "length" "12")
8357 (set_attr "type" "call")]
;; Call through a memory operand holding the target address.
8360 (define_insn "*call_mem"
8361 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8362 (match_operand 1 "" ""))
8363 (use (match_operand 2 "" ""))
8364 (clobber (reg:SI LR_REGNUM))]
8367 return output_call_mem (operands);
8369 [(set_attr "length" "12")
8370 (set_attr "type" "call")]
;; Thumb-1 on v5+: blx through a register (2 bytes).
8373 (define_insn "*call_reg_thumb1_v5"
8374 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8375 (match_operand 1 "" ""))
8376 (use (match_operand 2 "" ""))
8377 (clobber (reg:SI LR_REGNUM))]
8378 "TARGET_THUMB1 && arm_arch5"
8380 [(set_attr "length" "2")
8381 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 register call: either via the per-register helper
;; (thumb_call_via_reg) or, for caller interworking, via the
;; __interwork_*_call_via_rN libgcc stubs (variant picked by whether a
;; frame pointer is in use).
8384 (define_insn "*call_reg_thumb1"
8385 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8386 (match_operand 1 "" ""))
8387 (use (match_operand 2 "" ""))
8388 (clobber (reg:SI LR_REGNUM))]
8389 "TARGET_THUMB1 && !arm_arch5"
8392 if (!TARGET_CALLER_INTERWORKING)
8393 return thumb_call_via_reg (operands[0]);
8394 else if (operands[1] == const0_rtx)
8395 return \"bl\\t%__interwork_call_via_%0\";
8396 else if (frame_pointer_needed)
8397 return \"bl\\t%__interwork_r7_call_via_%0\";
8399 return \"bl\\t%__interwork_r11_call_via_%0\";
8401 [(set_attr "type" "call")]
;; call_value expander: as "call" above but with a value-returning set.
;; Operand 0 = result, 1 = callee memory, 2 = args size, 3 = usage info.
8404 (define_expand "call_value"
8405 [(parallel [(set (match_operand 0 "" "")
8406 (call (match_operand 1 "memory_operand" "")
8407 (match_operand 2 "general_operand" "")))
8408 (use (match_operand 3 "" ""))
8409 (clobber (reg:SI LR_REGNUM))])]
8415 /* In an untyped call, we can get NULL for operand 3. */
8416 if (operands[3] == 0)
8417 operands[3] = const0_rtx;
8419 /* Decide if we should generate indirect calls by loading the
8420 32-bit address of the callee into a register before performing the
8422 callee = XEXP (operands[1], 0);
8423 if (GET_CODE (callee) == SYMBOL_REF
8424 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8426 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8428 pat = gen_call_value_internal (operands[0], operands[1],
8429 operands[2], operands[3]);
8430 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Pass-through expander matched by the *call_value_* insns below.
8435 (define_expand "call_value_internal"
8436 [(parallel [(set (match_operand 0 "" "")
8437 (call (match_operand 1 "memory_operand" "")
8438 (match_operand 2 "general_operand" "")))
8439 (use (match_operand 3 "" ""))
8440 (clobber (reg:SI LR_REGNUM))])])
;; ARMv5+: value-returning register call (blx).
8442 (define_insn "*call_value_reg_armv5"
8443 [(set (match_operand 0 "" "")
8444 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8445 (match_operand 2 "" "")))
8446 (use (match_operand 3 "" ""))
8447 (clobber (reg:SI LR_REGNUM))]
8448 "TARGET_ARM && arm_arch5"
8450 [(set_attr "type" "call")]
;; Pre-v5 ARM value-returning register call; operands shifted by one
;; relative to *call_reg_arm, hence &operands[1].
8453 (define_insn "*call_value_reg_arm"
8454 [(set (match_operand 0 "" "")
8455 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8456 (match_operand 2 "" "")))
8457 (use (match_operand 3 "" ""))
8458 (clobber (reg:SI LR_REGNUM))]
8459 "TARGET_ARM && !arm_arch5"
8461 return output_call (&operands[1]);
8463 [(set_attr "length" "12")
8464 (set_attr "type" "call")]
;; Value-returning call through a (non-constant-address) memory operand.
8467 (define_insn "*call_value_mem"
8468 [(set (match_operand 0 "" "")
8469 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8470 (match_operand 2 "" "")))
8471 (use (match_operand 3 "" ""))
8472 (clobber (reg:SI LR_REGNUM))]
8473 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8475 return output_call_mem (&operands[1]);
8477 [(set_attr "length" "12")
8478 (set_attr "type" "call")]
;; Thumb-1 on v5+: value-returning blx through a register.
8481 (define_insn "*call_value_reg_thumb1_v5"
8482 [(set (match_operand 0 "" "")
8483 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8484 (match_operand 2 "" "")))
8485 (use (match_operand 3 "" ""))
8486 (clobber (reg:SI LR_REGNUM))]
8487 "TARGET_THUMB1 && arm_arch5"
8489 [(set_attr "length" "2")
8490 (set_attr "type" "call")]
;; Pre-v5 Thumb-1 value-returning register call; mirrors *call_reg_thumb1
;; with operand numbers shifted by one.
8493 (define_insn "*call_value_reg_thumb1"
8494 [(set (match_operand 0 "" "")
8495 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8496 (match_operand 2 "" "")))
8497 (use (match_operand 3 "" ""))
8498 (clobber (reg:SI LR_REGNUM))]
8499 "TARGET_THUMB1 && !arm_arch5"
8502 if (!TARGET_CALLER_INTERWORKING)
8503 return thumb_call_via_reg (operands[1]);
8504 else if (operands[2] == const0_rtx)
8505 return \"bl\\t%__interwork_call_via_%1\";
8506 else if (frame_pointer_needed)
8507 return \"bl\\t%__interwork_r7_call_via_%1\";
8509 return \"bl\\t%__interwork_r11_call_via_%1\";
8511 [(set_attr "type" "call")]
8514 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8515 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct ARM-state call to a symbol; not taken for long calls.  Emits
;; bl with a (PLT) suffix when PLT relocations are required.
8517 (define_insn "*call_symbol"
8518 [(call (mem:SI (match_operand:SI 0 "" ""))
8519 (match_operand 1 "" ""))
8520 (use (match_operand 2 "" ""))
8521 (clobber (reg:SI LR_REGNUM))]
8523 && (GET_CODE (operands[0]) == SYMBOL_REF)
8524 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8527 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8529 [(set_attr "type" "call")]
;; Value-returning variant of *call_symbol.
8532 (define_insn "*call_value_symbol"
8533 [(set (match_operand 0 "" "")
8534 (call (mem:SI (match_operand:SI 1 "" ""))
8535 (match_operand:SI 2 "" "")))
8536 (use (match_operand 3 "" ""))
8537 (clobber (reg:SI LR_REGNUM))]
8539 && (GET_CODE (operands[1]) == SYMBOL_REF)
8540 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8543 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8545 [(set_attr "type" "call")]
;; Direct symbol call (Thumb variant -- condition line elided in this
;; excerpt; NOTE(review): confirm the target condition in the full file).
8548 (define_insn "*call_insn"
8549 [(call (mem:SI (match_operand:SI 0 "" ""))
8550 (match_operand:SI 1 "" ""))
8551 (use (match_operand 2 "" ""))
8552 (clobber (reg:SI LR_REGNUM))]
8554 && GET_CODE (operands[0]) == SYMBOL_REF
8555 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8557 [(set_attr "length" "4")
8558 (set_attr "type" "call")]
;; Value-returning variant of *call_insn.
8561 (define_insn "*call_value_insn"
8562 [(set (match_operand 0 "" "")
8563 (call (mem:SI (match_operand 1 "" ""))
8564 (match_operand 2 "" "")))
8565 (use (match_operand 3 "" ""))
8566 (clobber (reg:SI LR_REGNUM))]
8568 && GET_CODE (operands[1]) == SYMBOL_REF
8569 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8571 [(set_attr "length" "4")
8572 (set_attr "type" "call")]
8575 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Sibling (tail) call expander; no LR clobber since the call replaces the
;; current frame's return.
8576 (define_expand "sibcall"
8577 [(parallel [(call (match_operand 0 "memory_operand" "")
8578 (match_operand 1 "general_operand" ""))
8580 (use (match_operand 2 "" ""))])]
8584 if (operands[2] == NULL_RTX)
8585 operands[2] = const0_rtx;
;; Value-returning sibling call expander.
8589 (define_expand "sibcall_value"
8590 [(parallel [(set (match_operand 0 "" "")
8591 (call (match_operand 1 "memory_operand" "")
8592 (match_operand 2 "general_operand" "")))
8594 (use (match_operand 3 "" ""))])]
8598 if (operands[3] == NULL_RTX)
8599 operands[3] = const0_rtx;
;; Tail call: a plain branch (b) to the symbol, with (PLT) when needed.
8603 (define_insn "*sibcall_insn"
8604 [(call (mem:SI (match_operand:SI 0 "" "X"))
8605 (match_operand 1 "" ""))
8607 (use (match_operand 2 "" ""))]
8608 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8610 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8612 [(set_attr "type" "call")]
;; Value-returning tail call.
8615 (define_insn "*sibcall_value_insn"
8616 [(set (match_operand 0 "" "")
8617 (call (mem:SI (match_operand:SI 1 "" "X"))
8618 (match_operand 2 "" "")))
8620 (use (match_operand 3 "" ""))]
8621 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8623 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8625 [(set_attr "type" "call")]
8628 ;; Often the return insn will be the same as loading from memory, so set attr
;; Simple function return; only available when USE_RETURN_INSN allows a
;; single-instruction epilogue.  output_return_instruction produces the
;; actual text (possibly an ldm), hence type load1 / length 12 worst case.
8629 (define_insn "return"
8631 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8634 if (arm_ccfsm_state == 2)
8636 arm_ccfsm_state += 2;
8639 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8641 [(set_attr "type" "load1")
8642 (set_attr "length" "12")
8643 (set_attr "predicable" "yes")]
;; Conditional return when operator 0 holds.
8646 (define_insn "*cond_return"
8648 (if_then_else (match_operator 0 "arm_comparison_operator"
8649 [(match_operand 1 "cc_register" "") (const_int 0)])
8652 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8655 if (arm_ccfsm_state == 2)
8657 arm_ccfsm_state += 2;
8660 return output_return_instruction (operands[0], TRUE, FALSE);
8662 [(set_attr "conds" "use")
8663 (set_attr "length" "12")
8664 (set_attr "type" "load1")]
;; Conditional return when operator 0 does NOT hold (reversed condition
;; selected by the final TRUE argument).
8667 (define_insn "*cond_return_inverted"
8669 (if_then_else (match_operator 0 "arm_comparison_operator"
8670 [(match_operand 1 "cc_register" "") (const_int 0)])
8673 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8676 if (arm_ccfsm_state == 2)
8678 arm_ccfsm_state += 2;
8681 return output_return_instruction (operands[0], TRUE, TRUE);
8683 [(set_attr "conds" "use")
8684 (set_attr "length" "12")
8685 (set_attr "type" "load1")]
8688 ;; Generate a sequence of instructions to determine if the processor is
8689 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Produces either all-ones or 0x03fffffc as a mask for the saved return
;; address, depending on the architecture check below.
8692 (define_expand "return_addr_mask"
8694 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8696 (set (match_operand:SI 0 "s_register_operand" "")
8697 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8699 (const_int 67108860)))] ; 0x03fffffc
8702 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; Runtime architecture check: teq pc, pc sets Z only in 32-bit mode
;; (where reading pc does not include the flag bits).
8705 (define_insn "*check_arch2"
8706 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8707 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8710 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8711 [(set_attr "length" "8")
8712 (set_attr "conds" "set")]
8715 ;; Call subroutine returning any type.
;; Expands __builtin_apply: performs the call, then stores every possible
;; result register into the result block.  The R0 result is widened to
;; TImode so r0-r3 are all captured.
8717 (define_expand "untyped_call"
8718 [(parallel [(call (match_operand 0 "" "")
8720 (match_operand 1 "" "")
8721 (match_operand 2 "" "")])]
8726 rtx par = gen_rtx_PARALLEL (VOIDmode,
8727 rtvec_alloc (XVECLEN (operands[2], 0)));
8728 rtx addr = gen_reg_rtx (Pmode);
8732 emit_move_insn (addr, XEXP (operands[1], 0));
8733 mem = change_address (operands[1], BLKmode, addr);
8735 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8737 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8739 /* Default code only uses r0 as a return value, but we could
8740 be using anything up to 4 registers. */
8741 if (REGNO (src) == R0_REGNUM)
8742 src = gen_rtx_REG (TImode, R0_REGNUM);
8744 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8746 size += GET_MODE_SIZE (GET_MODE (src));
8749 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
/* Copy each returned register out to the memory result block.  */
8754 for (i = 0; i < XVECLEN (par, 0); i++)
8756 HOST_WIDE_INT offset = 0;
8757 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8760 emit_move_insn (addr, plus_constant (addr, size));
8762 mem = change_address (mem, GET_MODE (reg), NULL);
8763 if (REGNO (reg) == R0_REGNUM)
8765 /* On thumb we have to use a write-back instruction. */
8766 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8767 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8768 size = TARGET_ARM ? 16 : 0;
8772 emit_move_insn (mem, reg);
8773 size = GET_MODE_SIZE (GET_MODE (reg));
8777 /* The optimizer does not know that the call sets the function value
8778 registers we stored in the result block. We avoid problems by
8779 claiming that all hard registers are used and clobbered at this
8781 emit_insn (gen_blockage ());
;; Expands __builtin_return: reload every saved result register from the
;; result block (operand 0), emit USEs so they stay live, then return.
8787 (define_expand "untyped_return"
8788 [(match_operand:BLK 0 "memory_operand" "")
8789 (match_operand 1 "" "")]
8794 rtx addr = gen_reg_rtx (Pmode);
8798 emit_move_insn (addr, XEXP (operands[0], 0));
8799 mem = change_address (operands[0], BLKmode, addr);
8801 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8803 HOST_WIDE_INT offset = 0;
8804 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8807 emit_move_insn (addr, plus_constant (addr, size));
8809 mem = change_address (mem, GET_MODE (reg), NULL);
8810 if (REGNO (reg) == R0_REGNUM)
8812 /* On thumb we have to use a write-back instruction. */
8813 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8814 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8815 size = TARGET_ARM ? 16 : 0;
8819 emit_move_insn (reg, mem);
8820 size = GET_MODE_SIZE (GET_MODE (reg));
8824 /* Emit USE insns before the return. */
8825 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8826 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8828 /* Construct the return. */
8829 expand_naked_return ();
8835 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8836 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no machine code.
8838 (define_insn "blockage"
8839 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8842 [(set_attr "length" "0")
8843 (set_attr "type" "block")]
;; Switch-statement dispatch.  Normalizes the index by subtracting the
;; lower bound, then picks the architecture-specific internal casesi insn
;; (ARM, Thumb-1 PIC, Thumb-2 PIC/non-PIC).
8846 (define_expand "casesi"
8847 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8848 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8849 (match_operand:SI 2 "const_int_operand" "") ; total range
8850 (match_operand:SI 3 "" "") ; table label
8851 (match_operand:SI 4 "" "")] ; Out of range label
8852 "TARGET_32BIT || optimize_size || flag_pic"
8855 enum insn_code code;
8856 if (operands[1] != const0_rtx)
8858 rtx reg = gen_reg_rtx (SImode);
8860 emit_insn (gen_addsi3 (reg, operands[0],
8861 GEN_INT (-INTVAL (operands[1]))));
8866 code = CODE_FOR_arm_casesi_internal;
8867 else if (TARGET_THUMB1)
8868 code = CODE_FOR_thumb1_casesi_internal_pic;
8870 code = CODE_FOR_thumb2_casesi_internal_pic;
8872 code = CODE_FOR_thumb2_casesi_internal;
8874 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8875 operands[2] = force_reg (SImode, operands[2]);
8877 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8878 operands[3], operands[4]));
8883 ;; The USE in this pattern is needed to tell flow analysis that this is
8884 ;; a CASESI insn. It has no other purpose.
;; ARM-state table jump: range check with cmp, then either add the scaled
;; index to pc (dispatch into a table of branches) or ldr pc from the
;; table, falling through to the default label otherwise.
8885 (define_insn "arm_casesi_internal"
8886 [(parallel [(set (pc)
8888 (leu (match_operand:SI 0 "s_register_operand" "r")
8889 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8890 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8891 (label_ref (match_operand 2 "" ""))))
8892 (label_ref (match_operand 3 "" ""))))
8893 (clobber (reg:CC CC_REGNUM))
8894 (use (label_ref (match_dup 2)))])]
8898 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8899 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8901 [(set_attr "conds" "clob")
8902 (set_attr "length" "12")]
;; Thumb-1 PIC casesi: branch to the default label when out of range,
;; copy the index to r0, then dispatch via thumb1_casesi_dispatch.
8905 (define_expand "thumb1_casesi_internal_pic"
8906 [(match_operand:SI 0 "s_register_operand" "")
8907 (match_operand:SI 1 "thumb1_cmp_operand" "")
8908 (match_operand 2 "" "")
8909 (match_operand 3 "" "")]
8913 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8914 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8916 reg0 = gen_rtx_REG (SImode, 0);
8917 emit_move_insn (reg0, operands[0]);
8918 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
;; The actual Thumb-1 dispatch; text comes from thumb1_output_casesi.
;; Clobbers ip and lr, consumes the index in r0.
8923 (define_insn "thumb1_casesi_dispatch"
8924 [(parallel [(set (pc) (unspec [(reg:SI 0)
8925 (label_ref (match_operand 0 "" ""))
8926 ;; (label_ref (match_operand 1 "" ""))
8928 UNSPEC_THUMB1_CASESI))
8929 (clobber (reg:SI IP_REGNUM))
8930 (clobber (reg:SI LR_REGNUM))])]
8932 "* return thumb1_output_casesi(operands);"
8933 [(set_attr "length" "4")]
;; Indirect jump through a register.  For Thumb-2 the comment below notes
;; there is no "mov pc, reg", so the low bit is set and bx used instead.
8936 (define_expand "indirect_jump"
8938 (match_operand:SI 0 "s_register_operand" ""))]
8941 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8942 address and use bx. */
8946 tmp = gen_reg_rtx (SImode);
8947 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8953 ;; NB Never uses BX.
;; ARM-state indirect jump via mov pc, reg (deliberately not bx).
8954 (define_insn "*arm_indirect_jump"
8956 (match_operand:SI 0 "s_register_operand" "r"))]
8958 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8959 [(set_attr "predicable" "yes")]
;; Indirect jump loading pc straight from memory.
8962 (define_insn "*load_indirect_jump"
8964 (match_operand:SI 0 "memory_operand" "m"))]
8966 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8967 [(set_attr "type" "load1")
8968 (set_attr "pool_range" "4096")
8969 (set_attr "neg_pool_range" "4084")
8970 (set_attr "predicable" "yes")]
8973 ;; NB Never uses BX.
;; Thumb-1 indirect register jump (template line elided in this excerpt).
8974 (define_insn "*thumb1_indirect_jump"
8976 (match_operand:SI 0 "register_operand" "l*r"))]
8979 [(set_attr "conds" "clob")
8980 (set_attr "length" "2")]
/* NOTE(review): fragment of a nop-style insn whose define_insn header is
   elided in this excerpt.  Unified-ASM emits "mov r0, r0" as the nop;
   the Thumb fallback is "mov r8, r8".  Confirm against the full arm.md.  */
8990 if (TARGET_UNIFIED_ASM)
8993 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8994 return \"mov\\tr8, r8\";
8996 [(set (attr "length")
8997 (if_then_else (eq_attr "is_thumb" "yes")
9003 ;; Patterns to allow combination of arithmetic, cond code and shifts
9005 (define_insn "*arith_shiftsi"
9006 [(set (match_operand:SI 0 "s_register_operand" "=r")
9007 (match_operator:SI 1 "shiftable_operator"
9008 [(match_operator:SI 3 "shift_operator"
9009 [(match_operand:SI 4 "s_register_operand" "r")
9010 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9011 (match_operand:SI 2 "s_register_operand" "r")]))]
9013 "%i1%?\\t%0, %2, %4%S3"
9014 [(set_attr "predicable" "yes")
9015 (set_attr "shift" "4")
9016 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9017 (const_string "alu_shift")
9018 (const_string "alu_shift_reg")))]
9022 [(set (match_operand:SI 0 "s_register_operand" "")
9023 (match_operator:SI 1 "shiftable_operator"
9024 [(match_operator:SI 2 "shiftable_operator"
9025 [(match_operator:SI 3 "shift_operator"
9026 [(match_operand:SI 4 "s_register_operand" "")
9027 (match_operand:SI 5 "reg_or_int_operand" "")])
9028 (match_operand:SI 6 "s_register_operand" "")])
9029 (match_operand:SI 7 "arm_rhs_operand" "")]))
9030 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9033 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9036 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9039 (define_insn "*arith_shiftsi_compare0"
9040 [(set (reg:CC_NOOV CC_REGNUM)
9041 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9042 [(match_operator:SI 3 "shift_operator"
9043 [(match_operand:SI 4 "s_register_operand" "r")
9044 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9045 (match_operand:SI 2 "s_register_operand" "r")])
9047 (set (match_operand:SI 0 "s_register_operand" "=r")
9048 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9051 "%i1%.\\t%0, %2, %4%S3"
9052 [(set_attr "conds" "set")
9053 (set_attr "shift" "4")
9054 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9055 (const_string "alu_shift")
9056 (const_string "alu_shift_reg")))]
9059 (define_insn "*arith_shiftsi_compare0_scratch"
9060 [(set (reg:CC_NOOV CC_REGNUM)
9061 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9062 [(match_operator:SI 3 "shift_operator"
9063 [(match_operand:SI 4 "s_register_operand" "r")
9064 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9065 (match_operand:SI 2 "s_register_operand" "r")])
9067 (clobber (match_scratch:SI 0 "=r"))]
9069 "%i1%.\\t%0, %2, %4%S3"
9070 [(set_attr "conds" "set")
9071 (set_attr "shift" "4")
9072 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9073 (const_string "alu_shift")
9074 (const_string "alu_shift_reg")))]
;; Subtract a shifted register from a register in one insn:
;; operand0 = operand1 - (operand3 <shift2> operand4).
;; %S2 appends the shift specifier; the "type" attribute marks
;; shift-by-immediate vs. shift-by-register cost classes.
;; NOTE(review): the insn condition line (original 9083) is missing from
;; this extract -- confirm against the full arm.md.
9077 (define_insn "*sub_shiftsi"
9078 [(set (match_operand:SI 0 "s_register_operand" "=r")
9079 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9080 (match_operator:SI 2 "shift_operator"
9081 [(match_operand:SI 3 "s_register_operand" "r")
9082 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9084 "sub%?\\t%0, %1, %3%S2"
9085 [(set_attr "predicable" "yes")
9086 (set_attr "shift" "3")
9087 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9088 (const_string "alu_shift")
9089 (const_string "alu_shift_reg")))]
9092 (define_insn "*sub_shiftsi_compare0"
9093 [(set (reg:CC_NOOV CC_REGNUM)
9095 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9096 (match_operator:SI 2 "shift_operator"
9097 [(match_operand:SI 3 "s_register_operand" "r")
9098 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9100 (set (match_operand:SI 0 "s_register_operand" "=r")
9101 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9104 "sub%.\\t%0, %1, %3%S2"
9105 [(set_attr "conds" "set")
9106 (set_attr "shift" "3")
9107 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9108 (const_string "alu_shift")
9109 (const_string "alu_shift_reg")))]
9112 (define_insn "*sub_shiftsi_compare0_scratch"
9113 [(set (reg:CC_NOOV CC_REGNUM)
9115 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9116 (match_operator:SI 2 "shift_operator"
9117 [(match_operand:SI 3 "s_register_operand" "r")
9118 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9120 (clobber (match_scratch:SI 0 "=r"))]
9122 "sub%.\\t%0, %1, %3%S2"
9123 [(set_attr "conds" "set")
9124 (set_attr "shift" "3")
9125 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9126 (const_string "alu_shift")
9127 (const_string "alu_shift_reg")))]
;; AND a register with the 1/0 result of a stored-flags comparison.
;; Emits two conditional insns using the already-set condition codes
;; (conds "use"): move #0 on the inverse condition (%D1), otherwise
;; AND the register with #1 on the condition (%d1).
9132 (define_insn "*and_scc"
9133 [(set (match_operand:SI 0 "s_register_operand" "=r")
9134 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9135 [(match_operand 3 "cc_register" "") (const_int 0)])
9136 (match_operand:SI 2 "s_register_operand" "r")))]
9138 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9139 [(set_attr "conds" "use")
9140 (set_attr "length" "8")]
;; OR a register with the 1/0 result of a stored-flags comparison.
;; Alternative 0 (operand1 tied to operand0) needs only a conditional
;; ORR with #1 (length 4); the second alternative also copies operand1
;; on the inverse condition first (length 8).
;; NOTE(review): the first alternative of the output template (original
;; lines 9148-9150) is missing from this extract.
9143 (define_insn "*ior_scc"
9144 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9145 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9146 [(match_operand 3 "cc_register" "") (const_int 0)])
9147 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9151 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9152 [(set_attr "conds" "use")
9153 (set_attr "length" "4,8")]
;; Store the boolean (1/0) result of a comparison into a register,
;; clobbering the condition codes.  Special fast sequences are used when
;; comparing against zero: LT is just a logical shift right of the sign
;; bit; GE inverts first; EQ/NE use flag-setting arithmetic.  The
;; general case compares (cmp, or cmn for negated constants in
;; alternative 1) then conditionally moves #0/#1.
;; NOTE(review): the insn condition and the C-fragment braces (original
;; lines 9162-9163, 9165, etc.) are missing from this extract.
9156 (define_insn "*compare_scc"
9157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9158 (match_operator:SI 1 "arm_comparison_operator"
9159 [(match_operand:SI 2 "s_register_operand" "r,r")
9160 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9161 (clobber (reg:CC CC_REGNUM))]
9164 if (operands[3] == const0_rtx)
9166 if (GET_CODE (operands[1]) == LT)
9167 return \"mov\\t%0, %2, lsr #31\";
9169 if (GET_CODE (operands[1]) == GE)
9170 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9172 if (GET_CODE (operands[1]) == EQ)
9173 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9176 if (GET_CODE (operands[1]) == NE)
9178 if (which_alternative == 1)
9179 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9180 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9182 if (which_alternative == 1)
9183 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9185 output_asm_insn (\"cmp\\t%2, %3\", operands);
9186 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9188 [(set_attr "conds" "clob")
9189 (set_attr "length" "12")]
;; Conditional move using already-set condition codes (conds "use"):
;; operand0 = operand1 if the (possibly negated via the equality
;; operator 3) condition holds, else operand2.  When one source is tied
;; to the destination (alternatives 0/1) only a single conditional mov
;; is needed; otherwise both arms are emitted (length 8).
;; NOTE(review): several interior lines (original 9197, 9200-9201,
;; 9203, 9208-9209, 9214-9215) are missing from this extract.
9192 (define_insn "*cond_move"
9193 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9194 (if_then_else:SI (match_operator 3 "equality_operator"
9195 [(match_operator 4 "arm_comparison_operator"
9196 [(match_operand 5 "cc_register" "") (const_int 0)])
9198 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9199 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9202 if (GET_CODE (operands[3]) == NE)
9204 if (which_alternative != 1)
9205 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9206 if (which_alternative != 0)
9207 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9210 if (which_alternative != 0)
9211 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9212 if (which_alternative != 1)
9213 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9216 [(set_attr "conds" "use")
9217 (set_attr "length" "4,4,8")]
9220 (define_insn "*cond_arith"
9221 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9222 (match_operator:SI 5 "shiftable_operator"
9223 [(match_operator:SI 4 "arm_comparison_operator"
9224 [(match_operand:SI 2 "s_register_operand" "r,r")
9225 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9226 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9227 (clobber (reg:CC CC_REGNUM))]
9230 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9231 return \"%i5\\t%0, %1, %2, lsr #31\";
9233 output_asm_insn (\"cmp\\t%2, %3\", operands);
9234 if (GET_CODE (operands[5]) == AND)
9235 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9236 else if (GET_CODE (operands[5]) == MINUS)
9237 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9238 else if (which_alternative != 0)
9239 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9240 return \"%i5%d4\\t%0, %1, #1\";
9242 [(set_attr "conds" "clob")
9243 (set_attr "length" "12")]
9246 (define_insn "*cond_sub"
9247 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9248 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9249 (match_operator:SI 4 "arm_comparison_operator"
9250 [(match_operand:SI 2 "s_register_operand" "r,r")
9251 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9252 (clobber (reg:CC CC_REGNUM))]
9255 output_asm_insn (\"cmp\\t%2, %3\", operands);
9256 if (which_alternative != 0)
9257 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9258 return \"sub%d4\\t%0, %1, #1\";
9260 [(set_attr "conds" "clob")
9261 (set_attr "length" "8,12")]
9264 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9265 (define_insn "*cmp_ite0"
9266 [(set (match_operand 6 "dominant_cc_register" "")
9269 (match_operator 4 "arm_comparison_operator"
9270 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9271 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9272 (match_operator:SI 5 "arm_comparison_operator"
9273 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9274 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9280 static const char * const opcodes[4][2] =
9282 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9283 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9284 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9285 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9286 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9287 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9288 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9289 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9292 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9294 return opcodes[which_alternative][swap];
9296 [(set_attr "conds" "set")
9297 (set_attr "length" "8")]
9300 (define_insn "*cmp_ite1"
9301 [(set (match_operand 6 "dominant_cc_register" "")
9304 (match_operator 4 "arm_comparison_operator"
9305 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9306 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9307 (match_operator:SI 5 "arm_comparison_operator"
9308 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9309 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9315 static const char * const opcodes[4][2] =
9317 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9318 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9319 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9320 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9321 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9322 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9323 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9324 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9327 comparison_dominates_p (GET_CODE (operands[5]),
9328 reverse_condition (GET_CODE (operands[4])));
9330 return opcodes[which_alternative][swap];
9332 [(set_attr "conds" "set")
9333 (set_attr "length" "8")]
9336 (define_insn "*cmp_and"
9337 [(set (match_operand 6 "dominant_cc_register" "")
9340 (match_operator 4 "arm_comparison_operator"
9341 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9342 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9343 (match_operator:SI 5 "arm_comparison_operator"
9344 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9345 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9350 static const char *const opcodes[4][2] =
9352 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9353 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9354 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9355 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9356 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9357 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9358 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9359 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9362 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9364 return opcodes[which_alternative][swap];
9366 [(set_attr "conds" "set")
9367 (set_attr "predicable" "no")
9368 (set_attr "length" "8")]
9371 (define_insn "*cmp_ior"
9372 [(set (match_operand 6 "dominant_cc_register" "")
9375 (match_operator 4 "arm_comparison_operator"
9376 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9377 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9378 (match_operator:SI 5 "arm_comparison_operator"
9379 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9380 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9385 static const char *const opcodes[4][2] =
9387 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9388 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9389 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9390 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9391 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9392 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9393 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9394 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9397 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9399 return opcodes[which_alternative][swap];
9402 [(set_attr "conds" "set")
9403 (set_attr "length" "8")]
9406 (define_insn_and_split "*ior_scc_scc"
9407 [(set (match_operand:SI 0 "s_register_operand" "=r")
9408 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9409 [(match_operand:SI 1 "s_register_operand" "r")
9410 (match_operand:SI 2 "arm_add_operand" "rIL")])
9411 (match_operator:SI 6 "arm_comparison_operator"
9412 [(match_operand:SI 4 "s_register_operand" "r")
9413 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9414 (clobber (reg:CC CC_REGNUM))]
9416 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9419 "TARGET_ARM && reload_completed"
9423 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9424 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9426 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9428 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9431 [(set_attr "conds" "clob")
9432 (set_attr "length" "16")])
9434 ; If the above pattern is followed by a CMP insn, then the compare is
9435 ; redundant, since we can rework the conditional instruction that follows.
9436 (define_insn_and_split "*ior_scc_scc_cmp"
9437 [(set (match_operand 0 "dominant_cc_register" "")
9438 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9439 [(match_operand:SI 1 "s_register_operand" "r")
9440 (match_operand:SI 2 "arm_add_operand" "rIL")])
9441 (match_operator:SI 6 "arm_comparison_operator"
9442 [(match_operand:SI 4 "s_register_operand" "r")
9443 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9445 (set (match_operand:SI 7 "s_register_operand" "=r")
9446 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9447 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9450 "TARGET_ARM && reload_completed"
9454 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9455 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9457 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9459 [(set_attr "conds" "set")
9460 (set_attr "length" "16")])
9462 (define_insn_and_split "*and_scc_scc"
9463 [(set (match_operand:SI 0 "s_register_operand" "=r")
9464 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9465 [(match_operand:SI 1 "s_register_operand" "r")
9466 (match_operand:SI 2 "arm_add_operand" "rIL")])
9467 (match_operator:SI 6 "arm_comparison_operator"
9468 [(match_operand:SI 4 "s_register_operand" "r")
9469 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9470 (clobber (reg:CC CC_REGNUM))]
9472 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9475 "TARGET_ARM && reload_completed
9476 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9481 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9482 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9484 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9486 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9489 [(set_attr "conds" "clob")
9490 (set_attr "length" "16")])
9492 ; If the above pattern is followed by a CMP insn, then the compare is
9493 ; redundant, since we can rework the conditional instruction that follows.
9494 (define_insn_and_split "*and_scc_scc_cmp"
9495 [(set (match_operand 0 "dominant_cc_register" "")
9496 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9497 [(match_operand:SI 1 "s_register_operand" "r")
9498 (match_operand:SI 2 "arm_add_operand" "rIL")])
9499 (match_operator:SI 6 "arm_comparison_operator"
9500 [(match_operand:SI 4 "s_register_operand" "r")
9501 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9503 (set (match_operand:SI 7 "s_register_operand" "=r")
9504 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9505 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9508 "TARGET_ARM && reload_completed"
9512 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9513 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9515 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9517 [(set_attr "conds" "set")
9518 (set_attr "length" "16")])
9520 ;; If there is no dominance in the comparison, then we can still save an
9521 ;; instruction in the AND case, since we can know that the second compare
9522 ;; need only zero the value if false (if true, then the value is already
9524 (define_insn_and_split "*and_scc_scc_nodom"
9525 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9526 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9527 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9528 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9529 (match_operator:SI 6 "arm_comparison_operator"
9530 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9531 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9532 (clobber (reg:CC CC_REGNUM))]
9534 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9537 "TARGET_ARM && reload_completed"
9538 [(parallel [(set (match_dup 0)
9539 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9540 (clobber (reg:CC CC_REGNUM))])
9541 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9543 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9546 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9547 operands[4], operands[5]),
9549 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9551 [(set_attr "conds" "clob")
9552 (set_attr "length" "20")])
9555 [(set (reg:CC_NOOV CC_REGNUM)
9556 (compare:CC_NOOV (ior:SI
9557 (and:SI (match_operand:SI 0 "s_register_operand" "")
9559 (match_operator:SI 1 "arm_comparison_operator"
9560 [(match_operand:SI 2 "s_register_operand" "")
9561 (match_operand:SI 3 "arm_add_operand" "")]))
9563 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9566 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9568 (set (reg:CC_NOOV CC_REGNUM)
9569 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9574 [(set (reg:CC_NOOV CC_REGNUM)
9575 (compare:CC_NOOV (ior:SI
9576 (match_operator:SI 1 "arm_comparison_operator"
9577 [(match_operand:SI 2 "s_register_operand" "")
9578 (match_operand:SI 3 "arm_add_operand" "")])
9579 (and:SI (match_operand:SI 0 "s_register_operand" "")
9582 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9585 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9587 (set (reg:CC_NOOV CC_REGNUM)
9588 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9591 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Store the negated comparison result: operand0 = -(cmp ? 1 : 0),
;; i.e. all-ones when the comparison holds, zero otherwise.  Clobbers
;; the condition codes.  Fast cases: LT against #0 is an arithmetic
;; shift right of the sign bit; NE uses a flag-setting subtract followed
;; by a conditional mvn.  General case: cmp, then conditionally move
;; #0 / #-1 (mvn of #0).
;; NOTE(review): the insn condition and C-fragment braces (original
;; lines 9599-9600, etc.) are missing from this extract.
9593 (define_insn "*negscc"
9594 [(set (match_operand:SI 0 "s_register_operand" "=r")
9595 (neg:SI (match_operator 3 "arm_comparison_operator"
9596 [(match_operand:SI 1 "s_register_operand" "r")
9597 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9598 (clobber (reg:CC CC_REGNUM))]
9601 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9602 return \"mov\\t%0, %1, asr #31\";
9604 if (GET_CODE (operands[3]) == NE)
9605 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9607 output_asm_insn (\"cmp\\t%1, %2\", operands);
9608 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9609 return \"mvn%d3\\t%0, #0\";
9611 [(set_attr "conds" "clob")
9612 (set_attr "length" "12")]
9615 (define_insn "movcond"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9618 (match_operator 5 "arm_comparison_operator"
9619 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9620 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9621 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9622 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9623 (clobber (reg:CC CC_REGNUM))]
9626 if (GET_CODE (operands[5]) == LT
9627 && (operands[4] == const0_rtx))
9629 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9631 if (operands[2] == const0_rtx)
9632 return \"and\\t%0, %1, %3, asr #31\";
9633 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9635 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9637 if (operands[1] == const0_rtx)
9638 return \"bic\\t%0, %2, %3, asr #31\";
9639 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9641 /* The only case that falls through to here is when both ops 1 & 2
9645 if (GET_CODE (operands[5]) == GE
9646 && (operands[4] == const0_rtx))
9648 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9650 if (operands[2] == const0_rtx)
9651 return \"bic\\t%0, %1, %3, asr #31\";
9652 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9654 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9656 if (operands[1] == const0_rtx)
9657 return \"and\\t%0, %2, %3, asr #31\";
9658 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9660 /* The only case that falls through to here is when both ops 1 & 2
9663 if (GET_CODE (operands[4]) == CONST_INT
9664 && !const_ok_for_arm (INTVAL (operands[4])))
9665 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9667 output_asm_insn (\"cmp\\t%3, %4\", operands);
9668 if (which_alternative != 0)
9669 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9670 if (which_alternative != 1)
9671 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9674 [(set_attr "conds" "clob")
9675 (set_attr "length" "8,8,12")]
9678 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9680 (define_insn "*ifcompare_plus_move"
9681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9682 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9683 [(match_operand:SI 4 "s_register_operand" "r,r")
9684 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9686 (match_operand:SI 2 "s_register_operand" "r,r")
9687 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9688 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9689 (clobber (reg:CC CC_REGNUM))]
9692 [(set_attr "conds" "clob")
9693 (set_attr "length" "8,12")]
9696 (define_insn "*if_plus_move"
9697 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9699 (match_operator 4 "arm_comparison_operator"
9700 [(match_operand 5 "cc_register" "") (const_int 0)])
9702 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9703 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9704 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9708 sub%d4\\t%0, %2, #%n3
9709 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9710 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9711 [(set_attr "conds" "use")
9712 (set_attr "length" "4,4,8,8")
9713 (set_attr "type" "*,*,*,*")]
9716 (define_insn "*ifcompare_move_plus"
9717 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9718 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9719 [(match_operand:SI 4 "s_register_operand" "r,r")
9720 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9721 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9723 (match_operand:SI 2 "s_register_operand" "r,r")
9724 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9725 (clobber (reg:CC CC_REGNUM))]
9728 [(set_attr "conds" "clob")
9729 (set_attr "length" "8,12")]
9732 (define_insn "*if_move_plus"
9733 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9735 (match_operator 4 "arm_comparison_operator"
9736 [(match_operand 5 "cc_register" "") (const_int 0)])
9737 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9739 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9740 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9744 sub%D4\\t%0, %2, #%n3
9745 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9746 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9747 [(set_attr "conds" "use")
9748 (set_attr "length" "4,4,8,8")
9749 (set_attr "type" "*,*,*,*")]
9752 (define_insn "*ifcompare_arith_arith"
9753 [(set (match_operand:SI 0 "s_register_operand" "=r")
9754 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9755 [(match_operand:SI 5 "s_register_operand" "r")
9756 (match_operand:SI 6 "arm_add_operand" "rIL")])
9757 (match_operator:SI 8 "shiftable_operator"
9758 [(match_operand:SI 1 "s_register_operand" "r")
9759 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9760 (match_operator:SI 7 "shiftable_operator"
9761 [(match_operand:SI 3 "s_register_operand" "r")
9762 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9763 (clobber (reg:CC CC_REGNUM))]
9766 [(set_attr "conds" "clob")
9767 (set_attr "length" "12")]
9770 (define_insn "*if_arith_arith"
9771 [(set (match_operand:SI 0 "s_register_operand" "=r")
9772 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9773 [(match_operand 8 "cc_register" "") (const_int 0)])
9774 (match_operator:SI 6 "shiftable_operator"
9775 [(match_operand:SI 1 "s_register_operand" "r")
9776 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9777 (match_operator:SI 7 "shiftable_operator"
9778 [(match_operand:SI 3 "s_register_operand" "r")
9779 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9781 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9782 [(set_attr "conds" "use")
9783 (set_attr "length" "8")]
9786 (define_insn "*ifcompare_arith_move"
9787 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9788 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9789 [(match_operand:SI 2 "s_register_operand" "r,r")
9790 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9791 (match_operator:SI 7 "shiftable_operator"
9792 [(match_operand:SI 4 "s_register_operand" "r,r")
9793 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9794 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9795 (clobber (reg:CC CC_REGNUM))]
9798 /* If we have an operation where (op x 0) is the identity operation and
9799 the conditional operator is LT or GE and we are comparing against zero and
9800 everything is in registers then we can do this in two instructions. */
9801 if (operands[3] == const0_rtx
9802 && GET_CODE (operands[7]) != AND
9803 && GET_CODE (operands[5]) == REG
9804 && GET_CODE (operands[1]) == REG
9805 && REGNO (operands[1]) == REGNO (operands[4])
9806 && REGNO (operands[4]) != REGNO (operands[0]))
9808 if (GET_CODE (operands[6]) == LT)
9809 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9810 else if (GET_CODE (operands[6]) == GE)
9811 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9813 if (GET_CODE (operands[3]) == CONST_INT
9814 && !const_ok_for_arm (INTVAL (operands[3])))
9815 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9817 output_asm_insn (\"cmp\\t%2, %3\", operands);
9818 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9819 if (which_alternative != 0)
9820 return \"mov%D6\\t%0, %1\";
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "8,12")]
9827 (define_insn "*if_arith_move"
9828 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9829 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9830 [(match_operand 6 "cc_register" "") (const_int 0)])
9831 (match_operator:SI 5 "shiftable_operator"
9832 [(match_operand:SI 2 "s_register_operand" "r,r")
9833 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9834 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9838 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9839 [(set_attr "conds" "use")
9840 (set_attr "length" "4,8")
9841 (set_attr "type" "*,*")]
9844 (define_insn "*ifcompare_move_arith"
9845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9846 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9847 [(match_operand:SI 4 "s_register_operand" "r,r")
9848 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9849 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9850 (match_operator:SI 7 "shiftable_operator"
9851 [(match_operand:SI 2 "s_register_operand" "r,r")
9852 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9853 (clobber (reg:CC CC_REGNUM))]
9856 /* If we have an operation where (op x 0) is the identity operation and
9857 the conditional operator is LT or GE and we are comparing against zero and
9858 everything is in registers then we can do this in two instructions */
9859 if (operands[5] == const0_rtx
9860 && GET_CODE (operands[7]) != AND
9861 && GET_CODE (operands[3]) == REG
9862 && GET_CODE (operands[1]) == REG
9863 && REGNO (operands[1]) == REGNO (operands[2])
9864 && REGNO (operands[2]) != REGNO (operands[0]))
9866 if (GET_CODE (operands[6]) == GE)
9867 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9868 else if (GET_CODE (operands[6]) == LT)
9869 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9872 if (GET_CODE (operands[5]) == CONST_INT
9873 && !const_ok_for_arm (INTVAL (operands[5])))
9874 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9876 output_asm_insn (\"cmp\\t%4, %5\", operands);
9878 if (which_alternative != 0)
9879 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9880 return \"%I7%D6\\t%0, %2, %3\";
9882 [(set_attr "conds" "clob")
9883 (set_attr "length" "8,12")]
9886 (define_insn "*if_move_arith"
9887 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9889 (match_operator 4 "arm_comparison_operator"
9890 [(match_operand 6 "cc_register" "") (const_int 0)])
9891 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9892 (match_operator:SI 5 "shiftable_operator"
9893 [(match_operand:SI 2 "s_register_operand" "r,r")
9894 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9898 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9899 [(set_attr "conds" "use")
9900 (set_attr "length" "4,8")
9901 (set_attr "type" "*,*")]
9904 (define_insn "*ifcompare_move_not"
9905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9907 (match_operator 5 "arm_comparison_operator"
9908 [(match_operand:SI 3 "s_register_operand" "r,r")
9909 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9910 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9912 (match_operand:SI 2 "s_register_operand" "r,r"))))
9913 (clobber (reg:CC CC_REGNUM))]
9916 [(set_attr "conds" "clob")
9917 (set_attr "length" "8,12")]
9920 (define_insn "*if_move_not"
9921 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9923 (match_operator 4 "arm_comparison_operator"
9924 [(match_operand 3 "cc_register" "") (const_int 0)])
9925 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9926 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9930 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9931 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9932 [(set_attr "conds" "use")
9933 (set_attr "length" "4,8,8")]
9936 (define_insn "*ifcompare_not_move"
9937 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9939 (match_operator 5 "arm_comparison_operator"
9940 [(match_operand:SI 3 "s_register_operand" "r,r")
9941 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9943 (match_operand:SI 2 "s_register_operand" "r,r"))
9944 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9945 (clobber (reg:CC CC_REGNUM))]
9948 [(set_attr "conds" "clob")
9949 (set_attr "length" "8,12")]
9952 (define_insn "*if_not_move"
9953 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9955 (match_operator 4 "arm_comparison_operator"
9956 [(match_operand 3 "cc_register" "") (const_int 0)])
9957 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9958 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9962 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9963 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9964 [(set_attr "conds" "use")
9965 (set_attr "length" "4,8,8")]
9968 (define_insn "*ifcompare_shift_move"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9971 (match_operator 6 "arm_comparison_operator"
9972 [(match_operand:SI 4 "s_register_operand" "r,r")
9973 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9974 (match_operator:SI 7 "shift_operator"
9975 [(match_operand:SI 2 "s_register_operand" "r,r")
9976 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9977 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9978 (clobber (reg:CC CC_REGNUM))]
9981 [(set_attr "conds" "clob")
9982 (set_attr "length" "8,12")]
9985 (define_insn "*if_shift_move"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9988 (match_operator 5 "arm_comparison_operator"
9989 [(match_operand 6 "cc_register" "") (const_int 0)])
9990 (match_operator:SI 4 "shift_operator"
9991 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9992 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9993 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9997 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9998 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9999 [(set_attr "conds" "use")
10000 (set_attr "shift" "2")
10001 (set_attr "length" "4,8,8")
10002 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10003 (const_string "alu_shift")
10004 (const_string "alu_shift_reg")))]
10007 (define_insn "*ifcompare_move_shift"
10008 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10010 (match_operator 6 "arm_comparison_operator"
10011 [(match_operand:SI 4 "s_register_operand" "r,r")
10012 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10013 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10014 (match_operator:SI 7 "shift_operator"
10015 [(match_operand:SI 2 "s_register_operand" "r,r")
10016 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10017 (clobber (reg:CC CC_REGNUM))]
10020 [(set_attr "conds" "clob")
10021 (set_attr "length" "8,12")]
10024 (define_insn "*if_move_shift"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10027 (match_operator 5 "arm_comparison_operator"
10028 [(match_operand 6 "cc_register" "") (const_int 0)])
10029 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10030 (match_operator:SI 4 "shift_operator"
10031 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10032 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10036 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10037 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10038 [(set_attr "conds" "use")
10039 (set_attr "shift" "2")
10040 (set_attr "length" "4,8,8")
10041 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10042 (const_string "alu_shift")
10043 (const_string "alu_shift_reg")))]
10046 (define_insn "*ifcompare_shift_shift"
10047 [(set (match_operand:SI 0 "s_register_operand" "=r")
10049 (match_operator 7 "arm_comparison_operator"
10050 [(match_operand:SI 5 "s_register_operand" "r")
10051 (match_operand:SI 6 "arm_add_operand" "rIL")])
10052 (match_operator:SI 8 "shift_operator"
10053 [(match_operand:SI 1 "s_register_operand" "r")
10054 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10055 (match_operator:SI 9 "shift_operator"
10056 [(match_operand:SI 3 "s_register_operand" "r")
10057 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10058 (clobber (reg:CC CC_REGNUM))]
10061 [(set_attr "conds" "clob")
10062 (set_attr "length" "12")]
10065 (define_insn "*if_shift_shift"
10066 [(set (match_operand:SI 0 "s_register_operand" "=r")
10068 (match_operator 5 "arm_comparison_operator"
10069 [(match_operand 8 "cc_register" "") (const_int 0)])
10070 (match_operator:SI 6 "shift_operator"
10071 [(match_operand:SI 1 "s_register_operand" "r")
10072 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10073 (match_operator:SI 7 "shift_operator"
10074 [(match_operand:SI 3 "s_register_operand" "r")
10075 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10077 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10078 [(set_attr "conds" "use")
10079 (set_attr "shift" "1")
10080 (set_attr "length" "8")
10081 (set (attr "type") (if_then_else
10082 (and (match_operand 2 "const_int_operand" "")
10083 (match_operand 4 "const_int_operand" ""))
10084 (const_string "alu_shift")
10085 (const_string "alu_shift_reg")))]
10088 (define_insn "*ifcompare_not_arith"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r")
10091 (match_operator 6 "arm_comparison_operator"
10092 [(match_operand:SI 4 "s_register_operand" "r")
10093 (match_operand:SI 5 "arm_add_operand" "rIL")])
10094 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10095 (match_operator:SI 7 "shiftable_operator"
10096 [(match_operand:SI 2 "s_register_operand" "r")
10097 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10098 (clobber (reg:CC CC_REGNUM))]
10101 [(set_attr "conds" "clob")
10102 (set_attr "length" "12")]
10105 (define_insn "*if_not_arith"
10106 [(set (match_operand:SI 0 "s_register_operand" "=r")
10108 (match_operator 5 "arm_comparison_operator"
10109 [(match_operand 4 "cc_register" "") (const_int 0)])
10110 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10111 (match_operator:SI 6 "shiftable_operator"
10112 [(match_operand:SI 2 "s_register_operand" "r")
10113 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10115 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10116 [(set_attr "conds" "use")
10117 (set_attr "length" "8")]
10120 (define_insn "*ifcompare_arith_not"
10121 [(set (match_operand:SI 0 "s_register_operand" "=r")
10123 (match_operator 6 "arm_comparison_operator"
10124 [(match_operand:SI 4 "s_register_operand" "r")
10125 (match_operand:SI 5 "arm_add_operand" "rIL")])
10126 (match_operator:SI 7 "shiftable_operator"
10127 [(match_operand:SI 2 "s_register_operand" "r")
10128 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10129 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10130 (clobber (reg:CC CC_REGNUM))]
10133 [(set_attr "conds" "clob")
10134 (set_attr "length" "12")]
10137 (define_insn "*if_arith_not"
10138 [(set (match_operand:SI 0 "s_register_operand" "=r")
10140 (match_operator 5 "arm_comparison_operator"
10141 [(match_operand 4 "cc_register" "") (const_int 0)])
10142 (match_operator:SI 6 "shiftable_operator"
10143 [(match_operand:SI 2 "s_register_operand" "r")
10144 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10145 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10147 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10148 [(set_attr "conds" "use")
10149 (set_attr "length" "8")]
10152 (define_insn "*ifcompare_neg_move"
10153 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10155 (match_operator 5 "arm_comparison_operator"
10156 [(match_operand:SI 3 "s_register_operand" "r,r")
10157 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10158 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10159 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10160 (clobber (reg:CC CC_REGNUM))]
10163 [(set_attr "conds" "clob")
10164 (set_attr "length" "8,12")]
10167 (define_insn "*if_neg_move"
10168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10170 (match_operator 4 "arm_comparison_operator"
10171 [(match_operand 3 "cc_register" "") (const_int 0)])
10172 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10173 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10176 rsb%d4\\t%0, %2, #0
10177 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10178 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10179 [(set_attr "conds" "use")
10180 (set_attr "length" "4,8,8")]
10183 (define_insn "*ifcompare_move_neg"
10184 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10186 (match_operator 5 "arm_comparison_operator"
10187 [(match_operand:SI 3 "s_register_operand" "r,r")
10188 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10189 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10190 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10191 (clobber (reg:CC CC_REGNUM))]
10194 [(set_attr "conds" "clob")
10195 (set_attr "length" "8,12")]
10198 (define_insn "*if_move_neg"
10199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10201 (match_operator 4 "arm_comparison_operator"
10202 [(match_operand 3 "cc_register" "") (const_int 0)])
10203 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10204 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10207 rsb%D4\\t%0, %2, #0
10208 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10209 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10210 [(set_attr "conds" "use")
10211 (set_attr "length" "4,8,8")]
10214 (define_insn "*arith_adjacentmem"
10215 [(set (match_operand:SI 0 "s_register_operand" "=r")
10216 (match_operator:SI 1 "shiftable_operator"
10217 [(match_operand:SI 2 "memory_operand" "m")
10218 (match_operand:SI 3 "memory_operand" "m")]))
10219 (clobber (match_scratch:SI 4 "=r"))]
10220 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10226 HOST_WIDE_INT val1 = 0, val2 = 0;
10228 if (REGNO (operands[0]) > REGNO (operands[4]))
10230 ldm[1] = operands[4];
10231 ldm[2] = operands[0];
10235 ldm[1] = operands[0];
10236 ldm[2] = operands[4];
10239 base_reg = XEXP (operands[2], 0);
10241 if (!REG_P (base_reg))
10243 val1 = INTVAL (XEXP (base_reg, 1));
10244 base_reg = XEXP (base_reg, 0);
10247 if (!REG_P (XEXP (operands[3], 0)))
10248 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10250 arith[0] = operands[0];
10251 arith[3] = operands[1];
10265 if (val1 !=0 && val2 != 0)
10269 if (val1 == 4 || val2 == 4)
10270 /* Other val must be 8, since we know they are adjacent and neither
10272 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10273 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10275 ldm[0] = ops[0] = operands[4];
10277 ops[2] = GEN_INT (val1);
10278 output_add_immediate (ops);
10280 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10282 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10286 /* Offset is out of range for a single add, so use two ldr. */
10289 ops[2] = GEN_INT (val1);
10290 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10292 ops[2] = GEN_INT (val2);
10293 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10296 else if (val1 != 0)
10299 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10301 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10306 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10308 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10310 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10313 [(set_attr "length" "12")
10314 (set_attr "predicable" "yes")
10315 (set_attr "type" "load1")]
10318 ; This pattern is never tried by combine, so do it as a peephole
10321 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10322 (match_operand:SI 1 "arm_general_register_operand" ""))
10323 (set (reg:CC CC_REGNUM)
10324 (compare:CC (match_dup 1) (const_int 0)))]
10326 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10327 (set (match_dup 0) (match_dup 1))])]
10331 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10332 ; reversed, check that the memory references aren't volatile.
10335 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10336 (match_operand:SI 4 "memory_operand" "m"))
10337 (set (match_operand:SI 1 "s_register_operand" "=rk")
10338 (match_operand:SI 5 "memory_operand" "m"))
10339 (set (match_operand:SI 2 "s_register_operand" "=rk")
10340 (match_operand:SI 6 "memory_operand" "m"))
10341 (set (match_operand:SI 3 "s_register_operand" "=rk")
10342 (match_operand:SI 7 "memory_operand" "m"))]
10343 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10345 return emit_ldm_seq (operands, 4);
10350 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10351 (match_operand:SI 3 "memory_operand" "m"))
10352 (set (match_operand:SI 1 "s_register_operand" "=rk")
10353 (match_operand:SI 4 "memory_operand" "m"))
10354 (set (match_operand:SI 2 "s_register_operand" "=rk")
10355 (match_operand:SI 5 "memory_operand" "m"))]
10356 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10358 return emit_ldm_seq (operands, 3);
10363 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10364 (match_operand:SI 2 "memory_operand" "m"))
10365 (set (match_operand:SI 1 "s_register_operand" "=rk")
10366 (match_operand:SI 3 "memory_operand" "m"))]
10367 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10369 return emit_ldm_seq (operands, 2);
10374 [(set (match_operand:SI 4 "memory_operand" "=m")
10375 (match_operand:SI 0 "s_register_operand" "rk"))
10376 (set (match_operand:SI 5 "memory_operand" "=m")
10377 (match_operand:SI 1 "s_register_operand" "rk"))
10378 (set (match_operand:SI 6 "memory_operand" "=m")
10379 (match_operand:SI 2 "s_register_operand" "rk"))
10380 (set (match_operand:SI 7 "memory_operand" "=m")
10381 (match_operand:SI 3 "s_register_operand" "rk"))]
10382 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10384 return emit_stm_seq (operands, 4);
10389 [(set (match_operand:SI 3 "memory_operand" "=m")
10390 (match_operand:SI 0 "s_register_operand" "rk"))
10391 (set (match_operand:SI 4 "memory_operand" "=m")
10392 (match_operand:SI 1 "s_register_operand" "rk"))
10393 (set (match_operand:SI 5 "memory_operand" "=m")
10394 (match_operand:SI 2 "s_register_operand" "rk"))]
10395 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10397 return emit_stm_seq (operands, 3);
10402 [(set (match_operand:SI 2 "memory_operand" "=m")
10403 (match_operand:SI 0 "s_register_operand" "rk"))
10404 (set (match_operand:SI 3 "memory_operand" "=m")
10405 (match_operand:SI 1 "s_register_operand" "rk"))]
10406 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10408 return emit_stm_seq (operands, 2);
10413 [(set (match_operand:SI 0 "s_register_operand" "")
10414 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10416 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10417 [(match_operand:SI 3 "s_register_operand" "")
10418 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10419 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10421 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10422 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10427 ;; This split can be used because CC_Z mode implies that the following
10428 ;; branch will be an equality, or an unsigned inequality, so the sign
10429 ;; extension is not needed.
10432 [(set (reg:CC_Z CC_REGNUM)
10434 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10436 (match_operand 1 "const_int_operand" "")))
10437 (clobber (match_scratch:SI 2 ""))]
10439 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10440 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10441 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10442 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10444 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10447 ;; ??? Check the patterns above for Thumb-2 usefulness
10449 (define_expand "prologue"
10450 [(clobber (const_int 0))]
10453 arm_expand_prologue ();
10455 thumb1_expand_prologue ();
10460 (define_expand "epilogue"
10461 [(clobber (const_int 0))]
10464 if (crtl->calls_eh_return)
10465 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10467 thumb1_expand_epilogue ();
10468 else if (USE_RETURN_INSN (FALSE))
10470 emit_jump_insn (gen_return ());
10473 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10475 gen_rtx_RETURN (VOIDmode)),
10476 VUNSPEC_EPILOGUE));
10481 ;; Note - although unspec_volatile's USE all hard registers,
10482 ;; USEs are ignored after relaod has completed. Thus we need
10483 ;; to add an unspec of the link register to ensure that flow
10484 ;; does not think that it is unused by the sibcall branch that
10485 ;; will replace the standard function epilogue.
10486 (define_insn "sibcall_epilogue"
10487 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10488 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10491 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10492 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10493 return arm_output_epilogue (next_nonnote_insn (insn));
10495 ;; Length is absolute worst case
10496 [(set_attr "length" "44")
10497 (set_attr "type" "block")
10498 ;; We don't clobber the conditions, but the potential length of this
10499 ;; operation is sufficient to make conditionalizing the sequence
10500 ;; unlikely to be profitable.
10501 (set_attr "conds" "clob")]
10504 (define_insn "*epilogue_insns"
10505 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10509 return arm_output_epilogue (NULL);
10510 else /* TARGET_THUMB1 */
10511 return thumb_unexpanded_epilogue ();
10513 ; Length is absolute worst case
10514 [(set_attr "length" "44")
10515 (set_attr "type" "block")
10516 ;; We don't clobber the conditions, but the potential length of this
10517 ;; operation is sufficient to make conditionalizing the sequence
10518 ;; unlikely to be profitable.
10519 (set_attr "conds" "clob")]
10522 (define_expand "eh_epilogue"
10523 [(use (match_operand:SI 0 "register_operand" ""))
10524 (use (match_operand:SI 1 "register_operand" ""))
10525 (use (match_operand:SI 2 "register_operand" ""))]
10529 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10530 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10532 rtx ra = gen_rtx_REG (Pmode, 2);
10534 emit_move_insn (ra, operands[2]);
10537 /* This is a hack -- we may have crystalized the function type too
10539 cfun->machine->func_type = 0;
10543 ;; This split is only used during output to reduce the number of patterns
10544 ;; that need assembler instructions adding to them. We allowed the setting
10545 ;; of the conditions to be implicit during rtl generation so that
10546 ;; the conditional compare patterns would work. However this conflicts to
10547 ;; some extent with the conditional data operations, so we have to split them
10550 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10551 ;; conditional execution sufficient?
10554 [(set (match_operand:SI 0 "s_register_operand" "")
10555 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10556 [(match_operand 2 "" "") (match_operand 3 "" "")])
10558 (match_operand 4 "" "")))
10559 (clobber (reg:CC CC_REGNUM))]
10560 "TARGET_ARM && reload_completed"
10561 [(set (match_dup 5) (match_dup 6))
10562 (cond_exec (match_dup 7)
10563 (set (match_dup 0) (match_dup 4)))]
10566 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10567 operands[2], operands[3]);
10568 enum rtx_code rc = GET_CODE (operands[1]);
10570 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10571 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10572 if (mode == CCFPmode || mode == CCFPEmode)
10573 rc = reverse_condition_maybe_unordered (rc);
10575 rc = reverse_condition (rc);
10577 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10582 [(set (match_operand:SI 0 "s_register_operand" "")
10583 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10584 [(match_operand 2 "" "") (match_operand 3 "" "")])
10585 (match_operand 4 "" "")
10587 (clobber (reg:CC CC_REGNUM))]
10588 "TARGET_ARM && reload_completed"
10589 [(set (match_dup 5) (match_dup 6))
10590 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10591 (set (match_dup 0) (match_dup 4)))]
10594 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10595 operands[2], operands[3]);
10597 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10598 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10603 [(set (match_operand:SI 0 "s_register_operand" "")
10604 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10605 [(match_operand 2 "" "") (match_operand 3 "" "")])
10606 (match_operand 4 "" "")
10607 (match_operand 5 "" "")))
10608 (clobber (reg:CC CC_REGNUM))]
10609 "TARGET_ARM && reload_completed"
10610 [(set (match_dup 6) (match_dup 7))
10611 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10612 (set (match_dup 0) (match_dup 4)))
10613 (cond_exec (match_dup 8)
10614 (set (match_dup 0) (match_dup 5)))]
10617 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10618 operands[2], operands[3]);
10619 enum rtx_code rc = GET_CODE (operands[1]);
10621 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10622 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10623 if (mode == CCFPmode || mode == CCFPEmode)
10624 rc = reverse_condition_maybe_unordered (rc);
10626 rc = reverse_condition (rc);
10628 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10633 [(set (match_operand:SI 0 "s_register_operand" "")
10634 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10635 [(match_operand:SI 2 "s_register_operand" "")
10636 (match_operand:SI 3 "arm_add_operand" "")])
10637 (match_operand:SI 4 "arm_rhs_operand" "")
10639 (match_operand:SI 5 "s_register_operand" ""))))
10640 (clobber (reg:CC CC_REGNUM))]
10641 "TARGET_ARM && reload_completed"
10642 [(set (match_dup 6) (match_dup 7))
10643 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10644 (set (match_dup 0) (match_dup 4)))
10645 (cond_exec (match_dup 8)
10646 (set (match_dup 0) (not:SI (match_dup 5))))]
10649 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10650 operands[2], operands[3]);
10651 enum rtx_code rc = GET_CODE (operands[1]);
10653 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10654 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10655 if (mode == CCFPmode || mode == CCFPEmode)
10656 rc = reverse_condition_maybe_unordered (rc);
10658 rc = reverse_condition (rc);
10660 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10664 (define_insn "*cond_move_not"
10665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10666 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10667 [(match_operand 3 "cc_register" "") (const_int 0)])
10668 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10670 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10674 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10675 [(set_attr "conds" "use")
10676 (set_attr "length" "4,8")]
10679 ;; The next two patterns occur when an AND operation is followed by a
10680 ;; scc insn sequence
10682 (define_insn "*sign_extract_onebit"
10683 [(set (match_operand:SI 0 "s_register_operand" "=r")
10684 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10686 (match_operand:SI 2 "const_int_operand" "n")))
10687 (clobber (reg:CC CC_REGNUM))]
10690 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10691 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10692 return \"mvnne\\t%0, #0\";
10694 [(set_attr "conds" "clob")
10695 (set_attr "length" "8")]
10698 (define_insn "*not_signextract_onebit"
10699 [(set (match_operand:SI 0 "s_register_operand" "=r")
10701 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10703 (match_operand:SI 2 "const_int_operand" "n"))))
10704 (clobber (reg:CC CC_REGNUM))]
10707 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10708 output_asm_insn (\"tst\\t%1, %2\", operands);
10709 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10710 return \"movne\\t%0, #0\";
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "12")]
10715 ;; ??? The above patterns need auditing for Thumb-2
10717 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10718 ;; expressions. For simplicity, the first register is also in the unspec
10720 (define_insn "*push_multi"
10721 [(match_parallel 2 "multi_register_push"
10722 [(set (match_operand:BLK 0 "memory_operand" "=m")
10723 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10724 UNSPEC_PUSH_MULT))])]
10728 int num_saves = XVECLEN (operands[2], 0);
10730 /* For the StrongARM at least it is faster to
10731 use STR to store only a single register.
10732 In Thumb mode always use push, and the assembler will pick
10733 something appropriate. */
10734 if (num_saves == 1 && TARGET_ARM)
10735 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10742 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10744 strcpy (pattern, \"push\\t{%1\");
10746 for (i = 1; i < num_saves; i++)
10748 strcat (pattern, \", %|\");
10750 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10753 strcat (pattern, \"}\");
10754 output_asm_insn (pattern, operands);
10759 [(set_attr "type" "store4")]
10762 (define_insn "stack_tie"
10763 [(set (mem:BLK (scratch))
10764 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10765 (match_operand:SI 1 "s_register_operand" "rk")]
10769 [(set_attr "length" "0")]
10772 ;; Similarly for the floating point registers
10773 (define_insn "*push_fp_multi"
10774 [(match_parallel 2 "multi_register_push"
10775 [(set (match_operand:BLK 0 "memory_operand" "=m")
10776 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10777 UNSPEC_PUSH_MULT))])]
10778 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10783 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10784 output_asm_insn (pattern, operands);
10787 [(set_attr "type" "f_store")]
10790 ;; Special patterns for dealing with the constant pool
10792 (define_insn "align_4"
10793 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10796 assemble_align (32);
10801 (define_insn "align_8"
10802 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10805 assemble_align (64);
10810 (define_insn "consttable_end"
10811 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10814 making_const_table = FALSE;
10819 (define_insn "consttable_1"
10820 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10823 making_const_table = TRUE;
10824 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10825 assemble_zeros (3);
10828 [(set_attr "length" "4")]
10831 (define_insn "consttable_2"
10832 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10835 making_const_table = TRUE;
10836 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10837 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10838 assemble_zeros (2);
10841 [(set_attr "length" "4")]
10844 (define_insn "consttable_4"
10845 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10849 rtx x = operands[0];
10850 making_const_table = TRUE;
10851 switch (GET_MODE_CLASS (GET_MODE (x)))
10854 if (GET_MODE (x) == HFmode)
10855 arm_emit_fp16_const (x);
10859 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10860 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10864 assemble_integer (x, 4, BITS_PER_WORD, 1);
10865 mark_symbol_refs_as_used (x);
10870 [(set_attr "length" "4")]
10873 (define_insn "consttable_8"
10874 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10878 making_const_table = TRUE;
10879 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10884 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10885 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10889 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10894 [(set_attr "length" "8")]
10897 (define_insn "consttable_16"
10898 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10902 making_const_table = TRUE;
10903 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10908 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10909 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10913 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10918 [(set_attr "length" "16")]
10921 ;; Miscellaneous Thumb patterns
10923 (define_expand "tablejump"
10924 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10925 (use (label_ref (match_operand 1 "" "")))])]
10930 /* Hopefully, CSE will eliminate this copy. */
10931 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10932 rtx reg2 = gen_reg_rtx (SImode);
10934 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10935 operands[0] = reg2;
10940 ;; NB never uses BX.
10941 (define_insn "*thumb1_tablejump"
10942 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10943 (use (label_ref (match_operand 1 "" "")))]
10946 [(set_attr "length" "2")]
10949 ;; V5 Instructions,
10951 (define_insn "clzsi2"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10954 "TARGET_32BIT && arm_arch5"
10956 [(set_attr "predicable" "yes")
10957 (set_attr "insn" "clz")])
10959 (define_insn "rbitsi2"
10960 [(set (match_operand:SI 0 "s_register_operand" "=r")
10961 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10962 "TARGET_32BIT && arm_arch_thumb2"
10964 [(set_attr "predicable" "yes")
10965 (set_attr "insn" "clz")])
10967 (define_expand "ctzsi2"
10968 [(set (match_operand:SI 0 "s_register_operand" "")
10969 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10970 "TARGET_32BIT && arm_arch_thumb2"
10973 rtx tmp = gen_reg_rtx (SImode);
10974 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10975 emit_insn (gen_clzsi2 (operands[0], tmp));
10981 ;; V5E instructions.
10983 (define_insn "prefetch"
10984 [(prefetch (match_operand:SI 0 "address_operand" "p")
10985 (match_operand:SI 1 "" "")
10986 (match_operand:SI 2 "" ""))]
10987 "TARGET_32BIT && arm_arch5e"
10990 ;; General predication pattern
10993 [(match_operator 0 "arm_comparison_operator"
10994 [(match_operand 1 "cc_register" "")
11000 (define_insn "prologue_use"
11001 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11003 "%@ %0 needed for prologue"
11004 [(set_attr "length" "0")]
11008 ;; Patterns for exception handling
11010 (define_expand "eh_return"
11011 [(use (match_operand 0 "general_operand" ""))]
11016 emit_insn (gen_arm_eh_return (operands[0]));
11018 emit_insn (gen_thumb_eh_return (operands[0]));
11023 ;; We can't expand this before we know where the link register is stored.
11024 (define_insn_and_split "arm_eh_return"
11025 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11027 (clobber (match_scratch:SI 1 "=&r"))]
11030 "&& reload_completed"
11034 arm_set_return_address (operands[0], operands[1]);
11039 (define_insn_and_split "thumb_eh_return"
11040 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11042 (clobber (match_scratch:SI 1 "=&l"))]
11045 "&& reload_completed"
11049 thumb_set_return_address (operands[0], operands[1]);
11057 (define_insn "load_tp_hard"
11058 [(set (match_operand:SI 0 "register_operand" "=r")
11059 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11061 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11062 [(set_attr "predicable" "yes")]
11065 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11066 (define_insn "load_tp_soft"
11067 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11068 (clobber (reg:SI LR_REGNUM))
11069 (clobber (reg:SI IP_REGNUM))
11070 (clobber (reg:CC CC_REGNUM))]
11072 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11073 [(set_attr "conds" "clob")]
11076 (define_insn "*arm_movtas_ze"
11077 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11080 (match_operand:SI 1 "const_int_operand" ""))]
11083 [(set_attr "predicable" "yes")
11084 (set_attr "length" "4")]
11087 ;; Load the FPA co-processor patterns
11089 ;; Load the Maverick co-processor patterns
11090 (include "cirrus.md")
11091 ;; Vector bits common to IWMMXT and Neon
11092 (include "vec-common.md")
11093 ;; Load the Intel Wireless Multimedia Extension patterns
11094 (include "iwmmxt.md")
11095 ;; Load the VFP co-processor patterns
11097 ;; Thumb-2 patterns
11098 (include "thumb2.md")
11100 (include "neon.md")