1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together,
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the the GOT from a
101 ; a given symbolic address.
105 ;; UNSPEC_VOLATILE Usage:
108 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
110 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
111 ; instruction epilogue sequence that isn't expanded
112 ; into normal RTL. Used for both normal and sibcall
114 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
115 ; for inlined constants.
116 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
118 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
120 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
122 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
124 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
126 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
128 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
129 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
130 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
131 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
132 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
133 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
134 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
139 ;;---------------------------------------------------------------------------
142 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
143 ; generating ARM code. This is used to control the length of some insn
144 ; patterns that share the same RTL in both ARM and Thumb code.
145 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
147 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
148 ; scheduling decisions for the load unit and the multiplier.
149 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
151 ; IS_XSCALE is set to 'yes' when compiling for XScale.
152 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
154 ;; Operand number of an input operand that is shifted. Zero if the
155 ;; given instruction does not shift one of its input operands.
156 (define_attr "shift" "" (const_int 0))
158 ; Floating Point Unit. If we only have floating point emulation, then there
159 ; is no point in scheduling the floating point insns. (Well, for best
160 ; performance we should try and group them together).
161 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon"
162 (const (symbol_ref "arm_fpu_attr")))
164 ; LENGTH of an instruction (in bytes)
165 (define_attr "length" "" (const_int 4))
167 ; POOL_RANGE is how far away from a constant pool entry that this insn
168 ; can be placed. If the distance is zero, then this insn will never
169 ; reference the pool.
170 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
171 ; before its address.
172 (define_attr "pool_range" "" (const_int 0))
173 (define_attr "neg_pool_range" "" (const_int 0))
175 ; An assembler sequence may clobber the condition codes without us knowing.
176 ; If such an insn references the pool, then we have no way of knowing how,
177 ; so use the most conservative value for pool_range.
178 (define_asm_attributes
179 [(set_attr "conds" "clob")
180 (set_attr "length" "4")
181 (set_attr "pool_range" "250")])
183 ;; The instruction used to implement a particular pattern. This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
191 ; TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor can run in parallel with other, basic instructions
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
249 ; Load scheduling, set from the arm_ld_sched variable
250 ; initialized by arm_override_options()
251 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
253 ;; Classification of NEON instructions for scheduling purposes.
254 ;; Do not set this attribute and the "type" attribute together in
255 ;; any one instruction pattern.
256 (define_attr "neon_type"
267 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mul_qqq_8_16_32_ddd_32,\
269 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
270 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
272 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
273 neon_mla_qqq_32_qqd_32_scalar,\
274 neon_mul_ddd_16_scalar_32_16_long_scalar,\
275 neon_mul_qqd_32_scalar,\
276 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
281 neon_vqshl_vrshl_vqrshl_qqq,\
283 neon_fp_vadd_ddd_vabs_dd,\
284 neon_fp_vadd_qqq_vabs_qq,\
290 neon_fp_vmla_ddd_scalar,\
291 neon_fp_vmla_qqq_scalar,\
292 neon_fp_vrecps_vrsqrts_ddd,\
293 neon_fp_vrecps_vrsqrts_qqq,\
301 neon_vld2_2_regs_vld1_vld2_all_lanes,\
304 neon_vst1_1_2_regs_vst2_2_regs,\
306 neon_vst2_4_regs_vst3_vst4,\
308 neon_vld1_vld2_lane,\
309 neon_vld3_vld4_lane,\
310 neon_vst1_vst2_lane,\
311 neon_vst3_vst4_lane,\
312 neon_vld3_vld4_all_lanes,\
320 (const_string "none"))
322 ; condition codes: this one is used by final_prescan_insn to speed up
323 ; conditionalizing instructions. It saves having to scan the rtl to see if
324 ; it uses or alters the condition codes.
326 ; USE means that the condition codes are used by the insn in the process of
327 ; outputting code, this means (at present) that we can't use the insn in
330 ; SET means that the purpose of the insn is to set the condition codes in a
331 ; well defined manner.
333 ; CLOB means that the condition codes are altered in an undefined manner, if
334 ; they are altered at all
336 ; JUMP_CLOB is used when the condition cannot be represented by a single
337 ; instruction (UNEQ and LTGT). These cannot be predicated.
339 ; UNCONDITIONAL means the instions can not be conditionally executed.
341 ; NOCOND means that the condition codes are neither altered nor affect the
342 ; output of this insn
344 (define_attr "conds" "use,set,clob,jump_clob,unconditional,nocond"
345 (if_then_else (eq_attr "type" "call")
346 (const_string "clob")
347 (if_then_else (eq_attr "neon_type" "none")
348 (const_string "nocond")
349 (const_string "unconditional"))))
351 ; Predicable means that the insn can be conditionally executed based on
352 ; an automatically added predicate (additional patterns are generated by
353 ; gen...). We default to 'no' because no Thumb patterns match this rule
354 ; and not all ARM patterns do.
355 (define_attr "predicable" "no,yes" (const_string "no"))
357 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
358 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
359 ; suffer blockages enough to warrant modelling this (and it can adversely
360 ; affect the schedule).
361 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
363 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
364 ; to stall the processor. Used with model_wbuf above.
365 (define_attr "write_conflict" "no,yes"
366 (if_then_else (eq_attr "type"
367 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
369 (const_string "no")))
371 ; Classify the insns into those that take one cycle and those that take more
372 ; than one on the main cpu execution unit.
373 (define_attr "core_cycles" "single,multi"
374 (if_then_else (eq_attr "type"
375 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
376 (const_string "single")
377 (const_string "multi")))
379 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
380 ;; distant label. Only applicable to Thumb code.
381 (define_attr "far_jump" "yes,no" (const_string "no"))
384 ;; The number of machine instructions this pattern expands to.
385 ;; Used for Thumb-2 conditional execution.
386 (define_attr "ce_count" "" (const_int 1))
388 ;;---------------------------------------------------------------------------
391 ; A list of modes that are exactly 64 bits in size. We use this to expand
392 ; some splits that are the same for all modes when operating on ARM
394 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
396 ;;---------------------------------------------------------------------------
399 (include "predicates.md")
400 (include "constraints.md")
402 ;;---------------------------------------------------------------------------
403 ;; Pipeline descriptions
405 ;; Processor type. This is created automatically from arm-cores.def.
406 (include "arm-tune.md")
408 (define_attr "tune_cortexr4" "yes,no"
410 (eq_attr "tune" "cortexr4,cortexr4f")
412 (const_string "no"))))
414 ;; True if the generic scheduling description should be used.
416 (define_attr "generic_sched" "yes,no"
418 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
419 (eq_attr "tune_cortexr4" "yes"))
421 (const_string "yes"))))
423 (define_attr "generic_vfp" "yes,no"
425 (and (eq_attr "fpu" "vfp")
426 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
427 (eq_attr "tune_cortexr4" "no"))
429 (const_string "no"))))
431 (include "arm-generic.md")
432 (include "arm926ejs.md")
433 (include "arm1020e.md")
434 (include "arm1026ejs.md")
435 (include "arm1136jfs.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-r4.md")
439 (include "cortex-r4f.md")
443 ;;---------------------------------------------------------------------------
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register, what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451 ;; Cirrus 64bit additions should not be split because we have a native
452 ;; 64bit addition instructions.
454 (define_expand "adddi3"
456 [(set (match_operand:DI 0 "s_register_operand" "")
457 (plus:DI (match_operand:DI 1 "s_register_operand" "")
458 (match_operand:DI 2 "s_register_operand" "")))
459 (clobber (reg:CC CC_REGNUM))])]
462 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
464 if (!cirrus_fp_register (operands[0], DImode))
465 operands[0] = force_reg (DImode, operands[0]);
466 if (!cirrus_fp_register (operands[1], DImode))
467 operands[1] = force_reg (DImode, operands[1]);
468 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
474 if (GET_CODE (operands[1]) != REG)
475 operands[1] = force_reg (SImode, operands[1]);
476 if (GET_CODE (operands[2]) != REG)
477 operands[2] = force_reg (SImode, operands[2]);
482 (define_insn "*thumb1_adddi3"
483 [(set (match_operand:DI 0 "register_operand" "=l")
484 (plus:DI (match_operand:DI 1 "register_operand" "%0")
485 (match_operand:DI 2 "register_operand" "l")))
486 (clobber (reg:CC CC_REGNUM))
489 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
490 [(set_attr "length" "4")]
493 (define_insn_and_split "*arm_adddi3"
494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
495 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
496 (match_operand:DI 2 "s_register_operand" "r, 0")))
497 (clobber (reg:CC CC_REGNUM))]
498 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
500 "TARGET_32BIT && reload_completed"
501 [(parallel [(set (reg:CC_C CC_REGNUM)
502 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
504 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
505 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
506 (plus:SI (match_dup 4) (match_dup 5))))]
509 operands[3] = gen_highpart (SImode, operands[0]);
510 operands[0] = gen_lowpart (SImode, operands[0]);
511 operands[4] = gen_highpart (SImode, operands[1]);
512 operands[1] = gen_lowpart (SImode, operands[1]);
513 operands[5] = gen_highpart (SImode, operands[2]);
514 operands[2] = gen_lowpart (SImode, operands[2]);
516 [(set_attr "conds" "clob")
517 (set_attr "length" "8")]
520 (define_insn_and_split "*adddi_sesidi_di"
521 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
522 (plus:DI (sign_extend:DI
523 (match_operand:SI 2 "s_register_operand" "r,r"))
524 (match_operand:DI 1 "s_register_operand" "r,0")))
525 (clobber (reg:CC CC_REGNUM))]
526 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
528 "TARGET_32BIT && reload_completed"
529 [(parallel [(set (reg:CC_C CC_REGNUM)
530 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
532 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
533 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
534 (plus:SI (ashiftrt:SI (match_dup 2)
539 operands[3] = gen_highpart (SImode, operands[0]);
540 operands[0] = gen_lowpart (SImode, operands[0]);
541 operands[4] = gen_highpart (SImode, operands[1]);
542 operands[1] = gen_lowpart (SImode, operands[1]);
543 operands[2] = gen_lowpart (SImode, operands[2]);
545 [(set_attr "conds" "clob")
546 (set_attr "length" "8")]
549 (define_insn_and_split "*adddi_zesidi_di"
550 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
551 (plus:DI (zero_extend:DI
552 (match_operand:SI 2 "s_register_operand" "r,r"))
553 (match_operand:DI 1 "s_register_operand" "r,0")))
554 (clobber (reg:CC CC_REGNUM))]
555 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
557 "TARGET_32BIT && reload_completed"
558 [(parallel [(set (reg:CC_C CC_REGNUM)
559 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
561 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
562 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
563 (plus:SI (match_dup 4) (const_int 0))))]
566 operands[3] = gen_highpart (SImode, operands[0]);
567 operands[0] = gen_lowpart (SImode, operands[0]);
568 operands[4] = gen_highpart (SImode, operands[1]);
569 operands[1] = gen_lowpart (SImode, operands[1]);
570 operands[2] = gen_lowpart (SImode, operands[2]);
572 [(set_attr "conds" "clob")
573 (set_attr "length" "8")]
576 (define_expand "addsi3"
577 [(set (match_operand:SI 0 "s_register_operand" "")
578 (plus:SI (match_operand:SI 1 "s_register_operand" "")
579 (match_operand:SI 2 "reg_or_int_operand" "")))]
582 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
584 arm_split_constant (PLUS, SImode, NULL_RTX,
585 INTVAL (operands[2]), operands[0], operands[1],
586 optimize && can_create_pseudo_p ());
592 ; If there is a scratch available, this will be faster than synthesizing the
595 [(match_scratch:SI 3 "r")
596 (set (match_operand:SI 0 "arm_general_register_operand" "")
597 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
598 (match_operand:SI 2 "const_int_operand" "")))]
600 !(const_ok_for_arm (INTVAL (operands[2]))
601 || const_ok_for_arm (-INTVAL (operands[2])))
602 && const_ok_for_arm (~INTVAL (operands[2]))"
603 [(set (match_dup 3) (match_dup 2))
604 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
608 ;; The r/r/k alternative is required when reloading the address
609 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
610 ;; put the duplicated register first, and not try the commutative version.
611 (define_insn_and_split "*arm_addsi3"
612 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
613 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
614 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
624 GET_CODE (operands[2]) == CONST_INT
625 && !(const_ok_for_arm (INTVAL (operands[2]))
626 || const_ok_for_arm (-INTVAL (operands[2])))"
627 [(clobber (const_int 0))]
629 arm_split_constant (PLUS, SImode, curr_insn,
630 INTVAL (operands[2]), operands[0],
634 [(set_attr "length" "4,4,4,4,4,16")
635 (set_attr "predicable" "yes")]
638 ;; Register group 'k' is a single register group containing only the stack
639 ;; register. Trying to reload it will always fail catastrophically,
640 ;; so never allow those alternatives to match if reloading is needed.
642 (define_insn "*thumb1_addsi3"
643 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
644 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
645 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
648 static const char * const asms[] =
650 \"add\\t%0, %0, %2\",
651 \"sub\\t%0, %0, #%n2\",
652 \"add\\t%0, %1, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %1, %2\",
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
664 [(set_attr "length" "2")]
667 ;; Reloading and elimination of the frame pointer can
668 ;; sometimes cause this optimization to be missed.
670 [(set (match_operand:SI 0 "arm_general_register_operand" "")
671 (match_operand:SI 1 "const_int_operand" ""))
673 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
675 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
676 && (INTVAL (operands[1]) & 3) == 0"
677 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
681 ;; ??? Make Thumb-2 variants which prefer low regs
682 (define_insn "*addsi3_compare0"
683 [(set (reg:CC_NOOV CC_REGNUM)
685 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
686 (match_operand:SI 2 "arm_add_operand" "rI,L"))
688 (set (match_operand:SI 0 "s_register_operand" "=r,r")
689 (plus:SI (match_dup 1) (match_dup 2)))]
693 sub%.\\t%0, %1, #%n2"
694 [(set_attr "conds" "set")]
697 (define_insn "*addsi3_compare0_scratch"
698 [(set (reg:CC_NOOV CC_REGNUM)
700 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
701 (match_operand:SI 1 "arm_add_operand" "rI,L"))
707 [(set_attr "conds" "set")]
710 (define_insn "*compare_negsi_si"
711 [(set (reg:CC_Z CC_REGNUM)
713 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
714 (match_operand:SI 1 "s_register_operand" "r")))]
717 [(set_attr "conds" "set")]
720 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
721 ;; addend is a constant.
722 (define_insn "*cmpsi2_addneg"
723 [(set (reg:CC CC_REGNUM)
725 (match_operand:SI 1 "s_register_operand" "r,r")
726 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
727 (set (match_operand:SI 0 "s_register_operand" "=r,r")
728 (plus:SI (match_dup 1)
729 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
730 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
733 add%.\\t%0, %1, #%n2"
734 [(set_attr "conds" "set")]
737 ;; Convert the sequence
739 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
743 ;; bcs dest ((unsigned)rn >= 1)
744 ;; similarly for the beq variant using bcc.
745 ;; This is a common looping idiom (while (n--))
747 [(set (match_operand:SI 0 "arm_general_register_operand" "")
748 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
750 (set (match_operand 2 "cc_register" "")
751 (compare (match_dup 0) (const_int -1)))
753 (if_then_else (match_operator 3 "equality_operator"
754 [(match_dup 2) (const_int 0)])
755 (match_operand 4 "" "")
756 (match_operand 5 "" "")))]
757 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
761 (match_dup 1) (const_int 1)))
762 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
764 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
767 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
768 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
771 operands[2], const0_rtx);"
774 ;; The next four insns work because they compare the result with one of
775 ;; the operands, and we know that the use of the condition code is
776 ;; either GEU or LTU, so we can use the carry flag from the addition
777 ;; instead of doing the compare a second time.
778 (define_insn "*addsi3_compare_op1"
779 [(set (reg:CC_C CC_REGNUM)
781 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
782 (match_operand:SI 2 "arm_add_operand" "rI,L"))
784 (set (match_operand:SI 0 "s_register_operand" "=r,r")
785 (plus:SI (match_dup 1) (match_dup 2)))]
789 sub%.\\t%0, %1, #%n2"
790 [(set_attr "conds" "set")]
793 (define_insn "*addsi3_compare_op2"
794 [(set (reg:CC_C CC_REGNUM)
796 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
797 (match_operand:SI 2 "arm_add_operand" "rI,L"))
799 (set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_dup 1) (match_dup 2)))]
804 sub%.\\t%0, %1, #%n2"
805 [(set_attr "conds" "set")]
808 (define_insn "*compare_addsi2_op0"
809 [(set (reg:CC_C CC_REGNUM)
811 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
812 (match_operand:SI 1 "arm_add_operand" "rI,L"))
818 [(set_attr "conds" "set")]
821 (define_insn "*compare_addsi2_op1"
822 [(set (reg:CC_C CC_REGNUM)
824 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
825 (match_operand:SI 1 "arm_add_operand" "rI,L"))
831 [(set_attr "conds" "set")]
834 (define_insn "*addsi3_carryin"
835 [(set (match_operand:SI 0 "s_register_operand" "=r")
836 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
837 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
838 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
841 [(set_attr "conds" "use")]
844 (define_insn "*addsi3_carryin_shift"
845 [(set (match_operand:SI 0 "s_register_operand" "=r")
846 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
848 (match_operator:SI 2 "shift_operator"
849 [(match_operand:SI 3 "s_register_operand" "r")
850 (match_operand:SI 4 "reg_or_int_operand" "rM")])
851 (match_operand:SI 1 "s_register_operand" "r"))))]
853 "adc%?\\t%0, %1, %3%S2"
854 [(set_attr "conds" "use")
855 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
856 (const_string "alu_shift")
857 (const_string "alu_shift_reg")))]
860 (define_insn "*addsi3_carryin_alt1"
861 [(set (match_operand:SI 0 "s_register_operand" "=r")
862 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
863 (match_operand:SI 2 "arm_rhs_operand" "rI"))
864 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
867 [(set_attr "conds" "use")]
870 (define_insn "*addsi3_carryin_alt2"
871 [(set (match_operand:SI 0 "s_register_operand" "=r")
872 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
873 (match_operand:SI 1 "s_register_operand" "r"))
874 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
877 [(set_attr "conds" "use")]
880 (define_insn "*addsi3_carryin_alt3"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
883 (match_operand:SI 2 "arm_rhs_operand" "rI"))
884 (match_operand:SI 1 "s_register_operand" "r")))]
887 [(set_attr "conds" "use")]
890 (define_expand "incscc"
891 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
892 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
893 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
894 (match_operand:SI 1 "s_register_operand" "0,?r")))]
899 (define_insn "*arm_incscc"
900 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
901 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
902 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
903 (match_operand:SI 1 "s_register_operand" "0,?r")))]
907 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
908 [(set_attr "conds" "use")
909 (set_attr "length" "4,8")]
912 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
914 [(set (match_operand:SI 0 "s_register_operand" "")
915 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
916 (match_operand:SI 2 "s_register_operand" ""))
918 (clobber (match_operand:SI 3 "s_register_operand" ""))]
920 [(set (match_dup 3) (match_dup 1))
921 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
923 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
926 (define_expand "addsf3"
927 [(set (match_operand:SF 0 "s_register_operand" "")
928 (plus:SF (match_operand:SF 1 "s_register_operand" "")
929 (match_operand:SF 2 "arm_float_add_operand" "")))]
930 "TARGET_32BIT && TARGET_HARD_FLOAT"
933 && !cirrus_fp_register (operands[2], SFmode))
934 operands[2] = force_reg (SFmode, operands[2]);
937 (define_expand "adddf3"
938 [(set (match_operand:DF 0 "s_register_operand" "")
939 (plus:DF (match_operand:DF 1 "s_register_operand" "")
940 (match_operand:DF 2 "arm_float_add_operand" "")))]
941 "TARGET_32BIT && TARGET_HARD_FLOAT"
944 && !cirrus_fp_register (operands[2], DFmode))
945 operands[2] = force_reg (DFmode, operands[2]);
948 (define_expand "subdi3"
950 [(set (match_operand:DI 0 "s_register_operand" "")
951 (minus:DI (match_operand:DI 1 "s_register_operand" "")
952 (match_operand:DI 2 "s_register_operand" "")))
953 (clobber (reg:CC CC_REGNUM))])]
956 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
958 && cirrus_fp_register (operands[0], DImode)
959 && cirrus_fp_register (operands[1], DImode))
961 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
967 if (GET_CODE (operands[1]) != REG)
968 operands[1] = force_reg (SImode, operands[1]);
969 if (GET_CODE (operands[2]) != REG)
970 operands[2] = force_reg (SImode, operands[2]);
975 (define_insn "*arm_subdi3"
976 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
977 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
978 (match_operand:DI 2 "s_register_operand" "r,0,0")))
979 (clobber (reg:CC CC_REGNUM))]
981 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
982 [(set_attr "conds" "clob")
983 (set_attr "length" "8")]
986 (define_insn "*thumb_subdi3"
987 [(set (match_operand:DI 0 "register_operand" "=l")
988 (minus:DI (match_operand:DI 1 "register_operand" "0")
989 (match_operand:DI 2 "register_operand" "l")))
990 (clobber (reg:CC CC_REGNUM))]
992 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
993 [(set_attr "length" "4")]
996 (define_insn "*subdi_di_zesidi"
997 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
998 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
1000 (match_operand:SI 2 "s_register_operand" "r,r"))))
1001 (clobber (reg:CC CC_REGNUM))]
1003 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1004 [(set_attr "conds" "clob")
1005 (set_attr "length" "8")]
1008 (define_insn "*subdi_di_sesidi"
1009 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1010 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1012 (match_operand:SI 2 "s_register_operand" "r,r"))))
1013 (clobber (reg:CC CC_REGNUM))]
1015 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1016 [(set_attr "conds" "clob")
1017 (set_attr "length" "8")]
1020 (define_insn "*subdi_zesidi_di"
1021 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1022 (minus:DI (zero_extend:DI
1023 (match_operand:SI 2 "s_register_operand" "r,r"))
1024 (match_operand:DI 1 "s_register_operand" "?r,0")))
1025 (clobber (reg:CC CC_REGNUM))]
1027 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1028 [(set_attr "conds" "clob")
1029 (set_attr "length" "8")]
1032 (define_insn "*subdi_sesidi_di"
1033 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1034 (minus:DI (sign_extend:DI
1035 (match_operand:SI 2 "s_register_operand" "r,r"))
1036 (match_operand:DI 1 "s_register_operand" "?r,0")))
1037 (clobber (reg:CC CC_REGNUM))]
1039 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1040 [(set_attr "conds" "clob")
1041 (set_attr "length" "8")]
1044 (define_insn "*subdi_zesidi_zesidi"
1045 [(set (match_operand:DI 0 "s_register_operand" "=r")
1046 (minus:DI (zero_extend:DI
1047 (match_operand:SI 1 "s_register_operand" "r"))
1049 (match_operand:SI 2 "s_register_operand" "r"))))
1050 (clobber (reg:CC CC_REGNUM))]
1052 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1053 [(set_attr "conds" "clob")
1054 (set_attr "length" "8")]
1057 (define_expand "subsi3"
1058 [(set (match_operand:SI 0 "s_register_operand" "")
1059 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1060 (match_operand:SI 2 "s_register_operand" "")))]
1063 if (GET_CODE (operands[1]) == CONST_INT)
1067 arm_split_constant (MINUS, SImode, NULL_RTX,
1068 INTVAL (operands[1]), operands[0],
1069 operands[2], optimize && can_create_pseudo_p ());
1072 else /* TARGET_THUMB1 */
1073 operands[1] = force_reg (SImode, operands[1]);
1078 (define_insn "*thumb1_subsi3_insn"
1079 [(set (match_operand:SI 0 "register_operand" "=l")
1080 (minus:SI (match_operand:SI 1 "register_operand" "l")
1081 (match_operand:SI 2 "register_operand" "l")))]
1084 [(set_attr "length" "2")]
1087 ; ??? Check Thumb-2 split length
1088 (define_insn_and_split "*arm_subsi3_insn"
1089 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1090 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1091 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1098 && GET_CODE (operands[1]) == CONST_INT
1099 && !const_ok_for_arm (INTVAL (operands[1]))"
1100 [(clobber (const_int 0))]
1102 arm_split_constant (MINUS, SImode, curr_insn,
1103 INTVAL (operands[1]), operands[0], operands[2], 0);
1106 [(set_attr "length" "4,4,16")
1107 (set_attr "predicable" "yes")]
1111 [(match_scratch:SI 3 "r")
1112 (set (match_operand:SI 0 "arm_general_register_operand" "")
1113 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1114 (match_operand:SI 2 "arm_general_register_operand" "")))]
1116 && !const_ok_for_arm (INTVAL (operands[1]))
1117 && const_ok_for_arm (~INTVAL (operands[1]))"
1118 [(set (match_dup 3) (match_dup 1))
1119 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1123 (define_insn "*subsi3_compare0"
1124 [(set (reg:CC_NOOV CC_REGNUM)
1126 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1127 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1129 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1130 (minus:SI (match_dup 1) (match_dup 2)))]
1135 [(set_attr "conds" "set")]
1138 (define_expand "decscc"
1139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1140 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1141 (match_operator:SI 2 "arm_comparison_operator"
1142 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1147 (define_insn "*arm_decscc"
1148 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1149 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1150 (match_operator:SI 2 "arm_comparison_operator"
1151 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1155 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1156 [(set_attr "conds" "use")
1157 (set_attr "length" "*,8")]
1160 (define_expand "subsf3"
1161 [(set (match_operand:SF 0 "s_register_operand" "")
1162 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1163 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1164 "TARGET_32BIT && TARGET_HARD_FLOAT"
1166 if (TARGET_MAVERICK)
1168 if (!cirrus_fp_register (operands[1], SFmode))
1169 operands[1] = force_reg (SFmode, operands[1]);
1170 if (!cirrus_fp_register (operands[2], SFmode))
1171 operands[2] = force_reg (SFmode, operands[2]);
1175 (define_expand "subdf3"
1176 [(set (match_operand:DF 0 "s_register_operand" "")
1177 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1178 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT"
1181 if (TARGET_MAVERICK)
1183 if (!cirrus_fp_register (operands[1], DFmode))
1184 operands[1] = force_reg (DFmode, operands[1]);
1185 if (!cirrus_fp_register (operands[2], DFmode))
1186 operands[2] = force_reg (DFmode, operands[2]);
1191 ;; Multiplication insns
1193 (define_expand "mulsi3"
1194 [(set (match_operand:SI 0 "s_register_operand" "")
1195 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1196 (match_operand:SI 1 "s_register_operand" "")))]
1201 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1202 (define_insn "*arm_mulsi3"
1203 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1204 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1205 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1206 "TARGET_32BIT && !arm_arch6"
1207 "mul%?\\t%0, %2, %1"
1208 [(set_attr "insn" "mul")
1209 (set_attr "predicable" "yes")]
1212 (define_insn "*arm_mulsi3_v6"
1213 [(set (match_operand:SI 0 "s_register_operand" "=r")
1214 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1215 (match_operand:SI 2 "s_register_operand" "r")))]
1216 "TARGET_32BIT && arm_arch6"
1217 "mul%?\\t%0, %1, %2"
1218 [(set_attr "insn" "mul")
1219 (set_attr "predicable" "yes")]
1222 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1223 ; 1 and 2; are the same, because reload will make operand 0 match
1224 ; operand 1 without realizing that this conflicts with operand 2. We fix
1225 ; this by adding another alternative to match this case, and then `reload'
1226 ; it ourselves. This alternative must come first.
1227 (define_insn "*thumb_mulsi3"
1228 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1229 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1230 (match_operand:SI 2 "register_operand" "l,l,l")))]
1231 "TARGET_THUMB1 && !arm_arch6"
1233 if (which_alternative < 2)
1234 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1236 return \"mul\\t%0, %2\";
1238 [(set_attr "length" "4,4,2")
1239 (set_attr "insn" "mul")]
1242 (define_insn "*thumb_mulsi3_v6"
1243 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1244 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1245 (match_operand:SI 2 "register_operand" "l,0,0")))]
1246 "TARGET_THUMB1 && arm_arch6"
1251 [(set_attr "length" "2")
1252 (set_attr "insn" "mul")]
1255 (define_insn "*mulsi3_compare0"
1256 [(set (reg:CC_NOOV CC_REGNUM)
1257 (compare:CC_NOOV (mult:SI
1258 (match_operand:SI 2 "s_register_operand" "r,r")
1259 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1261 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1262 (mult:SI (match_dup 2) (match_dup 1)))]
1263 "TARGET_ARM && !arm_arch6"
1264 "mul%.\\t%0, %2, %1"
1265 [(set_attr "conds" "set")
1266 (set_attr "insn" "muls")]
1269 (define_insn "*mulsi3_compare0_v6"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1271 (compare:CC_NOOV (mult:SI
1272 (match_operand:SI 2 "s_register_operand" "r")
1273 (match_operand:SI 1 "s_register_operand" "r"))
1275 (set (match_operand:SI 0 "s_register_operand" "=r")
1276 (mult:SI (match_dup 2) (match_dup 1)))]
1277 "TARGET_ARM && arm_arch6 && optimize_size"
1278 "mul%.\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "insn" "muls")]
1283 (define_insn "*mulsi_compare0_scratch"
1284 [(set (reg:CC_NOOV CC_REGNUM)
1285 (compare:CC_NOOV (mult:SI
1286 (match_operand:SI 2 "s_register_operand" "r,r")
1287 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1289 (clobber (match_scratch:SI 0 "=&r,&r"))]
1290 "TARGET_ARM && !arm_arch6"
1291 "mul%.\\t%0, %2, %1"
1292 [(set_attr "conds" "set")
1293 (set_attr "insn" "muls")]
1296 (define_insn "*mulsi_compare0_scratch_v6"
1297 [(set (reg:CC_NOOV CC_REGNUM)
1298 (compare:CC_NOOV (mult:SI
1299 (match_operand:SI 2 "s_register_operand" "r")
1300 (match_operand:SI 1 "s_register_operand" "r"))
1302 (clobber (match_scratch:SI 0 "=r"))]
1303 "TARGET_ARM && arm_arch6 && optimize_size"
1304 "mul%.\\t%0, %2, %1"
1305 [(set_attr "conds" "set")
1306 (set_attr "insn" "muls")]
1309 ;; Unnamed templates to match MLA instruction.
1311 (define_insn "*mulsi3addsi"
1312 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1314 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1315 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1316 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1317 "TARGET_32BIT && !arm_arch6"
1318 "mla%?\\t%0, %2, %1, %3"
1319 [(set_attr "insn" "mla")
1320 (set_attr "predicable" "yes")]
1323 (define_insn "*mulsi3addsi_v6"
1324 [(set (match_operand:SI 0 "s_register_operand" "=r")
1326 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1327 (match_operand:SI 1 "s_register_operand" "r"))
1328 (match_operand:SI 3 "s_register_operand" "r")))]
1329 "TARGET_32BIT && arm_arch6"
1330 "mla%?\\t%0, %2, %1, %3"
1331 [(set_attr "insn" "mla")
1332 (set_attr "predicable" "yes")]
1335 (define_insn "*mulsi3addsi_compare0"
1336 [(set (reg:CC_NOOV CC_REGNUM)
1339 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1340 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1341 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1343 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1344 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1346 "TARGET_ARM && arm_arch6"
1347 "mla%.\\t%0, %2, %1, %3"
1348 [(set_attr "conds" "set")
1349 (set_attr "insn" "mlas")]
1352 (define_insn "*mulsi3addsi_compare0_v6"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1356 (match_operand:SI 2 "s_register_operand" "r")
1357 (match_operand:SI 1 "s_register_operand" "r"))
1358 (match_operand:SI 3 "s_register_operand" "r"))
1360 (set (match_operand:SI 0 "s_register_operand" "=r")
1361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1363 "TARGET_ARM && arm_arch6 && optimize_size"
1364 "mla%.\\t%0, %2, %1, %3"
1365 [(set_attr "conds" "set")
1366 (set_attr "insn" "mlas")]
1369 (define_insn "*mulsi3addsi_compare0_scratch"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1373 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1374 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1375 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1377 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1378 "TARGET_ARM && !arm_arch6"
1379 "mla%.\\t%0, %2, %1, %3"
1380 [(set_attr "conds" "set")
1381 (set_attr "insn" "mlas")]
1384 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1388 (match_operand:SI 2 "s_register_operand" "r")
1389 (match_operand:SI 1 "s_register_operand" "r"))
1390 (match_operand:SI 3 "s_register_operand" "r"))
1392 (clobber (match_scratch:SI 0 "=r"))]
1393 "TARGET_ARM && arm_arch6 && optimize_size"
1394 "mla%.\\t%0, %2, %1, %3"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "mlas")]
1399 (define_insn "*mulsi3subsi"
1400 [(set (match_operand:SI 0 "s_register_operand" "=r")
1402 (match_operand:SI 3 "s_register_operand" "r")
1403 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1404 (match_operand:SI 1 "s_register_operand" "r"))))]
1405 "TARGET_32BIT && arm_arch_thumb2"
1406 "mls%?\\t%0, %2, %1, %3"
1407 [(set_attr "insn" "mla")
1408 (set_attr "predicable" "yes")]
1411 ;; Unnamed template to match long long multiply-accumulate (smlal)
1413 (define_insn "*mulsidi3adddi"
1414 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1417 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1418 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1419 (match_operand:DI 1 "s_register_operand" "0")))]
1420 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1421 "smlal%?\\t%Q0, %R0, %3, %2"
1422 [(set_attr "insn" "smlal")
1423 (set_attr "predicable" "yes")]
1426 (define_insn "*mulsidi3adddi_v6"
1427 [(set (match_operand:DI 0 "s_register_operand" "=r")
1430 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1431 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1432 (match_operand:DI 1 "s_register_operand" "0")))]
1433 "TARGET_32BIT && arm_arch6"
1434 "smlal%?\\t%Q0, %R0, %3, %2"
1435 [(set_attr "insn" "smlal")
1436 (set_attr "predicable" "yes")]
1439 ;; 32x32->64 widening multiply.
1440 ;; As with mulsi3, the only difference between the v3-5 and v6+
1441 ;; versions of these patterns is the requirement that the output not
1442 ;; overlap the inputs, but that still means we have to have a named
1443 ;; expander and two different starred insns.
1445 (define_expand "mulsidi3"
1446 [(set (match_operand:DI 0 "s_register_operand" "")
1448 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1449 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1450 "TARGET_32BIT && arm_arch3m"
1454 (define_insn "*mulsidi3_nov6"
1455 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1457 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1458 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1459 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1460 "smull%?\\t%Q0, %R0, %1, %2"
1461 [(set_attr "insn" "smull")
1462 (set_attr "predicable" "yes")]
1465 (define_insn "*mulsidi3_v6"
1466 [(set (match_operand:DI 0 "s_register_operand" "=r")
1468 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1469 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1470 "TARGET_32BIT && arm_arch6"
1471 "smull%?\\t%Q0, %R0, %1, %2"
1472 [(set_attr "insn" "smull")
1473 (set_attr "predicable" "yes")]
1476 (define_expand "umulsidi3"
1477 [(set (match_operand:DI 0 "s_register_operand" "")
1479 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1480 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1481 "TARGET_32BIT && arm_arch3m"
1485 (define_insn "*umulsidi3_nov6"
1486 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1488 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1489 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1490 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1491 "umull%?\\t%Q0, %R0, %1, %2"
1492 [(set_attr "insn" "umull")
1493 (set_attr "predicable" "yes")]
1496 (define_insn "*umulsidi3_v6"
1497 [(set (match_operand:DI 0 "s_register_operand" "=r")
1499 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1500 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1501 "TARGET_32BIT && arm_arch6"
1502 "umull%?\\t%Q0, %R0, %1, %2"
1503 [(set_attr "insn" "umull")
1504 (set_attr "predicable" "yes")]
1507 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1509 (define_insn "*umulsidi3adddi"
1510 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1513 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1514 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1515 (match_operand:DI 1 "s_register_operand" "0")))]
1516 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1517 "umlal%?\\t%Q0, %R0, %3, %2"
1518 [(set_attr "insn" "umlal")
1519 (set_attr "predicable" "yes")]
1522 (define_insn "*umulsidi3adddi_v6"
1523 [(set (match_operand:DI 0 "s_register_operand" "=r")
1526 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1527 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1528 (match_operand:DI 1 "s_register_operand" "0")))]
1529 "TARGET_32BIT && arm_arch6"
1530 "umlal%?\\t%Q0, %R0, %3, %2"
1531 [(set_attr "insn" "umlal")
1532 (set_attr "predicable" "yes")]
1535 (define_expand "smulsi3_highpart"
1537 [(set (match_operand:SI 0 "s_register_operand" "")
1541 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1542 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1544 (clobber (match_scratch:SI 3 ""))])]
1545 "TARGET_32BIT && arm_arch3m"
1549 (define_insn "*smulsi3_highpart_nov6"
1550 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1554 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1555 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1557 (clobber (match_scratch:SI 3 "=&r,&r"))]
1558 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1559 "smull%?\\t%3, %0, %2, %1"
1560 [(set_attr "insn" "smull")
1561 (set_attr "predicable" "yes")]
1564 (define_insn "*smulsi3_highpart_v6"
1565 [(set (match_operand:SI 0 "s_register_operand" "=r")
1569 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1570 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1572 (clobber (match_scratch:SI 3 "=r"))]
1573 "TARGET_32BIT && arm_arch6"
1574 "smull%?\\t%3, %0, %2, %1"
1575 [(set_attr "insn" "smull")
1576 (set_attr "predicable" "yes")]
1579 (define_expand "umulsi3_highpart"
1581 [(set (match_operand:SI 0 "s_register_operand" "")
1585 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1586 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1588 (clobber (match_scratch:SI 3 ""))])]
1589 "TARGET_32BIT && arm_arch3m"
1593 (define_insn "*umulsi3_highpart_nov6"
1594 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1598 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1599 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1601 (clobber (match_scratch:SI 3 "=&r,&r"))]
1602 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1603 "umull%?\\t%3, %0, %2, %1"
1604 [(set_attr "insn" "umull")
1605 (set_attr "predicable" "yes")]
1608 (define_insn "*umulsi3_highpart_v6"
1609 [(set (match_operand:SI 0 "s_register_operand" "=r")
1613 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1614 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1616 (clobber (match_scratch:SI 3 "=r"))]
1617 "TARGET_32BIT && arm_arch6"
1618 "umull%?\\t%3, %0, %2, %1"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
1623 (define_insn "mulhisi3"
1624 [(set (match_operand:SI 0 "s_register_operand" "=r")
1625 (mult:SI (sign_extend:SI
1626 (match_operand:HI 1 "s_register_operand" "%r"))
1628 (match_operand:HI 2 "s_register_operand" "r"))))]
1629 "TARGET_DSP_MULTIPLY"
1630 "smulbb%?\\t%0, %1, %2"
1631 [(set_attr "insn" "smulxy")
1632 (set_attr "predicable" "yes")]
1635 (define_insn "*mulhisi3tb"
1636 [(set (match_operand:SI 0 "s_register_operand" "=r")
1637 (mult:SI (ashiftrt:SI
1638 (match_operand:SI 1 "s_register_operand" "r")
1641 (match_operand:HI 2 "s_register_operand" "r"))))]
1642 "TARGET_DSP_MULTIPLY"
1643 "smultb%?\\t%0, %1, %2"
1644 [(set_attr "insn" "smulxy")
1645 (set_attr "predicable" "yes")]
1648 (define_insn "*mulhisi3bt"
1649 [(set (match_operand:SI 0 "s_register_operand" "=r")
1650 (mult:SI (sign_extend:SI
1651 (match_operand:HI 1 "s_register_operand" "r"))
1653 (match_operand:SI 2 "s_register_operand" "r")
1655 "TARGET_DSP_MULTIPLY"
1656 "smulbt%?\\t%0, %1, %2"
1657 [(set_attr "insn" "smulxy")
1658 (set_attr "predicable" "yes")]
1661 (define_insn "*mulhisi3tt"
1662 [(set (match_operand:SI 0 "s_register_operand" "=r")
1663 (mult:SI (ashiftrt:SI
1664 (match_operand:SI 1 "s_register_operand" "r")
1667 (match_operand:SI 2 "s_register_operand" "r")
1669 "TARGET_DSP_MULTIPLY"
1670 "smultt%?\\t%0, %1, %2"
1671 [(set_attr "insn" "smulxy")
1672 (set_attr "predicable" "yes")]
1675 (define_insn "*mulhisi3addsi"
1676 [(set (match_operand:SI 0 "s_register_operand" "=r")
1677 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1678 (mult:SI (sign_extend:SI
1679 (match_operand:HI 2 "s_register_operand" "%r"))
1681 (match_operand:HI 3 "s_register_operand" "r")))))]
1682 "TARGET_DSP_MULTIPLY"
1683 "smlabb%?\\t%0, %2, %3, %1"
1684 [(set_attr "insn" "smlaxy")
1685 (set_attr "predicable" "yes")]
1688 (define_insn "*mulhidi3adddi"
1689 [(set (match_operand:DI 0 "s_register_operand" "=r")
1691 (match_operand:DI 1 "s_register_operand" "0")
1692 (mult:DI (sign_extend:DI
1693 (match_operand:HI 2 "s_register_operand" "%r"))
1695 (match_operand:HI 3 "s_register_operand" "r")))))]
1696 "TARGET_DSP_MULTIPLY"
1697 "smlalbb%?\\t%Q0, %R0, %2, %3"
1698 [(set_attr "insn" "smlalxy")
1699 (set_attr "predicable" "yes")])
1701 (define_expand "mulsf3"
1702 [(set (match_operand:SF 0 "s_register_operand" "")
1703 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1704 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1705 "TARGET_32BIT && TARGET_HARD_FLOAT"
1708 && !cirrus_fp_register (operands[2], SFmode))
1709 operands[2] = force_reg (SFmode, operands[2]);
1712 (define_expand "muldf3"
1713 [(set (match_operand:DF 0 "s_register_operand" "")
1714 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1715 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1716 "TARGET_32BIT && TARGET_HARD_FLOAT"
1719 && !cirrus_fp_register (operands[2], DFmode))
1720 operands[2] = force_reg (DFmode, operands[2]);
1725 (define_expand "divsf3"
1726 [(set (match_operand:SF 0 "s_register_operand" "")
1727 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1728 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1729 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1732 (define_expand "divdf3"
1733 [(set (match_operand:DF 0 "s_register_operand" "")
1734 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1735 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1736 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1741 (define_expand "modsf3"
1742 [(set (match_operand:SF 0 "s_register_operand" "")
1743 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1744 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1745 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1748 (define_expand "moddf3"
1749 [(set (match_operand:DF 0 "s_register_operand" "")
1750 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1751 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1752 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1755 ;; Boolean and,ior,xor insns
1757 ;; Split up double word logical operations
1759 ;; Split up simple DImode logical operations. Simply perform the logical
1760 ;; operation on the upper and lower halves of the registers.
1762 [(set (match_operand:DI 0 "s_register_operand" "")
1763 (match_operator:DI 6 "logical_binary_operator"
1764 [(match_operand:DI 1 "s_register_operand" "")
1765 (match_operand:DI 2 "s_register_operand" "")]))]
1766 "TARGET_32BIT && reload_completed
1767 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1768 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1769 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1772 operands[3] = gen_highpart (SImode, operands[0]);
1773 operands[0] = gen_lowpart (SImode, operands[0]);
1774 operands[4] = gen_highpart (SImode, operands[1]);
1775 operands[1] = gen_lowpart (SImode, operands[1]);
1776 operands[5] = gen_highpart (SImode, operands[2]);
1777 operands[2] = gen_lowpart (SImode, operands[2]);
1782 [(set (match_operand:DI 0 "s_register_operand" "")
1783 (match_operator:DI 6 "logical_binary_operator"
1784 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1785 (match_operand:DI 1 "s_register_operand" "")]))]
1786 "TARGET_32BIT && reload_completed"
1787 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1788 (set (match_dup 3) (match_op_dup:SI 6
1789 [(ashiftrt:SI (match_dup 2) (const_int 31))
1793 operands[3] = gen_highpart (SImode, operands[0]);
1794 operands[0] = gen_lowpart (SImode, operands[0]);
1795 operands[4] = gen_highpart (SImode, operands[1]);
1796 operands[1] = gen_lowpart (SImode, operands[1]);
1797 operands[5] = gen_highpart (SImode, operands[2]);
1798 operands[2] = gen_lowpart (SImode, operands[2]);
1802 ;; The zero extend of operand 2 means we can just copy the high part of
1803 ;; operand1 into operand0.
1805 [(set (match_operand:DI 0 "s_register_operand" "")
1807 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1808 (match_operand:DI 1 "s_register_operand" "")))]
1809 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1810 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1811 (set (match_dup 3) (match_dup 4))]
1814 operands[4] = gen_highpart (SImode, operands[1]);
1815 operands[3] = gen_highpart (SImode, operands[0]);
1816 operands[0] = gen_lowpart (SImode, operands[0]);
1817 operands[1] = gen_lowpart (SImode, operands[1]);
1821 ;; The zero extend of operand 2 means we can just copy the high part of
1822 ;; operand1 into operand0.
1824 [(set (match_operand:DI 0 "s_register_operand" "")
1826 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1827 (match_operand:DI 1 "s_register_operand" "")))]
1828 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1829 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1830 (set (match_dup 3) (match_dup 4))]
1833 operands[4] = gen_highpart (SImode, operands[1]);
1834 operands[3] = gen_highpart (SImode, operands[0]);
1835 operands[0] = gen_lowpart (SImode, operands[0]);
1836 operands[1] = gen_lowpart (SImode, operands[1]);
1840 (define_insn "anddi3"
1841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1842 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1843 (match_operand:DI 2 "s_register_operand" "r,r")))]
1844 "TARGET_32BIT && ! TARGET_IWMMXT"
1846 [(set_attr "length" "8")]
1849 (define_insn_and_split "*anddi_zesidi_di"
1850 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1851 (and:DI (zero_extend:DI
1852 (match_operand:SI 2 "s_register_operand" "r,r"))
1853 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1856 "TARGET_32BIT && reload_completed"
1857 ; The zero extend of operand 2 clears the high word of the output
1859 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1860 (set (match_dup 3) (const_int 0))]
1863 operands[3] = gen_highpart (SImode, operands[0]);
1864 operands[0] = gen_lowpart (SImode, operands[0]);
1865 operands[1] = gen_lowpart (SImode, operands[1]);
1867 [(set_attr "length" "8")]
1870 (define_insn "*anddi_sesdi_di"
1871 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1872 (and:DI (sign_extend:DI
1873 (match_operand:SI 2 "s_register_operand" "r,r"))
1874 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1877 [(set_attr "length" "8")]
1880 (define_expand "andsi3"
1881 [(set (match_operand:SI 0 "s_register_operand" "")
1882 (and:SI (match_operand:SI 1 "s_register_operand" "")
1883 (match_operand:SI 2 "reg_or_int_operand" "")))]
1888 if (GET_CODE (operands[2]) == CONST_INT)
1890 arm_split_constant (AND, SImode, NULL_RTX,
1891 INTVAL (operands[2]), operands[0],
1892 operands[1], optimize && can_create_pseudo_p ());
1897 else /* TARGET_THUMB1 */
1899 if (GET_CODE (operands[2]) != CONST_INT)
1900 operands[2] = force_reg (SImode, operands[2]);
1905 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1907 operands[2] = force_reg (SImode,
1908 GEN_INT (~INTVAL (operands[2])));
1910 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1915 for (i = 9; i <= 31; i++)
1917 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1919 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1923 else if ((((HOST_WIDE_INT) 1) << i) - 1
1924 == ~INTVAL (operands[2]))
1926 rtx shift = GEN_INT (i);
1927 rtx reg = gen_reg_rtx (SImode);
1929 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1930 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1936 operands[2] = force_reg (SImode, operands[2]);
1942 ; ??? Check split length for Thumb-2
1943 (define_insn_and_split "*arm_andsi3_insn"
1944 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1945 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1946 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1950 bic%?\\t%0, %1, #%B2
1953 && GET_CODE (operands[2]) == CONST_INT
1954 && !(const_ok_for_arm (INTVAL (operands[2]))
1955 || const_ok_for_arm (~INTVAL (operands[2])))"
1956 [(clobber (const_int 0))]
1958 arm_split_constant (AND, SImode, curr_insn,
1959 INTVAL (operands[2]), operands[0], operands[1], 0);
1962 [(set_attr "length" "4,4,16")
1963 (set_attr "predicable" "yes")]
1966 (define_insn "*thumb1_andsi3_insn"
1967 [(set (match_operand:SI 0 "register_operand" "=l")
1968 (and:SI (match_operand:SI 1 "register_operand" "%0")
1969 (match_operand:SI 2 "register_operand" "l")))]
1972 [(set_attr "length" "2")]
1975 (define_insn "*andsi3_compare0"
1976 [(set (reg:CC_NOOV CC_REGNUM)
1978 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1979 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1981 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1982 (and:SI (match_dup 1) (match_dup 2)))]
1986 bic%.\\t%0, %1, #%B2"
1987 [(set_attr "conds" "set")]
1990 (define_insn "*andsi3_compare0_scratch"
1991 [(set (reg:CC_NOOV CC_REGNUM)
1993 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1994 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1996 (clobber (match_scratch:SI 2 "=X,r"))]
2000 bic%.\\t%2, %0, #%B1"
2001 [(set_attr "conds" "set")]
2004 (define_insn "*zeroextractsi_compare0_scratch"
2005 [(set (reg:CC_NOOV CC_REGNUM)
2006 (compare:CC_NOOV (zero_extract:SI
2007 (match_operand:SI 0 "s_register_operand" "r")
2008 (match_operand 1 "const_int_operand" "n")
2009 (match_operand 2 "const_int_operand" "n"))
2012 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2013 && INTVAL (operands[1]) > 0
2014 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2015 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2017 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2018 << INTVAL (operands[2]));
2019 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2022 [(set_attr "conds" "set")]
2025 (define_insn_and_split "*ne_zeroextractsi"
2026 [(set (match_operand:SI 0 "s_register_operand" "=r")
2027 (ne:SI (zero_extract:SI
2028 (match_operand:SI 1 "s_register_operand" "r")
2029 (match_operand:SI 2 "const_int_operand" "n")
2030 (match_operand:SI 3 "const_int_operand" "n"))
2032 (clobber (reg:CC CC_REGNUM))]
2034 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2035 && INTVAL (operands[2]) > 0
2036 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2037 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2040 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2041 && INTVAL (operands[2]) > 0
2042 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2043 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2044 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2045 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2047 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2049 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2050 (match_dup 0) (const_int 1)))]
2052 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2053 << INTVAL (operands[3]));
2055 [(set_attr "conds" "clob")
2056 (set (attr "length")
2057 (if_then_else (eq_attr "is_thumb" "yes")
2062 (define_insn_and_split "*ne_zeroextractsi_shifted"
2063 [(set (match_operand:SI 0 "s_register_operand" "=r")
2064 (ne:SI (zero_extract:SI
2065 (match_operand:SI 1 "s_register_operand" "r")
2066 (match_operand:SI 2 "const_int_operand" "n")
2069 (clobber (reg:CC CC_REGNUM))]
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2076 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2081 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2083 [(set_attr "conds" "clob")
2084 (set_attr "length" "8")]
2087 (define_insn_and_split "*ite_ne_zeroextractsi"
2088 [(set (match_operand:SI 0 "s_register_operand" "=r")
2089 (if_then_else:SI (ne (zero_extract:SI
2090 (match_operand:SI 1 "s_register_operand" "r")
2091 (match_operand:SI 2 "const_int_operand" "n")
2092 (match_operand:SI 3 "const_int_operand" "n"))
2094 (match_operand:SI 4 "arm_not_operand" "rIK")
2096 (clobber (reg:CC CC_REGNUM))]
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2102 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2105 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2106 && INTVAL (operands[2]) > 0
2107 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2108 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2109 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2110 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2111 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2113 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2115 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2116 (match_dup 0) (match_dup 4)))]
2118 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2119 << INTVAL (operands[3]));
2121 [(set_attr "conds" "clob")
2122 (set_attr "length" "8")]
2125 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2126 [(set (match_operand:SI 0 "s_register_operand" "=r")
2127 (if_then_else:SI (ne (zero_extract:SI
2128 (match_operand:SI 1 "s_register_operand" "r")
2129 (match_operand:SI 2 "const_int_operand" "n")
2132 (match_operand:SI 3 "arm_not_operand" "rIK")
2134 (clobber (reg:CC CC_REGNUM))]
2135 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2137 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2138 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2139 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2141 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2143 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2144 (match_dup 0) (match_dup 3)))]
2146 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2148 [(set_attr "conds" "clob")
2149 (set_attr "length" "8")]
2153 [(set (match_operand:SI 0 "s_register_operand" "")
2154 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2155 (match_operand:SI 2 "const_int_operand" "")
2156 (match_operand:SI 3 "const_int_operand" "")))
2157 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2159 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2160 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2162 HOST_WIDE_INT temp = INTVAL (operands[2]);
2164 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2165 operands[3] = GEN_INT (32 - temp);
2169 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2171 [(set (match_operand:SI 0 "s_register_operand" "")
2172 (match_operator:SI 1 "shiftable_operator"
2173 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2174 (match_operand:SI 3 "const_int_operand" "")
2175 (match_operand:SI 4 "const_int_operand" ""))
2176 (match_operand:SI 5 "s_register_operand" "")]))
2177 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2179 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2182 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2185 HOST_WIDE_INT temp = INTVAL (operands[3]);
2187 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2188 operands[4] = GEN_INT (32 - temp);
2193 [(set (match_operand:SI 0 "s_register_operand" "")
2194 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2195 (match_operand:SI 2 "const_int_operand" "")
2196 (match_operand:SI 3 "const_int_operand" "")))]
2198 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2199 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2201 HOST_WIDE_INT temp = INTVAL (operands[2]);
2203 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2204 operands[3] = GEN_INT (32 - temp);
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (match_operator:SI 1 "shiftable_operator"
2211 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")
2213 (match_operand:SI 4 "const_int_operand" ""))
2214 (match_operand:SI 5 "s_register_operand" "")]))
2215 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2217 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2220 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2223 HOST_WIDE_INT temp = INTVAL (operands[3]);
2225 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2226 operands[4] = GEN_INT (32 - temp);
2230 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2231 ;;; represented by the bitfield, then this will produce incorrect results.
2232 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2233 ;;; which have a real bit-field insert instruction, the truncation happens
2234 ;;; in the bit-field insert instruction itself. Since arm does not have a
2235 ;;; bit-field insert instruction, we would have to emit code here to truncate
2236 ;;; the value before we insert. This loses some of the advantage of having
2237 ;;; this insv pattern, so this pattern needs to be reevalutated.
2239 (define_expand "insv"
2240 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2241 (match_operand:SI 1 "general_operand" "")
2242 (match_operand:SI 2 "general_operand" ""))
2243 (match_operand:SI 3 "reg_or_int_operand" ""))]
2244 "TARGET_ARM || arm_arch_thumb2"
2247 int start_bit = INTVAL (operands[2]);
2248 int width = INTVAL (operands[1]);
2249 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2250 rtx target, subtarget;
2252 if (arm_arch_thumb2)
2254 bool use_bfi = TRUE;
2256 if (GET_CODE (operands[3]) == CONST_INT)
2258 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2262 emit_insn (gen_insv_zero (operands[0], operands[1],
2267 /* See if the set can be done with a single orr instruction. */
2268 if (val == mask && const_ok_for_arm (val << start_bit))
2274 if (GET_CODE (operands[3]) != REG)
2275 operands[3] = force_reg (SImode, operands[3]);
2277 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2283 target = copy_rtx (operands[0]);
2284 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2285 subreg as the final target. */
2286 if (GET_CODE (target) == SUBREG)
2288 subtarget = gen_reg_rtx (SImode);
2289 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2290 < GET_MODE_SIZE (SImode))
2291 target = SUBREG_REG (target);
2296 if (GET_CODE (operands[3]) == CONST_INT)
2298 /* Since we are inserting a known constant, we may be able to
2299 reduce the number of bits that we have to clear so that
2300 the mask becomes simple. */
2301 /* ??? This code does not check to see if the new mask is actually
2302 simpler. It may not be. */
2303 rtx op1 = gen_reg_rtx (SImode);
2304 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2305 start of this pattern. */
2306 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2307 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2309 emit_insn (gen_andsi3 (op1, operands[0],
2310 gen_int_mode (~mask2, SImode)));
2311 emit_insn (gen_iorsi3 (subtarget, op1,
2312 gen_int_mode (op3_value << start_bit, SImode)));
2314 else if (start_bit == 0
2315 && !(const_ok_for_arm (mask)
2316 || const_ok_for_arm (~mask)))
2318 /* A Trick, since we are setting the bottom bits in the word,
2319 we can shift operand[3] up, operand[0] down, OR them together
2320 and rotate the result back again. This takes 3 insns, and
2321 the third might be mergeable into another op. */
2322 /* The shift up copes with the possibility that operand[3] is
2323 wider than the bitfield. */
2324 rtx op0 = gen_reg_rtx (SImode);
2325 rtx op1 = gen_reg_rtx (SImode);
2327 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2328 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2329 emit_insn (gen_iorsi3 (op1, op1, op0));
2330 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2332 else if ((width + start_bit == 32)
2333 && !(const_ok_for_arm (mask)
2334 || const_ok_for_arm (~mask)))
2336 /* Similar trick, but slightly less efficient. */
2338 rtx op0 = gen_reg_rtx (SImode);
2339 rtx op1 = gen_reg_rtx (SImode);
2341 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2342 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2343 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2344 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2348 rtx op0 = gen_int_mode (mask, SImode);
2349 rtx op1 = gen_reg_rtx (SImode);
2350 rtx op2 = gen_reg_rtx (SImode);
2352 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2354 rtx tmp = gen_reg_rtx (SImode);
2356 emit_insn (gen_movsi (tmp, op0));
2360 /* Mask out any bits in operand[3] that are not needed. */
2361 emit_insn (gen_andsi3 (op1, operands[3], op0));
2363 if (GET_CODE (op0) == CONST_INT
2364 && (const_ok_for_arm (mask << start_bit)
2365 || const_ok_for_arm (~(mask << start_bit))))
2367 op0 = gen_int_mode (~(mask << start_bit), SImode);
2368 emit_insn (gen_andsi3 (op2, operands[0], op0));
2372 if (GET_CODE (op0) == CONST_INT)
2374 rtx tmp = gen_reg_rtx (SImode);
2376 emit_insn (gen_movsi (tmp, op0));
2381 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2383 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2387 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2389 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2392 if (subtarget != target)
2394 /* If TARGET is still a SUBREG, then it must be wider than a word,
2395 so we must be careful only to set the subword we were asked to. */
2396 if (GET_CODE (target) == SUBREG)
2397 emit_move_insn (target, subtarget);
2399 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2406 (define_insn "insv_zero"
2407 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2408 (match_operand:SI 1 "const_int_operand" "M")
2409 (match_operand:SI 2 "const_int_operand" "M"))
2413 [(set_attr "length" "4")
2414 (set_attr "predicable" "yes")]
2417 (define_insn "insv_t2"
2418 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2419 (match_operand:SI 1 "const_int_operand" "M")
2420 (match_operand:SI 2 "const_int_operand" "M"))
2421 (match_operand:SI 3 "s_register_operand" "r"))]
2423 "bfi%?\t%0, %3, %2, %1"
2424 [(set_attr "length" "4")
2425 (set_attr "predicable" "yes")]
2428 ; constants for op 2 will never be given to these patterns.
2429 (define_insn_and_split "*anddi_notdi_di"
2430 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2431 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2432 (match_operand:DI 2 "s_register_operand" "0,r")))]
2435 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2436 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2437 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2440 operands[3] = gen_highpart (SImode, operands[0]);
2441 operands[0] = gen_lowpart (SImode, operands[0]);
2442 operands[4] = gen_highpart (SImode, operands[1]);
2443 operands[1] = gen_lowpart (SImode, operands[1]);
2444 operands[5] = gen_highpart (SImode, operands[2]);
2445 operands[2] = gen_lowpart (SImode, operands[2]);
2447 [(set_attr "length" "8")
2448 (set_attr "predicable" "yes")]
2451 (define_insn_and_split "*anddi_notzesidi_di"
2452 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2453 (and:DI (not:DI (zero_extend:DI
2454 (match_operand:SI 2 "s_register_operand" "r,r")))
2455 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2458 bic%?\\t%Q0, %Q1, %2
2460 ; (not (zero_extend ...)) allows us to just copy the high word from
2461 ; operand1 to operand0.
2464 && operands[0] != operands[1]"
2465 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2466 (set (match_dup 3) (match_dup 4))]
2469 operands[3] = gen_highpart (SImode, operands[0]);
2470 operands[0] = gen_lowpart (SImode, operands[0]);
2471 operands[4] = gen_highpart (SImode, operands[1]);
2472 operands[1] = gen_lowpart (SImode, operands[1]);
2474 [(set_attr "length" "4,8")
2475 (set_attr "predicable" "yes")]
2478 (define_insn_and_split "*anddi_notsesidi_di"
2479 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2480 (and:DI (not:DI (sign_extend:DI
2481 (match_operand:SI 2 "s_register_operand" "r,r")))
2482 (match_operand:DI 1 "s_register_operand" "0,r")))]
2485 "TARGET_32BIT && reload_completed"
2486 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2487 (set (match_dup 3) (and:SI (not:SI
2488 (ashiftrt:SI (match_dup 2) (const_int 31)))
2492 operands[3] = gen_highpart (SImode, operands[0]);
2493 operands[0] = gen_lowpart (SImode, operands[0]);
2494 operands[4] = gen_highpart (SImode, operands[1]);
2495 operands[1] = gen_lowpart (SImode, operands[1]);
2497 [(set_attr "length" "8")
2498 (set_attr "predicable" "yes")]
2501 (define_insn "andsi_notsi_si"
2502 [(set (match_operand:SI 0 "s_register_operand" "=r")
2503 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2504 (match_operand:SI 1 "s_register_operand" "r")))]
2506 "bic%?\\t%0, %1, %2"
2507 [(set_attr "predicable" "yes")]
2510 (define_insn "bicsi3"
2511 [(set (match_operand:SI 0 "register_operand" "=l")
2512 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2513 (match_operand:SI 2 "register_operand" "0")))]
2516 [(set_attr "length" "2")]
2519 (define_insn "andsi_not_shiftsi_si"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2522 [(match_operand:SI 2 "s_register_operand" "r")
2523 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2524 (match_operand:SI 1 "s_register_operand" "r")))]
2526 "bic%?\\t%0, %1, %2%S4"
2527 [(set_attr "predicable" "yes")
2528 (set_attr "shift" "2")
2529 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2530 (const_string "alu_shift")
2531 (const_string "alu_shift_reg")))]
2534 (define_insn "*andsi_notsi_si_compare0"
2535 [(set (reg:CC_NOOV CC_REGNUM)
2537 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2538 (match_operand:SI 1 "s_register_operand" "r"))
2540 (set (match_operand:SI 0 "s_register_operand" "=r")
2541 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2543 "bic%.\\t%0, %1, %2"
2544 [(set_attr "conds" "set")]
2547 (define_insn "*andsi_notsi_si_compare0_scratch"
2548 [(set (reg:CC_NOOV CC_REGNUM)
2550 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2551 (match_operand:SI 1 "s_register_operand" "r"))
2553 (clobber (match_scratch:SI 0 "=r"))]
2555 "bic%.\\t%0, %1, %2"
2556 [(set_attr "conds" "set")]
2559 (define_insn "iordi3"
2560 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2561 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2562 (match_operand:DI 2 "s_register_operand" "r,r")))]
2563 "TARGET_32BIT && ! TARGET_IWMMXT"
2565 [(set_attr "length" "8")
2566 (set_attr "predicable" "yes")]
2569 (define_insn "*iordi_zesidi_di"
2570 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2571 (ior:DI (zero_extend:DI
2572 (match_operand:SI 2 "s_register_operand" "r,r"))
2573 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2576 orr%?\\t%Q0, %Q1, %2
2578 [(set_attr "length" "4,8")
2579 (set_attr "predicable" "yes")]
2582 (define_insn "*iordi_sesidi_di"
2583 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2584 (ior:DI (sign_extend:DI
2585 (match_operand:SI 2 "s_register_operand" "r,r"))
2586 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2589 [(set_attr "length" "8")
2590 (set_attr "predicable" "yes")]
2593 (define_expand "iorsi3"
2594 [(set (match_operand:SI 0 "s_register_operand" "")
2595 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2596 (match_operand:SI 2 "reg_or_int_operand" "")))]
2599 if (GET_CODE (operands[2]) == CONST_INT)
2603 arm_split_constant (IOR, SImode, NULL_RTX,
2604 INTVAL (operands[2]), operands[0], operands[1],
2605 optimize && can_create_pseudo_p ());
2608 else /* TARGET_THUMB1 */
2609 operands [2] = force_reg (SImode, operands [2]);
2614 (define_insn_and_split "*arm_iorsi3"
2615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2616 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2617 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2623 && GET_CODE (operands[2]) == CONST_INT
2624 && !const_ok_for_arm (INTVAL (operands[2]))"
2625 [(clobber (const_int 0))]
2627 arm_split_constant (IOR, SImode, curr_insn,
2628 INTVAL (operands[2]), operands[0], operands[1], 0);
2631 [(set_attr "length" "4,16")
2632 (set_attr "predicable" "yes")]
2635 (define_insn "*thumb1_iorsi3"
2636 [(set (match_operand:SI 0 "register_operand" "=l")
2637 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2638 (match_operand:SI 2 "register_operand" "l")))]
2641 [(set_attr "length" "2")]
2645 [(match_scratch:SI 3 "r")
2646 (set (match_operand:SI 0 "arm_general_register_operand" "")
2647 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2648 (match_operand:SI 2 "const_int_operand" "")))]
2650 && !const_ok_for_arm (INTVAL (operands[2]))
2651 && const_ok_for_arm (~INTVAL (operands[2]))"
2652 [(set (match_dup 3) (match_dup 2))
2653 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2657 (define_insn "*iorsi3_compare0"
2658 [(set (reg:CC_NOOV CC_REGNUM)
2659 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2660 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2662 (set (match_operand:SI 0 "s_register_operand" "=r")
2663 (ior:SI (match_dup 1) (match_dup 2)))]
2665 "orr%.\\t%0, %1, %2"
2666 [(set_attr "conds" "set")]
2669 (define_insn "*iorsi3_compare0_scratch"
2670 [(set (reg:CC_NOOV CC_REGNUM)
2671 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2672 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2674 (clobber (match_scratch:SI 0 "=r"))]
2676 "orr%.\\t%0, %1, %2"
2677 [(set_attr "conds" "set")]
2680 (define_insn "xordi3"
2681 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2682 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2683 (match_operand:DI 2 "s_register_operand" "r,r")))]
2684 "TARGET_32BIT && !TARGET_IWMMXT"
2686 [(set_attr "length" "8")
2687 (set_attr "predicable" "yes")]
2690 (define_insn "*xordi_zesidi_di"
2691 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2692 (xor:DI (zero_extend:DI
2693 (match_operand:SI 2 "s_register_operand" "r,r"))
2694 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2697 eor%?\\t%Q0, %Q1, %2
2699 [(set_attr "length" "4,8")
2700 (set_attr "predicable" "yes")]
2703 (define_insn "*xordi_sesidi_di"
2704 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2705 (xor:DI (sign_extend:DI
2706 (match_operand:SI 2 "s_register_operand" "r,r"))
2707 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2710 [(set_attr "length" "8")
2711 (set_attr "predicable" "yes")]
2714 (define_expand "xorsi3"
2715 [(set (match_operand:SI 0 "s_register_operand" "")
2716 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2717 (match_operand:SI 2 "arm_rhs_operand" "")))]
2720 if (GET_CODE (operands[2]) == CONST_INT)
2721 operands[2] = force_reg (SImode, operands[2]);
2725 (define_insn "*arm_xorsi3"
2726 [(set (match_operand:SI 0 "s_register_operand" "=r")
2727 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2728 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2730 "eor%?\\t%0, %1, %2"
2731 [(set_attr "predicable" "yes")]
2734 (define_insn "*thumb1_xorsi3"
2735 [(set (match_operand:SI 0 "register_operand" "=l")
2736 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2737 (match_operand:SI 2 "register_operand" "l")))]
2740 [(set_attr "length" "2")]
2743 (define_insn "*xorsi3_compare0"
2744 [(set (reg:CC_NOOV CC_REGNUM)
2745 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2746 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2748 (set (match_operand:SI 0 "s_register_operand" "=r")
2749 (xor:SI (match_dup 1) (match_dup 2)))]
2751 "eor%.\\t%0, %1, %2"
2752 [(set_attr "conds" "set")]
2755 (define_insn "*xorsi3_compare0_scratch"
2756 [(set (reg:CC_NOOV CC_REGNUM)
2757 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2758 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2762 [(set_attr "conds" "set")]
2765 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2766 ; (NOT D) we can sometimes merge the final NOT into one of the following
2770 [(set (match_operand:SI 0 "s_register_operand" "")
2771 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2772 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2773 (match_operand:SI 3 "arm_rhs_operand" "")))
2774 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2776 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2777 (not:SI (match_dup 3))))
2778 (set (match_dup 0) (not:SI (match_dup 4)))]
2782 (define_insn "*andsi_iorsi3_notsi"
2783 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2784 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2785 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2786 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2788 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2789 [(set_attr "length" "8")
2790 (set_attr "ce_count" "2")
2791 (set_attr "predicable" "yes")]
2794 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2795 ; insns are available?
2797 [(set (match_operand:SI 0 "s_register_operand" "")
2798 (match_operator:SI 1 "logical_binary_operator"
2799 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2800 (match_operand:SI 3 "const_int_operand" "")
2801 (match_operand:SI 4 "const_int_operand" ""))
2802 (match_operator:SI 9 "logical_binary_operator"
2803 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2804 (match_operand:SI 6 "const_int_operand" ""))
2805 (match_operand:SI 7 "s_register_operand" "")])]))
2806 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2808 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2809 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2812 [(ashift:SI (match_dup 2) (match_dup 4))
2816 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2819 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2823 [(set (match_operand:SI 0 "s_register_operand" "")
2824 (match_operator:SI 1 "logical_binary_operator"
2825 [(match_operator:SI 9 "logical_binary_operator"
2826 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2827 (match_operand:SI 6 "const_int_operand" ""))
2828 (match_operand:SI 7 "s_register_operand" "")])
2829 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2830 (match_operand:SI 3 "const_int_operand" "")
2831 (match_operand:SI 4 "const_int_operand" ""))]))
2832 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2834 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2835 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2838 [(ashift:SI (match_dup 2) (match_dup 4))
2842 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2845 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2849 [(set (match_operand:SI 0 "s_register_operand" "")
2850 (match_operator:SI 1 "logical_binary_operator"
2851 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2852 (match_operand:SI 3 "const_int_operand" "")
2853 (match_operand:SI 4 "const_int_operand" ""))
2854 (match_operator:SI 9 "logical_binary_operator"
2855 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2856 (match_operand:SI 6 "const_int_operand" ""))
2857 (match_operand:SI 7 "s_register_operand" "")])]))
2858 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2860 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2861 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2864 [(ashift:SI (match_dup 2) (match_dup 4))
2868 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2871 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2875 [(set (match_operand:SI 0 "s_register_operand" "")
2876 (match_operator:SI 1 "logical_binary_operator"
2877 [(match_operator:SI 9 "logical_binary_operator"
2878 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2879 (match_operand:SI 6 "const_int_operand" ""))
2880 (match_operand:SI 7 "s_register_operand" "")])
2881 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2882 (match_operand:SI 3 "const_int_operand" "")
2883 (match_operand:SI 4 "const_int_operand" ""))]))
2884 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2886 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2887 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2890 [(ashift:SI (match_dup 2) (match_dup 4))
2894 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2897 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2901 ;; Minimum and maximum insns
2903 (define_expand "smaxsi3"
2905 (set (match_operand:SI 0 "s_register_operand" "")
2906 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2907 (match_operand:SI 2 "arm_rhs_operand" "")))
2908 (clobber (reg:CC CC_REGNUM))])]
2911 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2913 /* No need for a clobber of the condition code register here. */
2914 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2915 gen_rtx_SMAX (SImode, operands[1],
2921 (define_insn "*smax_0"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r")
2923 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2926 "bic%?\\t%0, %1, %1, asr #31"
2927 [(set_attr "predicable" "yes")]
2930 (define_insn "*smax_m1"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r")
2932 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2935 "orr%?\\t%0, %1, %1, asr #31"
2936 [(set_attr "predicable" "yes")]
2939 (define_insn "*arm_smax_insn"
2940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2941 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2942 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2943 (clobber (reg:CC CC_REGNUM))]
2946 cmp\\t%1, %2\;movlt\\t%0, %2
2947 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2948 [(set_attr "conds" "clob")
2949 (set_attr "length" "8,12")]
2952 (define_expand "sminsi3"
2954 (set (match_operand:SI 0 "s_register_operand" "")
2955 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2956 (match_operand:SI 2 "arm_rhs_operand" "")))
2957 (clobber (reg:CC CC_REGNUM))])]
2960 if (operands[2] == const0_rtx)
2962 /* No need for a clobber of the condition code register here. */
2963 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2964 gen_rtx_SMIN (SImode, operands[1],
2970 (define_insn "*smin_0"
2971 [(set (match_operand:SI 0 "s_register_operand" "=r")
2972 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2975 "and%?\\t%0, %1, %1, asr #31"
2976 [(set_attr "predicable" "yes")]
2979 (define_insn "*arm_smin_insn"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2981 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2982 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2983 (clobber (reg:CC CC_REGNUM))]
2986 cmp\\t%1, %2\;movge\\t%0, %2
2987 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2988 [(set_attr "conds" "clob")
2989 (set_attr "length" "8,12")]
2992 (define_expand "umaxsi3"
2994 (set (match_operand:SI 0 "s_register_operand" "")
2995 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2996 (match_operand:SI 2 "arm_rhs_operand" "")))
2997 (clobber (reg:CC CC_REGNUM))])]
3002 (define_insn "*arm_umaxsi3"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3004 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3005 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3006 (clobber (reg:CC CC_REGNUM))]
3009 cmp\\t%1, %2\;movcc\\t%0, %2
3010 cmp\\t%1, %2\;movcs\\t%0, %1
3011 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3012 [(set_attr "conds" "clob")
3013 (set_attr "length" "8,8,12")]
3016 (define_expand "uminsi3"
3018 (set (match_operand:SI 0 "s_register_operand" "")
3019 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3020 (match_operand:SI 2 "arm_rhs_operand" "")))
3021 (clobber (reg:CC CC_REGNUM))])]
3026 (define_insn "*arm_uminsi3"
3027 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3028 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3029 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3030 (clobber (reg:CC CC_REGNUM))]
3033 cmp\\t%1, %2\;movcs\\t%0, %2
3034 cmp\\t%1, %2\;movcc\\t%0, %1
3035 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3036 [(set_attr "conds" "clob")
3037 (set_attr "length" "8,8,12")]
3040 (define_insn "*store_minmaxsi"
3041 [(set (match_operand:SI 0 "memory_operand" "=m")
3042 (match_operator:SI 3 "minmax_operator"
3043 [(match_operand:SI 1 "s_register_operand" "r")
3044 (match_operand:SI 2 "s_register_operand" "r")]))
3045 (clobber (reg:CC CC_REGNUM))]
3048 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3049 operands[1], operands[2]);
3050 output_asm_insn (\"cmp\\t%1, %2\", operands);
3052 output_asm_insn (\"ite\t%d3\", operands);
3053 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3054 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3057 [(set_attr "conds" "clob")
3058 (set (attr "length")
3059 (if_then_else (eq_attr "is_thumb" "yes")
3062 (set_attr "type" "store1")]
3065 ; Reject the frame pointer in operand[1], since reloading this after
3066 ; it has been eliminated can cause carnage.
3067 (define_insn "*minmax_arithsi"
3068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3069 (match_operator:SI 4 "shiftable_operator"
3070 [(match_operator:SI 5 "minmax_operator"
3071 [(match_operand:SI 2 "s_register_operand" "r,r")
3072 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3073 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3074 (clobber (reg:CC CC_REGNUM))]
3075 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3078 enum rtx_code code = GET_CODE (operands[4]);
3081 if (which_alternative != 0 || operands[3] != const0_rtx
3082 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3087 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3088 operands[2], operands[3]);
3089 output_asm_insn (\"cmp\\t%2, %3\", operands);
3093 output_asm_insn (\"ite\\t%d5\", operands);
3095 output_asm_insn (\"it\\t%d5\", operands);
3097 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3099 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3102 [(set_attr "conds" "clob")
3103 (set (attr "length")
3104 (if_then_else (eq_attr "is_thumb" "yes")
3110 ;; Shift and rotation insns
3112 (define_expand "ashldi3"
3113 [(set (match_operand:DI 0 "s_register_operand" "")
3114 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3115 (match_operand:SI 2 "reg_or_int_operand" "")))]
3118 if (GET_CODE (operands[2]) == CONST_INT)
3120 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3122 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3125 /* Ideally we shouldn't fail here if we could know that operands[1]
3126 ends up already living in an iwmmxt register. Otherwise it's
3127 cheaper to have the alternate code being generated than moving
3128 values to iwmmxt regs and back. */
3131 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3136 (define_insn "arm_ashldi3_1bit"
3137 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3138 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3140 (clobber (reg:CC CC_REGNUM))]
3142 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3143 [(set_attr "conds" "clob")
3144 (set_attr "length" "8")]
3147 (define_expand "ashlsi3"
3148 [(set (match_operand:SI 0 "s_register_operand" "")
3149 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3150 (match_operand:SI 2 "arm_rhs_operand" "")))]
3153 if (GET_CODE (operands[2]) == CONST_INT
3154 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3156 emit_insn (gen_movsi (operands[0], const0_rtx));
3162 (define_insn "*thumb1_ashlsi3"
3163 [(set (match_operand:SI 0 "register_operand" "=l,l")
3164 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3165 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3168 [(set_attr "length" "2")]
3171 (define_expand "ashrdi3"
3172 [(set (match_operand:DI 0 "s_register_operand" "")
3173 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3174 (match_operand:SI 2 "reg_or_int_operand" "")))]
3177 if (GET_CODE (operands[2]) == CONST_INT)
3179 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3181 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3184 /* Ideally we shouldn't fail here if we could know that operands[1]
3185 ends up already living in an iwmmxt register. Otherwise it's
3186 cheaper to have the alternate code being generated than moving
3187 values to iwmmxt regs and back. */
3190 else if (!TARGET_REALLY_IWMMXT)
3195 (define_insn "arm_ashrdi3_1bit"
3196 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3197 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3199 (clobber (reg:CC CC_REGNUM))]
3201 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3202 [(set_attr "conds" "clob")
3203 (set_attr "length" "8")]
3206 (define_expand "ashrsi3"
3207 [(set (match_operand:SI 0 "s_register_operand" "")
3208 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3209 (match_operand:SI 2 "arm_rhs_operand" "")))]
3212 if (GET_CODE (operands[2]) == CONST_INT
3213 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3214 operands[2] = GEN_INT (31);
3218 (define_insn "*thumb1_ashrsi3"
3219 [(set (match_operand:SI 0 "register_operand" "=l,l")
3220 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3221 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3224 [(set_attr "length" "2")]
3227 (define_expand "lshrdi3"
3228 [(set (match_operand:DI 0 "s_register_operand" "")
3229 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3230 (match_operand:SI 2 "reg_or_int_operand" "")))]
3233 if (GET_CODE (operands[2]) == CONST_INT)
3235 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3237 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3240 /* Ideally we shouldn't fail here if we could know that operands[1]
3241 ends up already living in an iwmmxt register. Otherwise it's
3242 cheaper to have the alternate code being generated than moving
3243 values to iwmmxt regs and back. */
3246 else if (!TARGET_REALLY_IWMMXT)
3251 (define_insn "arm_lshrdi3_1bit"
3252 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3253 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3255 (clobber (reg:CC CC_REGNUM))]
3257 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3258 [(set_attr "conds" "clob")
3259 (set_attr "length" "8")]
3262 (define_expand "lshrsi3"
3263 [(set (match_operand:SI 0 "s_register_operand" "")
3264 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3265 (match_operand:SI 2 "arm_rhs_operand" "")))]
3268 if (GET_CODE (operands[2]) == CONST_INT
3269 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3271 emit_insn (gen_movsi (operands[0], const0_rtx));
3277 (define_insn "*thumb1_lshrsi3"
3278 [(set (match_operand:SI 0 "register_operand" "=l,l")
3279 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3280 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3283 [(set_attr "length" "2")]
3286 (define_expand "rotlsi3"
3287 [(set (match_operand:SI 0 "s_register_operand" "")
3288 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3289 (match_operand:SI 2 "reg_or_int_operand" "")))]
3292 if (GET_CODE (operands[2]) == CONST_INT)
3293 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3296 rtx reg = gen_reg_rtx (SImode);
3297 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3303 (define_expand "rotrsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "")
3305 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "arm_rhs_operand" "")))]
3311 if (GET_CODE (operands[2]) == CONST_INT
3312 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3313 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3315 else /* TARGET_THUMB1 */
3317 if (GET_CODE (operands [2]) == CONST_INT)
3318 operands [2] = force_reg (SImode, operands[2]);
3323 (define_insn "*thumb1_rotrsi3"
3324 [(set (match_operand:SI 0 "register_operand" "=l")
3325 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3326 (match_operand:SI 2 "register_operand" "l")))]
3329 [(set_attr "length" "2")]
3332 (define_insn "*arm_shiftsi3"
3333 [(set (match_operand:SI 0 "s_register_operand" "=r")
3334 (match_operator:SI 3 "shift_operator"
3335 [(match_operand:SI 1 "s_register_operand" "r")
3336 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3338 "* return arm_output_shift(operands, 0);"
3339 [(set_attr "predicable" "yes")
3340 (set_attr "shift" "1")
3341 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3342 (const_string "alu_shift")
3343 (const_string "alu_shift_reg")))]
3346 (define_insn "*shiftsi3_compare0"
3347 [(set (reg:CC_NOOV CC_REGNUM)
3348 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3349 [(match_operand:SI 1 "s_register_operand" "r")
3350 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3352 (set (match_operand:SI 0 "s_register_operand" "=r")
3353 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3355 "* return arm_output_shift(operands, 1);"
3356 [(set_attr "conds" "set")
3357 (set_attr "shift" "1")
3358 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3359 (const_string "alu_shift")
3360 (const_string "alu_shift_reg")))]
3363 (define_insn "*shiftsi3_compare0_scratch"
3364 [(set (reg:CC_NOOV CC_REGNUM)
3365 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3366 [(match_operand:SI 1 "s_register_operand" "r")
3367 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3369 (clobber (match_scratch:SI 0 "=r"))]
3371 "* return arm_output_shift(operands, 1);"
3372 [(set_attr "conds" "set")
3373 (set_attr "shift" "1")]
3376 (define_insn "*arm_notsi_shiftsi"
3377 [(set (match_operand:SI 0 "s_register_operand" "=r")
3378 (not:SI (match_operator:SI 3 "shift_operator"
3379 [(match_operand:SI 1 "s_register_operand" "r")
3380 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3383 [(set_attr "predicable" "yes")
3384 (set_attr "shift" "1")
3385 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3386 (const_string "alu_shift")
3387 (const_string "alu_shift_reg")))]
3390 (define_insn "*arm_notsi_shiftsi_compare0"
3391 [(set (reg:CC_NOOV CC_REGNUM)
3392 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3393 [(match_operand:SI 1 "s_register_operand" "r")
3394 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3396 (set (match_operand:SI 0 "s_register_operand" "=r")
3397 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3400 [(set_attr "conds" "set")
3401 (set_attr "shift" "1")
3402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3403 (const_string "alu_shift")
3404 (const_string "alu_shift_reg")))]
3407 (define_insn "*arm_not_shiftsi_compare0_scratch"
3408 [(set (reg:CC_NOOV CC_REGNUM)
3409 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3410 [(match_operand:SI 1 "s_register_operand" "r")
3411 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3413 (clobber (match_scratch:SI 0 "=r"))]
3416 [(set_attr "conds" "set")
3417 (set_attr "shift" "1")
3418 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3419 (const_string "alu_shift")
3420 (const_string "alu_shift_reg")))]
3423 ;; We don't really have extzv, but defining this using shifts helps
3424 ;; to reduce register pressure later on.
3426 (define_expand "extzv"
3428 (ashift:SI (match_operand:SI 1 "register_operand" "")
3429 (match_operand:SI 2 "const_int_operand" "")))
3430 (set (match_operand:SI 0 "register_operand" "")
3431 (lshiftrt:SI (match_dup 4)
3432 (match_operand:SI 3 "const_int_operand" "")))]
3433 "TARGET_THUMB1 || arm_arch_thumb2"
3436 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3437 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3439 if (arm_arch_thumb2)
3441 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3446 operands[3] = GEN_INT (rshift);
3450 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3454 operands[2] = GEN_INT (lshift);
3455 operands[4] = gen_reg_rtx (SImode);
3460 [(set (match_operand:SI 0 "s_register_operand" "=r")
3461 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3462 (match_operand:SI 2 "const_int_operand" "M")
3463 (match_operand:SI 3 "const_int_operand" "M")))]
3465 "sbfx%?\t%0, %1, %3, %2"
3466 [(set_attr "length" "4")
3467 (set_attr "predicable" "yes")]
3470 (define_insn "extzv_t2"
3471 [(set (match_operand:SI 0 "s_register_operand" "=r")
3472 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3473 (match_operand:SI 2 "const_int_operand" "M")
3474 (match_operand:SI 3 "const_int_operand" "M")))]
3476 "ubfx%?\t%0, %1, %3, %2"
3477 [(set_attr "length" "4")
3478 (set_attr "predicable" "yes")]
3482 ;; Unary arithmetic insns
3484 (define_expand "negdi2"
3486 [(set (match_operand:DI 0 "s_register_operand" "")
3487 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3488 (clobber (reg:CC CC_REGNUM))])]
3493 if (GET_CODE (operands[1]) != REG)
3494 operands[1] = force_reg (SImode, operands[1]);
3499 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3500 ;; The second alternative is to allow the common case of a *full* overlap.
3501 (define_insn "*arm_negdi2"
3502 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3503 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3504 (clobber (reg:CC CC_REGNUM))]
3506 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3507 [(set_attr "conds" "clob")
3508 (set_attr "length" "8")]
3511 (define_insn "*thumb1_negdi2"
3512 [(set (match_operand:DI 0 "register_operand" "=&l")
3513 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3514 (clobber (reg:CC CC_REGNUM))]
3516 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3517 [(set_attr "length" "6")]
3520 (define_expand "negsi2"
3521 [(set (match_operand:SI 0 "s_register_operand" "")
3522 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3527 (define_insn "*arm_negsi2"
3528 [(set (match_operand:SI 0 "s_register_operand" "=r")
3529 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3531 "rsb%?\\t%0, %1, #0"
3532 [(set_attr "predicable" "yes")]
3535 (define_insn "*thumb1_negsi2"
3536 [(set (match_operand:SI 0 "register_operand" "=l")
3537 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3540 [(set_attr "length" "2")]
3543 (define_expand "negsf2"
3544 [(set (match_operand:SF 0 "s_register_operand" "")
3545 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3546 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3550 (define_expand "negdf2"
3551 [(set (match_operand:DF 0 "s_register_operand" "")
3552 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3553 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3556 ;; abssi2 doesn't really clobber the condition codes if a different register
3557 ;; is being set. To keep things simple, assume during rtl manipulations that
3558 ;; it does, but tell the final scan operator the truth. Similarly for
3561 (define_expand "abssi2"
3563 [(set (match_operand:SI 0 "s_register_operand" "")
3564 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3565 (clobber (match_dup 2))])]
3569 operands[2] = gen_rtx_SCRATCH (SImode);
3571 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3574 (define_insn "*arm_abssi2"
3575 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3576 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3577 (clobber (reg:CC CC_REGNUM))]
3580 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3581 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3582 [(set_attr "conds" "clob,*")
3583 (set_attr "shift" "1")
3584 ;; predicable can't be set based on the variant, so left as no
3585 (set_attr "length" "8")]
3588 (define_insn_and_split "*thumb1_abssi2"
3589 [(set (match_operand:SI 0 "s_register_operand" "=l")
3590 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3591 (clobber (match_scratch:SI 2 "=&l"))]
3594 "TARGET_THUMB1 && reload_completed"
3595 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3596 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3597 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3599 [(set_attr "length" "6")]
3602 (define_insn "*arm_neg_abssi2"
3603 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3604 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3605 (clobber (reg:CC CC_REGNUM))]
3608 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3609 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3610 [(set_attr "conds" "clob,*")
3611 (set_attr "shift" "1")
3612 ;; predicable can't be set based on the variant, so left as no
3613 (set_attr "length" "8")]
3616 (define_insn_and_split "*thumb1_neg_abssi2"
3617 [(set (match_operand:SI 0 "s_register_operand" "=l")
3618 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3619 (clobber (match_scratch:SI 2 "=&l"))]
3622 "TARGET_THUMB1 && reload_completed"
3623 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3624 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3625 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3627 [(set_attr "length" "6")]
3630 (define_expand "abssf2"
3631 [(set (match_operand:SF 0 "s_register_operand" "")
3632 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3633 "TARGET_32BIT && TARGET_HARD_FLOAT"
3636 (define_expand "absdf2"
3637 [(set (match_operand:DF 0 "s_register_operand" "")
3638 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3639 "TARGET_32BIT && TARGET_HARD_FLOAT"
3642 (define_expand "sqrtsf2"
3643 [(set (match_operand:SF 0 "s_register_operand" "")
3644 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3645 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3648 (define_expand "sqrtdf2"
3649 [(set (match_operand:DF 0 "s_register_operand" "")
3650 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3651 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3654 (define_insn_and_split "one_cmpldi2"
3655 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3656 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3659 "TARGET_32BIT && reload_completed"
3660 [(set (match_dup 0) (not:SI (match_dup 1)))
3661 (set (match_dup 2) (not:SI (match_dup 3)))]
3664 operands[2] = gen_highpart (SImode, operands[0]);
3665 operands[0] = gen_lowpart (SImode, operands[0]);
3666 operands[3] = gen_highpart (SImode, operands[1]);
3667 operands[1] = gen_lowpart (SImode, operands[1]);
3669 [(set_attr "length" "8")
3670 (set_attr "predicable" "yes")]
3673 (define_expand "one_cmplsi2"
3674 [(set (match_operand:SI 0 "s_register_operand" "")
3675 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3680 (define_insn "*arm_one_cmplsi2"
3681 [(set (match_operand:SI 0 "s_register_operand" "=r")
3682 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3685 [(set_attr "predicable" "yes")]
3688 (define_insn "*thumb1_one_cmplsi2"
3689 [(set (match_operand:SI 0 "register_operand" "=l")
3690 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3693 [(set_attr "length" "2")]
3696 (define_insn "*notsi_compare0"
3697 [(set (reg:CC_NOOV CC_REGNUM)
3698 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3700 (set (match_operand:SI 0 "s_register_operand" "=r")
3701 (not:SI (match_dup 1)))]
3704 [(set_attr "conds" "set")]
3707 (define_insn "*notsi_compare0_scratch"
3708 [(set (reg:CC_NOOV CC_REGNUM)
3709 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3711 (clobber (match_scratch:SI 0 "=r"))]
3714 [(set_attr "conds" "set")]
3717 ;; Fixed <--> Floating conversion insns
3719 (define_expand "floatsisf2"
3720 [(set (match_operand:SF 0 "s_register_operand" "")
3721 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3722 "TARGET_32BIT && TARGET_HARD_FLOAT"
3724 if (TARGET_MAVERICK)
3726 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3731 (define_expand "floatsidf2"
3732 [(set (match_operand:DF 0 "s_register_operand" "")
3733 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3734 "TARGET_32BIT && TARGET_HARD_FLOAT"
3736 if (TARGET_MAVERICK)
3738 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3743 (define_expand "fix_truncsfsi2"
3744 [(set (match_operand:SI 0 "s_register_operand" "")
3745 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3746 "TARGET_32BIT && TARGET_HARD_FLOAT"
3748 if (TARGET_MAVERICK)
3750 if (!cirrus_fp_register (operands[0], SImode))
3751 operands[0] = force_reg (SImode, operands[0]);
3752 if (!cirrus_fp_register (operands[1], SFmode))
3753 operands[1] = force_reg (SFmode, operands[0]);
3754 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3759 (define_expand "fix_truncdfsi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "")
3761 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3762 "TARGET_32BIT && TARGET_HARD_FLOAT"
3764 if (TARGET_MAVERICK)
3766 if (!cirrus_fp_register (operands[1], DFmode))
3767 operands[1] = force_reg (DFmode, operands[0]);
3768 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3775 (define_expand "truncdfsf2"
3776 [(set (match_operand:SF 0 "s_register_operand" "")
3778 (match_operand:DF 1 "s_register_operand" "")))]
3779 "TARGET_32BIT && TARGET_HARD_FLOAT"
3783 ;; Zero and sign extension instructions.
3785 (define_expand "zero_extendsidi2"
3786 [(set (match_operand:DI 0 "s_register_operand" "")
3787 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3792 (define_insn "*arm_zero_extendsidi2"
3793 [(set (match_operand:DI 0 "s_register_operand" "=r")
3794 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3797 if (REGNO (operands[1])
3798 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3799 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3800 return \"mov%?\\t%R0, #0\";
3802 [(set_attr "length" "8")
3803 (set_attr "predicable" "yes")]
3806 (define_expand "zero_extendqidi2"
3807 [(set (match_operand:DI 0 "s_register_operand" "")
3808 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3813 (define_insn "*arm_zero_extendqidi2"
3814 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3815 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3818 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3819 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3820 [(set_attr "length" "8")
3821 (set_attr "predicable" "yes")
3822 (set_attr "type" "*,load_byte")
3823 (set_attr "pool_range" "*,4092")
3824 (set_attr "neg_pool_range" "*,4084")]
3827 (define_expand "extendsidi2"
3828 [(set (match_operand:DI 0 "s_register_operand" "")
3829 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3834 (define_insn "*arm_extendsidi2"
3835 [(set (match_operand:DI 0 "s_register_operand" "=r")
3836 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3839 if (REGNO (operands[1])
3840 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3841 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3842 return \"mov%?\\t%R0, %Q0, asr #31\";
3844 [(set_attr "length" "8")
3845 (set_attr "shift" "1")
3846 (set_attr "predicable" "yes")]
3849 (define_expand "zero_extendhisi2"
3851 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3853 (set (match_operand:SI 0 "s_register_operand" "")
3854 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3858 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3860 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3861 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3865 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3867 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3871 if (!s_register_operand (operands[1], HImode))
3872 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3876 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3877 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3881 operands[1] = gen_lowpart (SImode, operands[1]);
3882 operands[2] = gen_reg_rtx (SImode);
3886 (define_insn "*thumb1_zero_extendhisi2"
3887 [(set (match_operand:SI 0 "register_operand" "=l")
3888 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3889 "TARGET_THUMB1 && !arm_arch6"
3891 rtx mem = XEXP (operands[1], 0);
3893 if (GET_CODE (mem) == CONST)
3894 mem = XEXP (mem, 0);
3896 if (GET_CODE (mem) == LABEL_REF)
3897 return \"ldr\\t%0, %1\";
3899 if (GET_CODE (mem) == PLUS)
3901 rtx a = XEXP (mem, 0);
3902 rtx b = XEXP (mem, 1);
3904 /* This can happen due to bugs in reload. */
3905 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3908 ops[0] = operands[0];
3911 output_asm_insn (\"mov %0, %1\", ops);
3913 XEXP (mem, 0) = operands[0];
3916 else if ( GET_CODE (a) == LABEL_REF
3917 && GET_CODE (b) == CONST_INT)
3918 return \"ldr\\t%0, %1\";
3921 return \"ldrh\\t%0, %1\";
3923 [(set_attr "length" "4")
3924 (set_attr "type" "load_byte")
3925 (set_attr "pool_range" "60")]
3928 (define_insn "*thumb1_zero_extendhisi2_v6"
3929 [(set (match_operand:SI 0 "register_operand" "=l,l")
3930 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3931 "TARGET_THUMB1 && arm_arch6"
3935 if (which_alternative == 0)
3936 return \"uxth\\t%0, %1\";
3938 mem = XEXP (operands[1], 0);
3940 if (GET_CODE (mem) == CONST)
3941 mem = XEXP (mem, 0);
3943 if (GET_CODE (mem) == LABEL_REF)
3944 return \"ldr\\t%0, %1\";
3946 if (GET_CODE (mem) == PLUS)
3948 rtx a = XEXP (mem, 0);
3949 rtx b = XEXP (mem, 1);
3951 /* This can happen due to bugs in reload. */
3952 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3955 ops[0] = operands[0];
3958 output_asm_insn (\"mov %0, %1\", ops);
3960 XEXP (mem, 0) = operands[0];
3963 else if ( GET_CODE (a) == LABEL_REF
3964 && GET_CODE (b) == CONST_INT)
3965 return \"ldr\\t%0, %1\";
3968 return \"ldrh\\t%0, %1\";
3970 [(set_attr "length" "2,4")
3971 (set_attr "type" "alu_shift,load_byte")
3972 (set_attr "pool_range" "*,60")]
3975 (define_insn "*arm_zero_extendhisi2"
3976 [(set (match_operand:SI 0 "s_register_operand" "=r")
3977 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3978 "TARGET_ARM && arm_arch4 && !arm_arch6"
3980 [(set_attr "type" "load_byte")
3981 (set_attr "predicable" "yes")
3982 (set_attr "pool_range" "256")
3983 (set_attr "neg_pool_range" "244")]
3986 (define_insn "*arm_zero_extendhisi2_v6"
3987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3988 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3989 "TARGET_ARM && arm_arch6"
3993 [(set_attr "type" "alu_shift,load_byte")
3994 (set_attr "predicable" "yes")
3995 (set_attr "pool_range" "*,256")
3996 (set_attr "neg_pool_range" "*,244")]
3999 (define_insn "*arm_zero_extendhisi2addsi"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4002 (match_operand:SI 2 "s_register_operand" "r")))]
4004 "uxtah%?\\t%0, %2, %1"
4005 [(set_attr "type" "alu_shift")
4006 (set_attr "predicable" "yes")]
4009 (define_expand "zero_extendqisi2"
4010 [(set (match_operand:SI 0 "s_register_operand" "")
4011 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4014 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4018 emit_insn (gen_andsi3 (operands[0],
4019 gen_lowpart (SImode, operands[1]),
4022 else /* TARGET_THUMB */
4024 rtx temp = gen_reg_rtx (SImode);
4027 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4028 operands[1] = gen_lowpart (SImode, operands[1]);
4031 ops[1] = operands[1];
4032 ops[2] = GEN_INT (24);
4034 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4035 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4037 ops[0] = operands[0];
4039 ops[2] = GEN_INT (24);
4041 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4042 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4049 (define_insn "*thumb1_zero_extendqisi2"
4050 [(set (match_operand:SI 0 "register_operand" "=l")
4051 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4052 "TARGET_THUMB1 && !arm_arch6"
4054 [(set_attr "length" "2")
4055 (set_attr "type" "load_byte")
4056 (set_attr "pool_range" "32")]
4059 (define_insn "*thumb1_zero_extendqisi2_v6"
4060 [(set (match_operand:SI 0 "register_operand" "=l,l")
4061 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4062 "TARGET_THUMB1 && arm_arch6"
4066 [(set_attr "length" "2,2")
4067 (set_attr "type" "alu_shift,load_byte")
4068 (set_attr "pool_range" "*,32")]
4071 (define_insn "*arm_zero_extendqisi2"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r")
4073 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4074 "TARGET_ARM && !arm_arch6"
4075 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4076 [(set_attr "type" "load_byte")
4077 (set_attr "predicable" "yes")
4078 (set_attr "pool_range" "4096")
4079 (set_attr "neg_pool_range" "4084")]
4082 (define_insn "*arm_zero_extendqisi2_v6"
4083 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4084 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4085 "TARGET_ARM && arm_arch6"
4088 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4089 [(set_attr "type" "alu_shift,load_byte")
4090 (set_attr "predicable" "yes")
4091 (set_attr "pool_range" "*,4096")
4092 (set_attr "neg_pool_range" "*,4084")]
4095 (define_insn "*arm_zero_extendqisi2addsi"
4096 [(set (match_operand:SI 0 "s_register_operand" "=r")
4097 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4098 (match_operand:SI 2 "s_register_operand" "r")))]
4100 "uxtab%?\\t%0, %2, %1"
4101 [(set_attr "predicable" "yes")
4102 (set_attr "insn" "xtab")
4103 (set_attr "type" "alu_shift")]
4107 [(set (match_operand:SI 0 "s_register_operand" "")
4108 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4109 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4110 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4111 [(set (match_dup 2) (match_dup 1))
4112 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4117 [(set (match_operand:SI 0 "s_register_operand" "")
4118 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4119 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4120 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4121 [(set (match_dup 2) (match_dup 1))
4122 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4126 (define_insn "*compareqi_eq0"
4127 [(set (reg:CC_Z CC_REGNUM)
4128 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4132 [(set_attr "conds" "set")]
4135 (define_expand "extendhisi2"
4137 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4139 (set (match_operand:SI 0 "s_register_operand" "")
4140 (ashiftrt:SI (match_dup 2)
4145 if (GET_CODE (operands[1]) == MEM)
4149 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4154 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4155 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4160 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4162 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4166 if (!s_register_operand (operands[1], HImode))
4167 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4172 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4174 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4175 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4180 operands[1] = gen_lowpart (SImode, operands[1]);
4181 operands[2] = gen_reg_rtx (SImode);
4185 (define_insn "thumb1_extendhisi2"
4186 [(set (match_operand:SI 0 "register_operand" "=l")
4187 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4188 (clobber (match_scratch:SI 2 "=&l"))]
4189 "TARGET_THUMB1 && !arm_arch6"
4193 rtx mem = XEXP (operands[1], 0);
4195 /* This code used to try to use 'V', and fix the address only if it was
4196 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4197 range of QImode offsets, and offsettable_address_p does a QImode
4200 if (GET_CODE (mem) == CONST)
4201 mem = XEXP (mem, 0);
4203 if (GET_CODE (mem) == LABEL_REF)
4204 return \"ldr\\t%0, %1\";
4206 if (GET_CODE (mem) == PLUS)
4208 rtx a = XEXP (mem, 0);
4209 rtx b = XEXP (mem, 1);
4211 if (GET_CODE (a) == LABEL_REF
4212 && GET_CODE (b) == CONST_INT)
4213 return \"ldr\\t%0, %1\";
4215 if (GET_CODE (b) == REG)
4216 return \"ldrsh\\t%0, %1\";
4224 ops[2] = const0_rtx;
4227 gcc_assert (GET_CODE (ops[1]) == REG);
4229 ops[0] = operands[0];
4230 ops[3] = operands[2];
4231 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4234 [(set_attr "length" "4")
4235 (set_attr "type" "load_byte")
4236 (set_attr "pool_range" "1020")]
4239 ;; We used to have an early-clobber on the scratch register here.
4240 ;; However, there's a bug somewhere in reload which means that this
4241 ;; can be partially ignored during spill allocation if the memory
4242 ;; address also needs reloading; this causes us to die later on when
4243 ;; we try to verify the operands. Fortunately, we don't really need
4244 ;; the early-clobber: we can always use operand 0 if operand 2
4245 ;; overlaps the address.
4246 (define_insn "*thumb1_extendhisi2_insn_v6"
4247 [(set (match_operand:SI 0 "register_operand" "=l,l")
4248 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4249 (clobber (match_scratch:SI 2 "=X,l"))]
4250 "TARGET_THUMB1 && arm_arch6"
4256 if (which_alternative == 0)
4257 return \"sxth\\t%0, %1\";
4259 mem = XEXP (operands[1], 0);
4261 /* This code used to try to use 'V', and fix the address only if it was
4262 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4263 range of QImode offsets, and offsettable_address_p does a QImode
4266 if (GET_CODE (mem) == CONST)
4267 mem = XEXP (mem, 0);
4269 if (GET_CODE (mem) == LABEL_REF)
4270 return \"ldr\\t%0, %1\";
4272 if (GET_CODE (mem) == PLUS)
4274 rtx a = XEXP (mem, 0);
4275 rtx b = XEXP (mem, 1);
4277 if (GET_CODE (a) == LABEL_REF
4278 && GET_CODE (b) == CONST_INT)
4279 return \"ldr\\t%0, %1\";
4281 if (GET_CODE (b) == REG)
4282 return \"ldrsh\\t%0, %1\";
4290 ops[2] = const0_rtx;
4293 gcc_assert (GET_CODE (ops[1]) == REG);
4295 ops[0] = operands[0];
4296 if (reg_mentioned_p (operands[2], ops[1]))
4299 ops[3] = operands[2];
4300 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4303 [(set_attr "length" "2,4")
4304 (set_attr "type" "alu_shift,load_byte")
4305 (set_attr "pool_range" "*,1020")]
4308 ;; This pattern will only be used when ldsh is not available
4309 (define_expand "extendhisi2_mem"
4310 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4312 (zero_extend:SI (match_dup 7)))
4313 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4314 (set (match_operand:SI 0 "" "")
4315 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4320 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4322 mem1 = change_address (operands[1], QImode, addr);
4323 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4324 operands[0] = gen_lowpart (SImode, operands[0]);
4326 operands[2] = gen_reg_rtx (SImode);
4327 operands[3] = gen_reg_rtx (SImode);
4328 operands[6] = gen_reg_rtx (SImode);
4331 if (BYTES_BIG_ENDIAN)
4333 operands[4] = operands[2];
4334 operands[5] = operands[3];
4338 operands[4] = operands[3];
4339 operands[5] = operands[2];
4344 (define_insn "*arm_extendhisi2"
4345 [(set (match_operand:SI 0 "s_register_operand" "=r")
4346 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4347 "TARGET_ARM && arm_arch4 && !arm_arch6"
4348 "ldr%(sh%)\\t%0, %1"
4349 [(set_attr "type" "load_byte")
4350 (set_attr "predicable" "yes")
4351 (set_attr "pool_range" "256")
4352 (set_attr "neg_pool_range" "244")]
4355 ;; ??? Check Thumb-2 pool range
4356 (define_insn "*arm_extendhisi2_v6"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4358 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4359 "TARGET_32BIT && arm_arch6"
4363 [(set_attr "type" "alu_shift,load_byte")
4364 (set_attr "predicable" "yes")
4365 (set_attr "pool_range" "*,256")
4366 (set_attr "neg_pool_range" "*,244")]
4369 (define_insn "*arm_extendhisi2addsi"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4372 (match_operand:SI 2 "s_register_operand" "r")))]
4374 "sxtah%?\\t%0, %2, %1"
4377 (define_expand "extendqihi2"
4379 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4381 (set (match_operand:HI 0 "s_register_operand" "")
4382 (ashiftrt:SI (match_dup 2)
4387 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4389 emit_insn (gen_rtx_SET (VOIDmode,
4391 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4394 if (!s_register_operand (operands[1], QImode))
4395 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4396 operands[0] = gen_lowpart (SImode, operands[0]);
4397 operands[1] = gen_lowpart (SImode, operands[1]);
4398 operands[2] = gen_reg_rtx (SImode);
4402 (define_insn "*arm_extendqihi_insn"
4403 [(set (match_operand:HI 0 "s_register_operand" "=r")
4404 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4405 "TARGET_ARM && arm_arch4"
4406 "ldr%(sb%)\\t%0, %1"
4407 [(set_attr "type" "load_byte")
4408 (set_attr "predicable" "yes")
4409 (set_attr "pool_range" "256")
4410 (set_attr "neg_pool_range" "244")]
4413 (define_expand "extendqisi2"
4415 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4417 (set (match_operand:SI 0 "s_register_operand" "")
4418 (ashiftrt:SI (match_dup 2)
4423 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4425 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4426 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4430 if (!s_register_operand (operands[1], QImode))
4431 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4435 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4436 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4440 operands[1] = gen_lowpart (SImode, operands[1]);
4441 operands[2] = gen_reg_rtx (SImode);
4445 (define_insn "*arm_extendqisi"
4446 [(set (match_operand:SI 0 "s_register_operand" "=r")
4447 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4448 "TARGET_ARM && arm_arch4 && !arm_arch6"
4449 "ldr%(sb%)\\t%0, %1"
4450 [(set_attr "type" "load_byte")
4451 (set_attr "predicable" "yes")
4452 (set_attr "pool_range" "256")
4453 (set_attr "neg_pool_range" "244")]
4456 (define_insn "*arm_extendqisi_v6"
4457 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4459 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4460 "TARGET_ARM && arm_arch6"
4464 [(set_attr "type" "alu_shift,load_byte")
4465 (set_attr "predicable" "yes")
4466 (set_attr "pool_range" "*,256")
4467 (set_attr "neg_pool_range" "*,244")]
4470 (define_insn "*arm_extendqisi2addsi"
4471 [(set (match_operand:SI 0 "s_register_operand" "=r")
4472 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4473 (match_operand:SI 2 "s_register_operand" "r")))]
4475 "sxtab%?\\t%0, %2, %1"
4476 [(set_attr "type" "alu_shift")
4477 (set_attr "insn" "xtab")
4478 (set_attr "predicable" "yes")]
4481 (define_insn "*thumb1_extendqisi2"
4482 [(set (match_operand:SI 0 "register_operand" "=l,l")
4483 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4484 "TARGET_THUMB1 && !arm_arch6"
4488 rtx mem = XEXP (operands[1], 0);
4490 if (GET_CODE (mem) == CONST)
4491 mem = XEXP (mem, 0);
4493 if (GET_CODE (mem) == LABEL_REF)
4494 return \"ldr\\t%0, %1\";
4496 if (GET_CODE (mem) == PLUS
4497 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4498 return \"ldr\\t%0, %1\";
4500 if (which_alternative == 0)
4501 return \"ldrsb\\t%0, %1\";
4503 ops[0] = operands[0];
4505 if (GET_CODE (mem) == PLUS)
4507 rtx a = XEXP (mem, 0);
4508 rtx b = XEXP (mem, 1);
4513 if (GET_CODE (a) == REG)
4515 if (GET_CODE (b) == REG)
4516 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4517 else if (REGNO (a) == REGNO (ops[0]))
4519 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4520 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4521 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4524 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4528 gcc_assert (GET_CODE (b) == REG);
4529 if (REGNO (b) == REGNO (ops[0]))
4531 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4532 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4533 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4536 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4539 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4541 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4542 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4543 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4548 ops[2] = const0_rtx;
4550 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4554 [(set_attr "length" "2,6")
4555 (set_attr "type" "load_byte,load_byte")
4556 (set_attr "pool_range" "32,32")]
4559 (define_insn "*thumb1_extendqisi2_v6"
4560 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4561 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4562 "TARGET_THUMB1 && arm_arch6"
4568 if (which_alternative == 0)
4569 return \"sxtb\\t%0, %1\";
4571 mem = XEXP (operands[1], 0);
4573 if (GET_CODE (mem) == CONST)
4574 mem = XEXP (mem, 0);
4576 if (GET_CODE (mem) == LABEL_REF)
4577 return \"ldr\\t%0, %1\";
4579 if (GET_CODE (mem) == PLUS
4580 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4581 return \"ldr\\t%0, %1\";
4583 if (which_alternative == 0)
4584 return \"ldrsb\\t%0, %1\";
4586 ops[0] = operands[0];
4588 if (GET_CODE (mem) == PLUS)
4590 rtx a = XEXP (mem, 0);
4591 rtx b = XEXP (mem, 1);
4596 if (GET_CODE (a) == REG)
4598 if (GET_CODE (b) == REG)
4599 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4600 else if (REGNO (a) == REGNO (ops[0]))
4602 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4603 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4606 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4610 gcc_assert (GET_CODE (b) == REG);
4611 if (REGNO (b) == REGNO (ops[0]))
4613 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4614 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4617 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4620 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4622 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4623 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4628 ops[2] = const0_rtx;
4630 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4634 [(set_attr "length" "2,2,4")
4635 (set_attr "type" "alu_shift,load_byte,load_byte")
4636 (set_attr "pool_range" "*,32,32")]
4639 (define_expand "extendsfdf2"
4640 [(set (match_operand:DF 0 "s_register_operand" "")
4641 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4642 "TARGET_32BIT && TARGET_HARD_FLOAT"
4646 ;; Move insns (including loads and stores)
4648 ;; XXX Just some ideas about movti.
4649 ;; I don't think these are a good idea on the arm, there just aren't enough
4651 ;;(define_expand "loadti"
4652 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4653 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4656 ;;(define_expand "storeti"
4657 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4658 ;; (match_operand:TI 1 "s_register_operand" ""))]
4661 ;;(define_expand "movti"
4662 ;; [(set (match_operand:TI 0 "general_operand" "")
4663 ;; (match_operand:TI 1 "general_operand" ""))]
4669 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4670 ;; operands[1] = copy_to_reg (operands[1]);
4671 ;; if (GET_CODE (operands[0]) == MEM)
4672 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4673 ;; else if (GET_CODE (operands[1]) == MEM)
4674 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4678 ;; emit_insn (insn);
4682 ;; Recognize garbage generated above.
4685 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4686 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4690 ;; register mem = (which_alternative < 3);
4691 ;; register const char *template;
4693 ;; operands[mem] = XEXP (operands[mem], 0);
4694 ;; switch (which_alternative)
4696 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4697 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4698 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4699 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4700 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4701 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4703 ;; output_asm_insn (template, operands);
4707 (define_expand "movdi"
4708 [(set (match_operand:DI 0 "general_operand" "")
4709 (match_operand:DI 1 "general_operand" ""))]
4712 if (can_create_pseudo_p ())
4714 if (GET_CODE (operands[0]) != REG)
4715 operands[1] = force_reg (DImode, operands[1]);
4720 (define_insn "*arm_movdi"
4721 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4722 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4724 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4726 && ( register_operand (operands[0], DImode)
4727 || register_operand (operands[1], DImode))"
4729 switch (which_alternative)
4736 return output_move_double (operands);
4739 [(set_attr "length" "8,12,16,8,8")
4740 (set_attr "type" "*,*,*,load2,store2")
4741 (set_attr "pool_range" "*,*,*,1020,*")
4742 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4746 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4747 (match_operand:ANY64 1 "const_double_operand" ""))]
4750 && (arm_const_double_inline_cost (operands[1])
4751 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4754 arm_split_constant (SET, SImode, curr_insn,
4755 INTVAL (gen_lowpart (SImode, operands[1])),
4756 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4757 arm_split_constant (SET, SImode, curr_insn,
4758 INTVAL (gen_highpart_mode (SImode,
4759 GET_MODE (operands[0]),
4761 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4766 ; If optimizing for size, or if we have load delay slots, then
4767 ; we want to split the constant into two separate operations.
4768 ; In both cases this may split a trivial part into a single data op
4769 ; leaving a single complex constant to load. We can also get longer
4770 ; offsets in a LDR which means we get better chances of sharing the pool
4771 ; entries. Finally, we can normally do a better job of scheduling
4772 ; LDR instructions than we can with LDM.
4773 ; This pattern will only match if the one above did not.
4775 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4776 (match_operand:ANY64 1 "const_double_operand" ""))]
4777 "TARGET_ARM && reload_completed
4778 && arm_const_double_by_parts (operands[1])"
4779 [(set (match_dup 0) (match_dup 1))
4780 (set (match_dup 2) (match_dup 3))]
4782 operands[2] = gen_highpart (SImode, operands[0]);
4783 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4785 operands[0] = gen_lowpart (SImode, operands[0]);
4786 operands[1] = gen_lowpart (SImode, operands[1]);
4791 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4792 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4793 "TARGET_EITHER && reload_completed"
4794 [(set (match_dup 0) (match_dup 1))
4795 (set (match_dup 2) (match_dup 3))]
4797 operands[2] = gen_highpart (SImode, operands[0]);
4798 operands[3] = gen_highpart (SImode, operands[1]);
4799 operands[0] = gen_lowpart (SImode, operands[0]);
4800 operands[1] = gen_lowpart (SImode, operands[1]);
4802 /* Handle a partial overlap. */
4803 if (rtx_equal_p (operands[0], operands[3]))
4805 rtx tmp0 = operands[0];
4806 rtx tmp1 = operands[1];
4808 operands[0] = operands[2];
4809 operands[1] = operands[3];
4816 ;; We can't actually do base+index doubleword loads if the index and
4817 ;; destination overlap. Split here so that we at least have chance to
4820 [(set (match_operand:DI 0 "s_register_operand" "")
4821 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4822 (match_operand:SI 2 "s_register_operand" ""))))]
4824 && reg_overlap_mentioned_p (operands[0], operands[1])
4825 && reg_overlap_mentioned_p (operands[0], operands[2])"
4827 (plus:SI (match_dup 1)
4830 (mem:DI (match_dup 4)))]
4832 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4836 ;;; ??? This should have alternatives for constants.
4837 ;;; ??? This was originally identical to the movdf_insn pattern.
4838 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4839 ;;; thumb_reorg with a memory reference.
4840 (define_insn "*thumb1_movdi_insn"
4841 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4842 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4844 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4845 && ( register_operand (operands[0], DImode)
4846 || register_operand (operands[1], DImode))"
4849 switch (which_alternative)
4853 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4854 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4855 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4857 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4859 operands[1] = GEN_INT (- INTVAL (operands[1]));
4860 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4862 return \"ldmia\\t%1, {%0, %H0}\";
4864 return \"stmia\\t%0, {%1, %H1}\";
4866 return thumb_load_double_from_address (operands);
4868 operands[2] = gen_rtx_MEM (SImode,
4869 plus_constant (XEXP (operands[0], 0), 4));
4870 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4873 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4874 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4875 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4878 [(set_attr "length" "4,4,6,2,2,6,4,4")
4879 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4880 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4883 (define_expand "movsi"
4884 [(set (match_operand:SI 0 "general_operand" "")
4885 (match_operand:SI 1 "general_operand" ""))]
4889 rtx base, offset, tmp;
4893 /* Everything except mem = const or mem = mem can be done easily. */
4894 if (GET_CODE (operands[0]) == MEM)
4895 operands[1] = force_reg (SImode, operands[1]);
4896 if (arm_general_register_operand (operands[0], SImode)
4897 && GET_CODE (operands[1]) == CONST_INT
4898 && !(const_ok_for_arm (INTVAL (operands[1]))
4899 || const_ok_for_arm (~INTVAL (operands[1]))))
4901 arm_split_constant (SET, SImode, NULL_RTX,
4902 INTVAL (operands[1]), operands[0], NULL_RTX,
4903 optimize && can_create_pseudo_p ());
4907 if (TARGET_USE_MOVT && !target_word_relocations
4908 && GET_CODE (operands[1]) == SYMBOL_REF
4909 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4911 arm_emit_movpair (operands[0], operands[1]);
4915 else /* TARGET_THUMB1... */
4917 if (can_create_pseudo_p ())
4919 if (GET_CODE (operands[0]) != REG)
4920 operands[1] = force_reg (SImode, operands[1]);
4924 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4926 split_const (operands[1], &base, &offset);
4927 if (GET_CODE (base) == SYMBOL_REF
4928 && !offset_within_block_p (base, INTVAL (offset)))
4930 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4931 emit_move_insn (tmp, base);
4932 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4937 /* Recognize the case where operand[1] is a reference to thread-local
4938 data and load its address to a register. */
4939 if (arm_tls_referenced_p (operands[1]))
4941 rtx tmp = operands[1];
4944 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4946 addend = XEXP (XEXP (tmp, 0), 1);
4947 tmp = XEXP (XEXP (tmp, 0), 0);
4950 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4951 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4953 tmp = legitimize_tls_address (tmp,
4954 !can_create_pseudo_p () ? operands[0] : 0);
4957 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4958 tmp = force_operand (tmp, operands[0]);
4963 && (CONSTANT_P (operands[1])
4964 || symbol_mentioned_p (operands[1])
4965 || label_mentioned_p (operands[1])))
4966 operands[1] = legitimize_pic_address (operands[1], SImode,
4967 (!can_create_pseudo_p ()
4974 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
4975 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
4976 ;; so this does not matter.
4977 (define_insn "*arm_movt"
4978 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4979 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
4980 (match_operand:SI 2 "general_operand" "i")))]
4982 "movt%?\t%0, #:upper16:%c2"
4983 [(set_attr "predicable" "yes")
4984 (set_attr "length" "4")]
4987 (define_insn "*arm_movw"
4988 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4989 (high:SI (match_operand:SI 1 "general_operand" "i")))]
4991 "movw%?\t%0, #:lower16:%c1"
4992 [(set_attr "predicable" "yes")
4993 (set_attr "length" "4")]
4996 (define_insn "*arm_movsi_insn"
4997 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4998 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4999 "TARGET_ARM && ! TARGET_IWMMXT
5000 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5001 && ( register_operand (operands[0], SImode)
5002 || register_operand (operands[1], SImode))"
5010 [(set_attr "type" "*,*,*,*,load1,store1")
5011 (set_attr "predicable" "yes")
5012 (set_attr "pool_range" "*,*,*,*,4096,*")
5013 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5017 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5018 (match_operand:SI 1 "const_int_operand" ""))]
5020 && (!(const_ok_for_arm (INTVAL (operands[1]))
5021 || const_ok_for_arm (~INTVAL (operands[1]))))"
5022 [(clobber (const_int 0))]
5024 arm_split_constant (SET, SImode, NULL_RTX,
5025 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5030 (define_insn "*thumb1_movsi_insn"
5031 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5032 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5034 && ( register_operand (operands[0], SImode)
5035 || register_operand (operands[1], SImode))"
5046 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5047 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5048 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5052 [(set (match_operand:SI 0 "register_operand" "")
5053 (match_operand:SI 1 "const_int_operand" ""))]
5054 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5055 [(set (match_dup 0) (match_dup 1))
5056 (set (match_dup 0) (neg:SI (match_dup 0)))]
5057 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5061 [(set (match_operand:SI 0 "register_operand" "")
5062 (match_operand:SI 1 "const_int_operand" ""))]
5063 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5064 [(set (match_dup 0) (match_dup 1))
5065 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5068 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
5069 unsigned HOST_WIDE_INT mask = 0xff;
5072 for (i = 0; i < 25; i++)
5073 if ((val & (mask << i)) == val)
5076 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5080 operands[1] = GEN_INT (val >> i);
5081 operands[2] = GEN_INT (i);
5085 ;; When generating pic, we need to load the symbol offset into a register.
5086 ;; So that the optimizer does not confuse this with a normal symbol load
5087 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5088 ;; since that is the only type of relocation we can use.
5090 ;; The rather odd constraints on the following are to force reload to leave
5091 ;; the insn alone, and to force the minipool generation pass to then move
5092 ;; the GOT symbol to memory.
5094 (define_insn "pic_load_addr_arm"
5095 [(set (match_operand:SI 0 "s_register_operand" "=r")
5096 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5097 "TARGET_ARM && flag_pic"
5099 [(set_attr "type" "load1")
5100 (set (attr "pool_range") (const_int 4096))
5101 (set (attr "neg_pool_range") (const_int 4084))]
5104 (define_insn "pic_load_addr_thumb1"
5105 [(set (match_operand:SI 0 "s_register_operand" "=l")
5106 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5107 "TARGET_THUMB1 && flag_pic"
5109 [(set_attr "type" "load1")
5110 (set (attr "pool_range") (const_int 1024))]
5113 (define_insn "pic_add_dot_plus_four"
5114 [(set (match_operand:SI 0 "register_operand" "=r")
5115 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5117 (match_operand 2 "" "")]
5121 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5122 INTVAL (operands[2]));
5123 return \"add\\t%0, %|pc\";
5125 [(set_attr "length" "2")]
5128 (define_insn "pic_add_dot_plus_eight"
5129 [(set (match_operand:SI 0 "register_operand" "=r")
5130 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5132 (match_operand 2 "" "")]
5136 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5137 INTVAL (operands[2]));
5138 return \"add%?\\t%0, %|pc, %1\";
5140 [(set_attr "predicable" "yes")]
5143 (define_insn "tls_load_dot_plus_eight"
5144 [(set (match_operand:SI 0 "register_operand" "+r")
5145 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5147 (match_operand 2 "" "")]
5151 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5152 INTVAL (operands[2]));
5153 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5155 [(set_attr "predicable" "yes")]
5158 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5159 ;; followed by a load. These sequences can be crunched down to
5160 ;; tls_load_dot_plus_eight by a peephole.
5163 [(set (match_operand:SI 0 "register_operand" "")
5164 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5166 (match_operand 1 "" "")]
5168 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5169 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5171 (mem:SI (unspec:SI [(match_dup 3)
5178 (define_insn "pic_offset_arm"
5179 [(set (match_operand:SI 0 "register_operand" "=r")
5180 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5181 (unspec:SI [(match_operand:SI 2 "" "X")]
5182 UNSPEC_PIC_OFFSET))))]
5183 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5184 "ldr%?\\t%0, [%1,%2]"
5185 [(set_attr "type" "load1")]
5188 (define_expand "builtin_setjmp_receiver"
5189 [(label_ref (match_operand 0 "" ""))]
5193 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5195 if (arm_pic_register != INVALID_REGNUM)
5196 arm_load_pic_register (1UL << 3);
5200 ;; If copying one reg to another we can set the condition codes according to
5201 ;; its value. Such a move is common after a return from subroutine and the
5202 ;; result is being tested against zero.
5204 (define_insn "*movsi_compare0"
5205 [(set (reg:CC CC_REGNUM)
5206 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5208 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5214 [(set_attr "conds" "set")]
5217 ;; Subroutine to store a half word from a register into memory.
5218 ;; Operand 0 is the source register (HImode)
5219 ;; Operand 1 is the destination address in a register (SImode)
5221 ;; In both this routine and the next, we must be careful not to spill
5222 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5223 ;; can generate unrecognizable rtl.
5225 (define_expand "storehi"
5226 [;; store the low byte
5227 (set (match_operand 1 "" "") (match_dup 3))
5228 ;; extract the high byte
5230 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5231 ;; store the high byte
5232 (set (match_dup 4) (match_dup 5))]
5236 rtx op1 = operands[1];
5237 rtx addr = XEXP (op1, 0);
5238 enum rtx_code code = GET_CODE (addr);
5240 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5242 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5244 operands[4] = adjust_address (op1, QImode, 1);
5245 operands[1] = adjust_address (operands[1], QImode, 0);
5246 operands[3] = gen_lowpart (QImode, operands[0]);
5247 operands[0] = gen_lowpart (SImode, operands[0]);
5248 operands[2] = gen_reg_rtx (SImode);
5249 operands[5] = gen_lowpart (QImode, operands[2]);
5253 (define_expand "storehi_bigend"
5254 [(set (match_dup 4) (match_dup 3))
5256 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5257 (set (match_operand 1 "" "") (match_dup 5))]
5261 rtx op1 = operands[1];
5262 rtx addr = XEXP (op1, 0);
5263 enum rtx_code code = GET_CODE (addr);
5265 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5267 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5269 operands[4] = adjust_address (op1, QImode, 1);
5270 operands[1] = adjust_address (operands[1], QImode, 0);
5271 operands[3] = gen_lowpart (QImode, operands[0]);
5272 operands[0] = gen_lowpart (SImode, operands[0]);
5273 operands[2] = gen_reg_rtx (SImode);
5274 operands[5] = gen_lowpart (QImode, operands[2]);
5278 ;; Subroutine to store a half word integer constant into memory.
5279 (define_expand "storeinthi"
5280 [(set (match_operand 0 "" "")
5281 (match_operand 1 "" ""))
5282 (set (match_dup 3) (match_dup 2))]
5286 HOST_WIDE_INT value = INTVAL (operands[1]);
5287 rtx addr = XEXP (operands[0], 0);
5288 rtx op0 = operands[0];
5289 enum rtx_code code = GET_CODE (addr);
5291 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5293 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5295 operands[1] = gen_reg_rtx (SImode);
5296 if (BYTES_BIG_ENDIAN)
5298 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5299 if ((value & 255) == ((value >> 8) & 255))
5300 operands[2] = operands[1];
5303 operands[2] = gen_reg_rtx (SImode);
5304 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5309 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5310 if ((value & 255) == ((value >> 8) & 255))
5311 operands[2] = operands[1];
5314 operands[2] = gen_reg_rtx (SImode);
5315 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5319 operands[3] = adjust_address (op0, QImode, 1);
5320 operands[0] = adjust_address (operands[0], QImode, 0);
5321 operands[2] = gen_lowpart (QImode, operands[2]);
5322 operands[1] = gen_lowpart (QImode, operands[1]);
5326 (define_expand "storehi_single_op"
5327 [(set (match_operand:HI 0 "memory_operand" "")
5328 (match_operand:HI 1 "general_operand" ""))]
5329 "TARGET_32BIT && arm_arch4"
5331 if (!s_register_operand (operands[1], HImode))
5332 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5336 (define_expand "movhi"
5337 [(set (match_operand:HI 0 "general_operand" "")
5338 (match_operand:HI 1 "general_operand" ""))]
5343 if (can_create_pseudo_p ())
5345 if (GET_CODE (operands[0]) == MEM)
5349 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5352 if (GET_CODE (operands[1]) == CONST_INT)
5353 emit_insn (gen_storeinthi (operands[0], operands[1]));
5356 if (GET_CODE (operands[1]) == MEM)
5357 operands[1] = force_reg (HImode, operands[1]);
5358 if (BYTES_BIG_ENDIAN)
5359 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5361 emit_insn (gen_storehi (operands[1], operands[0]));
5365 /* Sign extend a constant, and keep it in an SImode reg. */
5366 else if (GET_CODE (operands[1]) == CONST_INT)
5368 rtx reg = gen_reg_rtx (SImode);
5369 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5371 /* If the constant is already valid, leave it alone. */
5372 if (!const_ok_for_arm (val))
5374 /* If setting all the top bits will make the constant
5375 loadable in a single instruction, then set them.
5376 Otherwise, sign extend the number. */
5378 if (const_ok_for_arm (~(val | ~0xffff)))
5380 else if (val & 0x8000)
5384 emit_insn (gen_movsi (reg, GEN_INT (val)));
5385 operands[1] = gen_lowpart (HImode, reg);
5387 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5388 && GET_CODE (operands[1]) == MEM)
5390 rtx reg = gen_reg_rtx (SImode);
5392 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5393 operands[1] = gen_lowpart (HImode, reg);
5395 else if (!arm_arch4)
5397 if (GET_CODE (operands[1]) == MEM)
5400 rtx offset = const0_rtx;
5401 rtx reg = gen_reg_rtx (SImode);
5403 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5404 || (GET_CODE (base) == PLUS
5405 && (GET_CODE (offset = XEXP (base, 1))
5407 && ((INTVAL(offset) & 1) != 1)
5408 && GET_CODE (base = XEXP (base, 0)) == REG))
5409 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5413 new_rtx = widen_memory_access (operands[1], SImode,
5414 ((INTVAL (offset) & ~3)
5415 - INTVAL (offset)));
5416 emit_insn (gen_movsi (reg, new_rtx));
5417 if (((INTVAL (offset) & 2) != 0)
5418 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5420 rtx reg2 = gen_reg_rtx (SImode);
5422 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5427 emit_insn (gen_movhi_bytes (reg, operands[1]));
5429 operands[1] = gen_lowpart (HImode, reg);
5433 /* Handle loading a large integer during reload. */
5434 else if (GET_CODE (operands[1]) == CONST_INT
5435 && !const_ok_for_arm (INTVAL (operands[1]))
5436 && !const_ok_for_arm (~INTVAL (operands[1])))
5438 /* Writing a constant to memory needs a scratch, which should
5439 be handled with SECONDARY_RELOADs. */
5440 gcc_assert (GET_CODE (operands[0]) == REG);
5442 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5443 emit_insn (gen_movsi (operands[0], operands[1]));
5447 else if (TARGET_THUMB2)
5449 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5450 if (can_create_pseudo_p ())
5452 if (GET_CODE (operands[0]) != REG)
5453 operands[1] = force_reg (HImode, operands[1]);
5454 /* Zero extend a constant, and keep it in an SImode reg. */
5455 else if (GET_CODE (operands[1]) == CONST_INT)
5457 rtx reg = gen_reg_rtx (SImode);
5458 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5460 emit_insn (gen_movsi (reg, GEN_INT (val)));
5461 operands[1] = gen_lowpart (HImode, reg);
5465 else /* TARGET_THUMB1 */
5467 if (can_create_pseudo_p ())
5469 if (GET_CODE (operands[1]) == CONST_INT)
5471 rtx reg = gen_reg_rtx (SImode);
5473 emit_insn (gen_movsi (reg, operands[1]));
5474 operands[1] = gen_lowpart (HImode, reg);
5477 /* ??? We shouldn't really get invalid addresses here, but this can
5478 happen if we are passed a SP (never OK for HImode/QImode) or
5479 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5480 HImode/QImode) relative address. */
5481 /* ??? This should perhaps be fixed elsewhere, for instance, in
5482 fixup_stack_1, by checking for other kinds of invalid addresses,
5483 e.g. a bare reference to a virtual register. This may confuse the
5484 alpha though, which must handle this case differently. */
5485 if (GET_CODE (operands[0]) == MEM
5486 && !memory_address_p (GET_MODE (operands[0]),
5487 XEXP (operands[0], 0)))
5489 = replace_equiv_address (operands[0],
5490 copy_to_reg (XEXP (operands[0], 0)));
5492 if (GET_CODE (operands[1]) == MEM
5493 && !memory_address_p (GET_MODE (operands[1]),
5494 XEXP (operands[1], 0)))
5496 = replace_equiv_address (operands[1],
5497 copy_to_reg (XEXP (operands[1], 0)));
5499 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5501 rtx reg = gen_reg_rtx (SImode);
5503 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5504 operands[1] = gen_lowpart (HImode, reg);
5507 if (GET_CODE (operands[0]) == MEM)
5508 operands[1] = force_reg (HImode, operands[1]);
5510 else if (GET_CODE (operands[1]) == CONST_INT
5511 && !satisfies_constraint_I (operands[1]))
5513 /* Handle loading a large integer during reload. */
5515 /* Writing a constant to memory needs a scratch, which should
5516 be handled with SECONDARY_RELOADs. */
5517 gcc_assert (GET_CODE (operands[0]) == REG);
5519 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5520 emit_insn (gen_movsi (operands[0], operands[1]));
5527 (define_insn "*thumb1_movhi_insn"
5528 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5529 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5531 && ( register_operand (operands[0], HImode)
5532 || register_operand (operands[1], HImode))"
5534 switch (which_alternative)
5536 case 0: return \"add %0, %1, #0\";
5537 case 2: return \"strh %1, %0\";
5538 case 3: return \"mov %0, %1\";
5539 case 4: return \"mov %0, %1\";
5540 case 5: return \"mov %0, %1\";
5541 default: gcc_unreachable ();
5543 /* The stack pointer can end up being taken as an index register.
5544 Catch this case here and deal with it. */
5545 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5546 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5547 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5550 ops[0] = operands[0];
5551 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5553 output_asm_insn (\"mov %0, %1\", ops);
5555 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5558 return \"ldrh %0, %1\";
5560 [(set_attr "length" "2,4,2,2,2,2")
5561 (set_attr "type" "*,load1,store1,*,*,*")]
5565 (define_expand "movhi_bytes"
5566 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5568 (zero_extend:SI (match_dup 6)))
5569 (set (match_operand:SI 0 "" "")
5570 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5575 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5577 mem1 = change_address (operands[1], QImode, addr);
5578 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5579 operands[0] = gen_lowpart (SImode, operands[0]);
5581 operands[2] = gen_reg_rtx (SImode);
5582 operands[3] = gen_reg_rtx (SImode);
5585 if (BYTES_BIG_ENDIAN)
5587 operands[4] = operands[2];
5588 operands[5] = operands[3];
5592 operands[4] = operands[3];
5593 operands[5] = operands[2];
5598 (define_expand "movhi_bigend"
5600 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5603 (ashiftrt:SI (match_dup 2) (const_int 16)))
5604 (set (match_operand:HI 0 "s_register_operand" "")
5608 operands[2] = gen_reg_rtx (SImode);
5609 operands[3] = gen_reg_rtx (SImode);
5610 operands[4] = gen_lowpart (HImode, operands[3]);
5614 ;; Pattern to recognize insn generated default case above
5615 (define_insn "*movhi_insn_arch4"
5616 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5617 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5620 && (GET_CODE (operands[1]) != CONST_INT
5621 || const_ok_for_arm (INTVAL (operands[1]))
5622 || const_ok_for_arm (~INTVAL (operands[1])))"
5624 mov%?\\t%0, %1\\t%@ movhi
5625 mvn%?\\t%0, #%B1\\t%@ movhi
5626 str%(h%)\\t%1, %0\\t%@ movhi
5627 ldr%(h%)\\t%0, %1\\t%@ movhi"
5628 [(set_attr "type" "*,*,store1,load1")
5629 (set_attr "predicable" "yes")
5630 (set_attr "pool_range" "*,*,*,256")
5631 (set_attr "neg_pool_range" "*,*,*,244")]
5634 (define_insn "*movhi_bytes"
5635 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5636 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5639 mov%?\\t%0, %1\\t%@ movhi
5640 mvn%?\\t%0, #%B1\\t%@ movhi"
5641 [(set_attr "predicable" "yes")]
5644 (define_expand "thumb_movhi_clobber"
5645 [(set (match_operand:HI 0 "memory_operand" "")
5646 (match_operand:HI 1 "register_operand" ""))
5647 (clobber (match_operand:DI 2 "register_operand" ""))]
5650 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5651 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5653 emit_insn (gen_movhi (operands[0], operands[1]));
5656 /* XXX Fixme, need to handle other cases here as well. */
5661 ;; We use a DImode scratch because we may occasionally need an additional
5662 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5663 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5664 (define_expand "reload_outhi"
5665 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5666 (match_operand:HI 1 "s_register_operand" "r")
5667 (match_operand:DI 2 "s_register_operand" "=&l")])]
5670 arm_reload_out_hi (operands);
5672 thumb_reload_out_hi (operands);
5677 (define_expand "reload_inhi"
5678 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5679 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5680 (match_operand:DI 2 "s_register_operand" "=&r")])]
5684 arm_reload_in_hi (operands);
5686 thumb_reload_out_hi (operands);
5690 (define_expand "movqi"
5691 [(set (match_operand:QI 0 "general_operand" "")
5692 (match_operand:QI 1 "general_operand" ""))]
5695 /* Everything except mem = const or mem = mem can be done easily */
5697 if (can_create_pseudo_p ())
5699 if (GET_CODE (operands[1]) == CONST_INT)
5701 rtx reg = gen_reg_rtx (SImode);
5703 emit_insn (gen_movsi (reg, operands[1]));
5704 operands[1] = gen_lowpart (QImode, reg);
5709 /* ??? We shouldn't really get invalid addresses here, but this can
5710 happen if we are passed a SP (never OK for HImode/QImode) or
5711 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5712 HImode/QImode) relative address. */
5713 /* ??? This should perhaps be fixed elsewhere, for instance, in
5714 fixup_stack_1, by checking for other kinds of invalid addresses,
5715 e.g. a bare reference to a virtual register. This may confuse the
5716 alpha though, which must handle this case differently. */
5717 if (GET_CODE (operands[0]) == MEM
5718 && !memory_address_p (GET_MODE (operands[0]),
5719 XEXP (operands[0], 0)))
5721 = replace_equiv_address (operands[0],
5722 copy_to_reg (XEXP (operands[0], 0)));
5723 if (GET_CODE (operands[1]) == MEM
5724 && !memory_address_p (GET_MODE (operands[1]),
5725 XEXP (operands[1], 0)))
5727 = replace_equiv_address (operands[1],
5728 copy_to_reg (XEXP (operands[1], 0)));
5731 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5733 rtx reg = gen_reg_rtx (SImode);
5735 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5736 operands[1] = gen_lowpart (QImode, reg);
5739 if (GET_CODE (operands[0]) == MEM)
5740 operands[1] = force_reg (QImode, operands[1]);
5742 else if (TARGET_THUMB
5743 && GET_CODE (operands[1]) == CONST_INT
5744 && !satisfies_constraint_I (operands[1]))
5746 /* Handle loading a large integer during reload. */
5748 /* Writing a constant to memory needs a scratch, which should
5749 be handled with SECONDARY_RELOADs. */
5750 gcc_assert (GET_CODE (operands[0]) == REG);
5752 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5753 emit_insn (gen_movsi (operands[0], operands[1]));
5760 (define_insn "*arm_movqi_insn"
5761 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5762 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5764 && ( register_operand (operands[0], QImode)
5765 || register_operand (operands[1], QImode))"
5771 [(set_attr "type" "*,*,load1,store1")
5772 (set_attr "predicable" "yes")]
5775 (define_insn "*thumb1_movqi_insn"
5776 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5777 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5779 && ( register_operand (operands[0], QImode)
5780 || register_operand (operands[1], QImode))"
5788 [(set_attr "length" "2")
5789 (set_attr "type" "*,load1,store1,*,*,*")
5790 (set_attr "pool_range" "*,32,*,*,*,*")]
5793 (define_expand "movsf"
5794 [(set (match_operand:SF 0 "general_operand" "")
5795 (match_operand:SF 1 "general_operand" ""))]
5800 if (GET_CODE (operands[0]) == MEM)
5801 operands[1] = force_reg (SFmode, operands[1]);
5803 else /* TARGET_THUMB1 */
5805 if (can_create_pseudo_p ())
5807 if (GET_CODE (operands[0]) != REG)
5808 operands[1] = force_reg (SFmode, operands[1]);
5814 ;; Transform a floating-point move of a constant into a core register into
5815 ;; an SImode operation.
5817 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5818 (match_operand:SF 1 "immediate_operand" ""))]
5821 && GET_CODE (operands[1]) == CONST_DOUBLE"
5822 [(set (match_dup 2) (match_dup 3))]
5824 operands[2] = gen_lowpart (SImode, operands[0]);
5825 operands[3] = gen_lowpart (SImode, operands[1]);
5826 if (operands[2] == 0 || operands[3] == 0)
5831 (define_insn "*arm_movsf_soft_insn"
5832 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5833 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5835 && TARGET_SOFT_FLOAT
5836 && (GET_CODE (operands[0]) != MEM
5837 || register_operand (operands[1], SFmode))"
5840 ldr%?\\t%0, %1\\t%@ float
5841 str%?\\t%1, %0\\t%@ float"
5842 [(set_attr "length" "4,4,4")
5843 (set_attr "predicable" "yes")
5844 (set_attr "type" "*,load1,store1")
5845 (set_attr "pool_range" "*,4096,*")
5846 (set_attr "neg_pool_range" "*,4084,*")]
5849 ;;; ??? This should have alternatives for constants.
5850 (define_insn "*thumb1_movsf_insn"
5851 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5852 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5854 && ( register_operand (operands[0], SFmode)
5855 || register_operand (operands[1], SFmode))"
5864 [(set_attr "length" "2")
5865 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5866 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5869 (define_expand "movdf"
5870 [(set (match_operand:DF 0 "general_operand" "")
5871 (match_operand:DF 1 "general_operand" ""))]
5876 if (GET_CODE (operands[0]) == MEM)
5877 operands[1] = force_reg (DFmode, operands[1]);
5879 else /* TARGET_THUMB */
5881 if (can_create_pseudo_p ())
5883 if (GET_CODE (operands[0]) != REG)
5884 operands[1] = force_reg (DFmode, operands[1]);
5890 ;; Reloading a df mode value stored in integer regs to memory can require a
5892 (define_expand "reload_outdf"
5893 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5894 (match_operand:DF 1 "s_register_operand" "r")
5895 (match_operand:SI 2 "s_register_operand" "=&r")]
5899 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5902 operands[2] = XEXP (operands[0], 0);
5903 else if (code == POST_INC || code == PRE_DEC)
5905 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5906 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5907 emit_insn (gen_movdi (operands[0], operands[1]));
5910 else if (code == PRE_INC)
5912 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5914 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5917 else if (code == POST_DEC)
5918 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5920 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5921 XEXP (XEXP (operands[0], 0), 1)));
5923 emit_insn (gen_rtx_SET (VOIDmode,
5924 replace_equiv_address (operands[0], operands[2]),
5927 if (code == POST_DEC)
5928 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5934 (define_insn "*movdf_soft_insn"
5935 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5936 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5937 "TARGET_ARM && TARGET_SOFT_FLOAT
5938 && ( register_operand (operands[0], DFmode)
5939 || register_operand (operands[1], DFmode))"
5941 switch (which_alternative)
5948 return output_move_double (operands);
5951 [(set_attr "length" "8,12,16,8,8")
5952 (set_attr "type" "*,*,*,load2,store2")
5953 (set_attr "pool_range" "1020")
5954 (set_attr "neg_pool_range" "1008")]
5957 ;;; ??? This should have alternatives for constants.
5958 ;;; ??? This was originally identical to the movdi_insn pattern.
5959 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5960 ;;; thumb_reorg with a memory reference.
5961 (define_insn "*thumb_movdf_insn"
5962 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5963 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5965 && ( register_operand (operands[0], DFmode)
5966 || register_operand (operands[1], DFmode))"
5968 switch (which_alternative)
5972 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5973 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5974 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5976 return \"ldmia\\t%1, {%0, %H0}\";
5978 return \"stmia\\t%0, {%1, %H1}\";
5980 return thumb_load_double_from_address (operands);
5982 operands[2] = gen_rtx_MEM (SImode,
5983 plus_constant (XEXP (operands[0], 0), 4));
5984 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5987 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5988 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5989 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5992 [(set_attr "length" "4,2,2,6,4,4")
5993 (set_attr "type" "*,load2,store2,load2,store2,*")
5994 (set_attr "pool_range" "*,*,*,1020,*,*")]
5997 (define_expand "movxf"
5998 [(set (match_operand:XF 0 "general_operand" "")
5999 (match_operand:XF 1 "general_operand" ""))]
6000 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6002 if (GET_CODE (operands[0]) == MEM)
6003 operands[1] = force_reg (XFmode, operands[1]);
6009 ;; load- and store-multiple insns
6010 ;; The arm can load/store any set of registers, provided that they are in
6011 ;; ascending order; but that is beyond GCC so stick with what it knows.
6013 (define_expand "load_multiple"
6014 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6015 (match_operand:SI 1 "" ""))
6016 (use (match_operand:SI 2 "" ""))])]
6019 HOST_WIDE_INT offset = 0;
6021 /* Support only fixed point registers. */
6022 if (GET_CODE (operands[2]) != CONST_INT
6023 || INTVAL (operands[2]) > 14
6024 || INTVAL (operands[2]) < 2
6025 || GET_CODE (operands[1]) != MEM
6026 || GET_CODE (operands[0]) != REG
6027 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6028 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6032 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6033 force_reg (SImode, XEXP (operands[1], 0)),
6034 TRUE, FALSE, operands[1], &offset);
6037 ;; Load multiple with write-back
6039 (define_insn "*ldmsi_postinc4"
6040 [(match_parallel 0 "load_multiple_operation"
6041 [(set (match_operand:SI 1 "s_register_operand" "=r")
6042 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6044 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6045 (mem:SI (match_dup 2)))
6046 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6047 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6048 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6049 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6050 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6051 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6052 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6053 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6054 [(set_attr "type" "load4")
6055 (set_attr "predicable" "yes")]
6058 (define_insn "*ldmsi_postinc4_thumb1"
6059 [(match_parallel 0 "load_multiple_operation"
6060 [(set (match_operand:SI 1 "s_register_operand" "=l")
6061 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6063 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6064 (mem:SI (match_dup 2)))
6065 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6066 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6067 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6068 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6069 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6070 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6071 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6072 "ldmia\\t%1!, {%3, %4, %5, %6}"
6073 [(set_attr "type" "load4")]
6076 (define_insn "*ldmsi_postinc3"
6077 [(match_parallel 0 "load_multiple_operation"
6078 [(set (match_operand:SI 1 "s_register_operand" "=r")
6079 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6081 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6082 (mem:SI (match_dup 2)))
6083 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6084 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6085 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6086 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6087 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6088 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6089 [(set_attr "type" "load3")
6090 (set_attr "predicable" "yes")]
6093 (define_insn "*ldmsi_postinc2"
6094 [(match_parallel 0 "load_multiple_operation"
6095 [(set (match_operand:SI 1 "s_register_operand" "=r")
6096 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6098 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6099 (mem:SI (match_dup 2)))
6100 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6101 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6102 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6103 "ldm%(ia%)\\t%1!, {%3, %4}"
6104 [(set_attr "type" "load2")
6105 (set_attr "predicable" "yes")]
6108 ;; Ordinary load multiple
6110 (define_insn "*ldmsi4"
6111 [(match_parallel 0 "load_multiple_operation"
6112 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6113 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6114 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6115 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6116 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6117 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6118 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6119 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6120 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6121 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6122 [(set_attr "type" "load4")
6123 (set_attr "predicable" "yes")]
6126 (define_insn "*ldmsi3"
6127 [(match_parallel 0 "load_multiple_operation"
6128 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6129 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6130 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6131 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6132 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6133 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6134 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6135 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6136 [(set_attr "type" "load3")
6137 (set_attr "predicable" "yes")]
6140 (define_insn "*ldmsi2"
6141 [(match_parallel 0 "load_multiple_operation"
6142 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6143 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6144 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6145 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6146 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6147 "ldm%(ia%)\\t%1, {%2, %3}"
6148 [(set_attr "type" "load2")
6149 (set_attr "predicable" "yes")]
6152 (define_expand "store_multiple"
6153 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6154 (match_operand:SI 1 "" ""))
6155 (use (match_operand:SI 2 "" ""))])]
6158 HOST_WIDE_INT offset = 0;
6160 /* Support only fixed point registers. */
6161 if (GET_CODE (operands[2]) != CONST_INT
6162 || INTVAL (operands[2]) > 14
6163 || INTVAL (operands[2]) < 2
6164 || GET_CODE (operands[1]) != REG
6165 || GET_CODE (operands[0]) != MEM
6166 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6167 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6171 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6172 force_reg (SImode, XEXP (operands[0], 0)),
6173 TRUE, FALSE, operands[0], &offset);
6176 ;; Store multiple with write-back
6178 (define_insn "*stmsi_postinc4"
6179 [(match_parallel 0 "store_multiple_operation"
6180 [(set (match_operand:SI 1 "s_register_operand" "=r")
6181 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6183 (set (mem:SI (match_dup 2))
6184 (match_operand:SI 3 "arm_hard_register_operand" ""))
6185 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6186 (match_operand:SI 4 "arm_hard_register_operand" ""))
6187 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6188 (match_operand:SI 5 "arm_hard_register_operand" ""))
6189 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6190 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6191 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6192 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6193 [(set_attr "predicable" "yes")
6194 (set_attr "type" "store4")]
6197 (define_insn "*stmsi_postinc4_thumb1"
6198 [(match_parallel 0 "store_multiple_operation"
6199 [(set (match_operand:SI 1 "s_register_operand" "=l")
6200 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6202 (set (mem:SI (match_dup 2))
6203 (match_operand:SI 3 "arm_hard_register_operand" ""))
6204 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6205 (match_operand:SI 4 "arm_hard_register_operand" ""))
6206 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6207 (match_operand:SI 5 "arm_hard_register_operand" ""))
6208 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6209 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6210 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6211 "stmia\\t%1!, {%3, %4, %5, %6}"
6212 [(set_attr "type" "store4")]
6215 (define_insn "*stmsi_postinc3"
6216 [(match_parallel 0 "store_multiple_operation"
6217 [(set (match_operand:SI 1 "s_register_operand" "=r")
6218 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6220 (set (mem:SI (match_dup 2))
6221 (match_operand:SI 3 "arm_hard_register_operand" ""))
6222 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6223 (match_operand:SI 4 "arm_hard_register_operand" ""))
6224 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6225 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6226 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6227 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6228 [(set_attr "predicable" "yes")
6229 (set_attr "type" "store3")]
6232 (define_insn "*stmsi_postinc2"
6233 [(match_parallel 0 "store_multiple_operation"
6234 [(set (match_operand:SI 1 "s_register_operand" "=r")
6235 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6237 (set (mem:SI (match_dup 2))
6238 (match_operand:SI 3 "arm_hard_register_operand" ""))
6239 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6240 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6241 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6242 "stm%(ia%)\\t%1!, {%3, %4}"
6243 [(set_attr "predicable" "yes")
6244 (set_attr "type" "store2")]
6247 ;; Ordinary store multiple
6249 (define_insn "*stmsi4"
6250 [(match_parallel 0 "store_multiple_operation"
6251 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6252 (match_operand:SI 2 "arm_hard_register_operand" ""))
6253 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6254 (match_operand:SI 3 "arm_hard_register_operand" ""))
6255 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6256 (match_operand:SI 4 "arm_hard_register_operand" ""))
6257 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6258 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6259 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6260 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6261 [(set_attr "predicable" "yes")
6262 (set_attr "type" "store4")]
6265 (define_insn "*stmsi3"
6266 [(match_parallel 0 "store_multiple_operation"
6267 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6268 (match_operand:SI 2 "arm_hard_register_operand" ""))
6269 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6270 (match_operand:SI 3 "arm_hard_register_operand" ""))
6271 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6272 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6273 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6274 "stm%(ia%)\\t%1, {%2, %3, %4}"
6275 [(set_attr "predicable" "yes")
6276 (set_attr "type" "store3")]
6279 (define_insn "*stmsi2"
6280 [(match_parallel 0 "store_multiple_operation"
6281 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6282 (match_operand:SI 2 "arm_hard_register_operand" ""))
6283 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6284 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6285 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6286 "stm%(ia%)\\t%1, {%2, %3}"
6287 [(set_attr "predicable" "yes")
6288 (set_attr "type" "store2")]
6291 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6292 ;; We could let this apply for blocks of less than this, but it clobbers so
6293 ;; many registers that there is then probably a better way.
6295 (define_expand "movmemqi"
6296 [(match_operand:BLK 0 "general_operand" "")
6297 (match_operand:BLK 1 "general_operand" "")
6298 (match_operand:SI 2 "const_int_operand" "")
6299 (match_operand:SI 3 "const_int_operand" "")]
6304 if (arm_gen_movmemqi (operands))
6308 else /* TARGET_THUMB1 */
6310 if ( INTVAL (operands[3]) != 4
6311 || INTVAL (operands[2]) > 48)
6314 thumb_expand_movmemqi (operands);
6320 ;; Thumb block-move insns
6322 (define_insn "movmem12b"
6323 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6324 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6325 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6326 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6327 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6328 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6329 (set (match_operand:SI 0 "register_operand" "=l")
6330 (plus:SI (match_dup 2) (const_int 12)))
6331 (set (match_operand:SI 1 "register_operand" "=l")
6332 (plus:SI (match_dup 3) (const_int 12)))
6333 (clobber (match_scratch:SI 4 "=&l"))
6334 (clobber (match_scratch:SI 5 "=&l"))
6335 (clobber (match_scratch:SI 6 "=&l"))]
6337 "* return thumb_output_move_mem_multiple (3, operands);"
6338 [(set_attr "length" "4")
6339 ; This isn't entirely accurate... It loads as well, but in terms of
6340 ; scheduling the following insn it is better to consider it as a store
6341 (set_attr "type" "store3")]
6344 (define_insn "movmem8b"
6345 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6346 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6347 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6348 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6349 (set (match_operand:SI 0 "register_operand" "=l")
6350 (plus:SI (match_dup 2) (const_int 8)))
6351 (set (match_operand:SI 1 "register_operand" "=l")
6352 (plus:SI (match_dup 3) (const_int 8)))
6353 (clobber (match_scratch:SI 4 "=&l"))
6354 (clobber (match_scratch:SI 5 "=&l"))]
6356 "* return thumb_output_move_mem_multiple (2, operands);"
6357 [(set_attr "length" "4")
6358 ; This isn't entirely accurate... It loads as well, but in terms of
6359 ; scheduling the following insn it is better to consider it as a store
6360 (set_attr "type" "store2")]
6365 ;; Compare & branch insns
6366 ;; The range calculations are based as follows:
6367 ;; For forward branches, the address calculation returns the address of
6368 ;; the next instruction. This is 2 beyond the branch instruction.
6369 ;; For backward branches, the address calculation returns the address of
6370 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6371 ;; instruction for the shortest sequence, and 4 before the branch instruction
6372 ;; if we have to jump around an unconditional branch.
6373 ;; To the basic branch range the PC offset must be added (this is +4).
6374 ;; So for forward branches we have
6375 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6376 ;; And for backward branches we have
6377 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6379 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6380 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6382 (define_expand "cbranchsi4"
6383 [(set (pc) (if_then_else
6384 (match_operator 0 "arm_comparison_operator"
6385 [(match_operand:SI 1 "s_register_operand" "")
6386 (match_operand:SI 2 "nonmemory_operand" "")])
6387 (label_ref (match_operand 3 "" ""))
6391 if (thumb1_cmpneg_operand (operands[2], SImode))
6393 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6394 operands[3], operands[0]));
6397 if (!thumb1_cmp_operand (operands[2], SImode))
6398 operands[2] = force_reg (SImode, operands[2]);
6401 (define_insn "*cbranchsi4_insn"
6402 [(set (pc) (if_then_else
6403 (match_operator 0 "arm_comparison_operator"
6404 [(match_operand:SI 1 "s_register_operand" "l,*h")
6405 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6406 (label_ref (match_operand 3 "" ""))
6410 output_asm_insn (\"cmp\\t%1, %2\", operands);
6412 switch (get_attr_length (insn))
6414 case 4: return \"b%d0\\t%l3\";
6415 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6416 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6419 [(set (attr "far_jump")
6421 (eq_attr "length" "8")
6422 (const_string "yes")
6423 (const_string "no")))
6424 (set (attr "length")
6426 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6427 (le (minus (match_dup 3) (pc)) (const_int 256)))
6430 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6431 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6436 (define_insn "cbranchsi4_scratch"
6437 [(set (pc) (if_then_else
6438 (match_operator 4 "arm_comparison_operator"
6439 [(match_operand:SI 1 "s_register_operand" "l,0")
6440 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6441 (label_ref (match_operand 3 "" ""))
6443 (clobber (match_scratch:SI 0 "=l,l"))]
6446 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6448 switch (get_attr_length (insn))
6450 case 4: return \"b%d4\\t%l3\";
6451 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6452 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6455 [(set (attr "far_jump")
6457 (eq_attr "length" "8")
6458 (const_string "yes")
6459 (const_string "no")))
6460 (set (attr "length")
6462 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6463 (le (minus (match_dup 3) (pc)) (const_int 256)))
6466 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6467 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6471 (define_insn "*movsi_cbranchsi4"
6474 (match_operator 3 "arm_comparison_operator"
6475 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6477 (label_ref (match_operand 2 "" ""))
6479 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6483 if (which_alternative == 0)
6484 output_asm_insn (\"cmp\t%0, #0\", operands);
6485 else if (which_alternative == 1)
6486 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6489 output_asm_insn (\"cmp\t%1, #0\", operands);
6490 if (which_alternative == 2)
6491 output_asm_insn (\"mov\t%0, %1\", operands);
6493 output_asm_insn (\"str\t%1, %0\", operands);
6495 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6497 case 4: return \"b%d3\\t%l2\";
6498 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6499 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6502 [(set (attr "far_jump")
6504 (ior (and (gt (symbol_ref ("which_alternative"))
6506 (eq_attr "length" "8"))
6507 (eq_attr "length" "10"))
6508 (const_string "yes")
6509 (const_string "no")))
6510 (set (attr "length")
6512 (le (symbol_ref ("which_alternative"))
6515 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6516 (le (minus (match_dup 2) (pc)) (const_int 256)))
6519 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6520 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6524 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6525 (le (minus (match_dup 2) (pc)) (const_int 256)))
6528 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6529 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6534 (define_insn "*negated_cbranchsi4"
6537 (match_operator 0 "equality_operator"
6538 [(match_operand:SI 1 "s_register_operand" "l")
6539 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6540 (label_ref (match_operand 3 "" ""))
6544 output_asm_insn (\"cmn\\t%1, %2\", operands);
6545 switch (get_attr_length (insn))
6547 case 4: return \"b%d0\\t%l3\";
6548 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6549 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6552 [(set (attr "far_jump")
6554 (eq_attr "length" "8")
6555 (const_string "yes")
6556 (const_string "no")))
6557 (set (attr "length")
6559 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6560 (le (minus (match_dup 3) (pc)) (const_int 256)))
6563 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6564 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6569 (define_insn "*tbit_cbranch"
6572 (match_operator 0 "equality_operator"
6573 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6575 (match_operand:SI 2 "const_int_operand" "i"))
6577 (label_ref (match_operand 3 "" ""))
6579 (clobber (match_scratch:SI 4 "=l"))]
6584 op[0] = operands[4];
6585 op[1] = operands[1];
6586 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6588 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6589 switch (get_attr_length (insn))
6591 case 4: return \"b%d0\\t%l3\";
6592 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6593 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6596 [(set (attr "far_jump")
6598 (eq_attr "length" "8")
6599 (const_string "yes")
6600 (const_string "no")))
6601 (set (attr "length")
6603 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6604 (le (minus (match_dup 3) (pc)) (const_int 256)))
6607 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6608 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6613 (define_insn "*tlobits_cbranch"
6616 (match_operator 0 "equality_operator"
6617 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6618 (match_operand:SI 2 "const_int_operand" "i")
6621 (label_ref (match_operand 3 "" ""))
6623 (clobber (match_scratch:SI 4 "=l"))]
6628 op[0] = operands[4];
6629 op[1] = operands[1];
6630 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6632 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6633 switch (get_attr_length (insn))
6635 case 4: return \"b%d0\\t%l3\";
6636 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6637 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6640 [(set (attr "far_jump")
6642 (eq_attr "length" "8")
6643 (const_string "yes")
6644 (const_string "no")))
6645 (set (attr "length")
6647 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6648 (le (minus (match_dup 3) (pc)) (const_int 256)))
6651 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6652 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6657 (define_insn "*tstsi3_cbranch"
6660 (match_operator 3 "equality_operator"
6661 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6662 (match_operand:SI 1 "s_register_operand" "l"))
6664 (label_ref (match_operand 2 "" ""))
6669 output_asm_insn (\"tst\\t%0, %1\", operands);
6670 switch (get_attr_length (insn))
6672 case 4: return \"b%d3\\t%l2\";
6673 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6674 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6677 [(set (attr "far_jump")
6679 (eq_attr "length" "8")
6680 (const_string "yes")
6681 (const_string "no")))
6682 (set (attr "length")
6684 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6685 (le (minus (match_dup 2) (pc)) (const_int 256)))
6688 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6689 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6694 (define_insn "*andsi3_cbranch"
6697 (match_operator 5 "equality_operator"
6698 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6699 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6701 (label_ref (match_operand 4 "" ""))
6703 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6704 (and:SI (match_dup 2) (match_dup 3)))
6705 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6709 if (which_alternative == 0)
6710 output_asm_insn (\"and\\t%0, %3\", operands);
6711 else if (which_alternative == 1)
6713 output_asm_insn (\"and\\t%1, %3\", operands);
6714 output_asm_insn (\"mov\\t%0, %1\", operands);
6718 output_asm_insn (\"and\\t%1, %3\", operands);
6719 output_asm_insn (\"str\\t%1, %0\", operands);
6722 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6724 case 4: return \"b%d5\\t%l4\";
6725 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6726 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6729 [(set (attr "far_jump")
6731 (ior (and (eq (symbol_ref ("which_alternative"))
6733 (eq_attr "length" "8"))
6734 (eq_attr "length" "10"))
6735 (const_string "yes")
6736 (const_string "no")))
6737 (set (attr "length")
6739 (eq (symbol_ref ("which_alternative"))
6742 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6743 (le (minus (match_dup 4) (pc)) (const_int 256)))
6746 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6747 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6751 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6752 (le (minus (match_dup 4) (pc)) (const_int 256)))
6755 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6756 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6761 (define_insn "*orrsi3_cbranch_scratch"
6764 (match_operator 4 "equality_operator"
6765 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6766 (match_operand:SI 2 "s_register_operand" "l"))
6768 (label_ref (match_operand 3 "" ""))
6770 (clobber (match_scratch:SI 0 "=l"))]
6774 output_asm_insn (\"orr\\t%0, %2\", operands);
6775 switch (get_attr_length (insn))
6777 case 4: return \"b%d4\\t%l3\";
6778 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6779 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6782 [(set (attr "far_jump")
6784 (eq_attr "length" "8")
6785 (const_string "yes")
6786 (const_string "no")))
6787 (set (attr "length")
6789 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6790 (le (minus (match_dup 3) (pc)) (const_int 256)))
6793 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6794 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6799 (define_insn "*orrsi3_cbranch"
6802 (match_operator 5 "equality_operator"
6803 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6804 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6806 (label_ref (match_operand 4 "" ""))
6808 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6809 (ior:SI (match_dup 2) (match_dup 3)))
6810 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6814 if (which_alternative == 0)
6815 output_asm_insn (\"orr\\t%0, %3\", operands);
6816 else if (which_alternative == 1)
6818 output_asm_insn (\"orr\\t%1, %3\", operands);
6819 output_asm_insn (\"mov\\t%0, %1\", operands);
6823 output_asm_insn (\"orr\\t%1, %3\", operands);
6824 output_asm_insn (\"str\\t%1, %0\", operands);
6827 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6829 case 4: return \"b%d5\\t%l4\";
6830 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6831 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6834 [(set (attr "far_jump")
6836 (ior (and (eq (symbol_ref ("which_alternative"))
6838 (eq_attr "length" "8"))
6839 (eq_attr "length" "10"))
6840 (const_string "yes")
6841 (const_string "no")))
6842 (set (attr "length")
6844 (eq (symbol_ref ("which_alternative"))
6847 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6848 (le (minus (match_dup 4) (pc)) (const_int 256)))
6851 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6852 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6856 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6857 (le (minus (match_dup 4) (pc)) (const_int 256)))
6860 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6861 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6866 (define_insn "*xorsi3_cbranch_scratch"
6869 (match_operator 4 "equality_operator"
6870 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6871 (match_operand:SI 2 "s_register_operand" "l"))
6873 (label_ref (match_operand 3 "" ""))
6875 (clobber (match_scratch:SI 0 "=l"))]
6879 output_asm_insn (\"eor\\t%0, %2\", operands);
6880 switch (get_attr_length (insn))
6882 case 4: return \"b%d4\\t%l3\";
6883 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6884 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6887 [(set (attr "far_jump")
6889 (eq_attr "length" "8")
6890 (const_string "yes")
6891 (const_string "no")))
6892 (set (attr "length")
6894 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6895 (le (minus (match_dup 3) (pc)) (const_int 256)))
6898 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6899 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6904 (define_insn "*xorsi3_cbranch"
6907 (match_operator 5 "equality_operator"
6908 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6909 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6911 (label_ref (match_operand 4 "" ""))
6913 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6914 (xor:SI (match_dup 2) (match_dup 3)))
6915 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6919 if (which_alternative == 0)
6920 output_asm_insn (\"eor\\t%0, %3\", operands);
6921 else if (which_alternative == 1)
6923 output_asm_insn (\"eor\\t%1, %3\", operands);
6924 output_asm_insn (\"mov\\t%0, %1\", operands);
6928 output_asm_insn (\"eor\\t%1, %3\", operands);
6929 output_asm_insn (\"str\\t%1, %0\", operands);
6932 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6934 case 4: return \"b%d5\\t%l4\";
6935 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6936 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6939 [(set (attr "far_jump")
6941 (ior (and (eq (symbol_ref ("which_alternative"))
6943 (eq_attr "length" "8"))
6944 (eq_attr "length" "10"))
6945 (const_string "yes")
6946 (const_string "no")))
6947 (set (attr "length")
6949 (eq (symbol_ref ("which_alternative"))
6952 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6953 (le (minus (match_dup 4) (pc)) (const_int 256)))
6956 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6957 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6961 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6962 (le (minus (match_dup 4) (pc)) (const_int 256)))
6965 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6966 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6971 (define_insn "*bicsi3_cbranch_scratch"
6974 (match_operator 4 "equality_operator"
6975 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6976 (match_operand:SI 1 "s_register_operand" "0"))
6978 (label_ref (match_operand 3 "" ""))
6980 (clobber (match_scratch:SI 0 "=l"))]
6984 output_asm_insn (\"bic\\t%0, %2\", operands);
6985 switch (get_attr_length (insn))
6987 case 4: return \"b%d4\\t%l3\";
6988 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6989 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6992 [(set (attr "far_jump")
6994 (eq_attr "length" "8")
6995 (const_string "yes")
6996 (const_string "no")))
6997 (set (attr "length")
6999 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7000 (le (minus (match_dup 3) (pc)) (const_int 256)))
7003 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7004 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7009 (define_insn "*bicsi3_cbranch"
7012 (match_operator 5 "equality_operator"
7013 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7014 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7016 (label_ref (match_operand 4 "" ""))
7018 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7019 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7020 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7024 if (which_alternative == 0)
7025 output_asm_insn (\"bic\\t%0, %3\", operands);
7026 else if (which_alternative <= 2)
7028 output_asm_insn (\"bic\\t%1, %3\", operands);
7029 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7030 conditions again, since we're only testing for equality. */
7031 output_asm_insn (\"mov\\t%0, %1\", operands);
7035 output_asm_insn (\"bic\\t%1, %3\", operands);
7036 output_asm_insn (\"str\\t%1, %0\", operands);
7039 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7041 case 4: return \"b%d5\\t%l4\";
7042 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7043 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7046 [(set (attr "far_jump")
7048 (ior (and (eq (symbol_ref ("which_alternative"))
7050 (eq_attr "length" "8"))
7051 (eq_attr "length" "10"))
7052 (const_string "yes")
7053 (const_string "no")))
7054 (set (attr "length")
7056 (eq (symbol_ref ("which_alternative"))
7059 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7060 (le (minus (match_dup 4) (pc)) (const_int 256)))
7063 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7064 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7068 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7069 (le (minus (match_dup 4) (pc)) (const_int 256)))
7072 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7073 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7078 (define_insn "*cbranchne_decr1"
7080 (if_then_else (match_operator 3 "equality_operator"
7081 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7083 (label_ref (match_operand 4 "" ""))
7085 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7086 (plus:SI (match_dup 2) (const_int -1)))
7087 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7092 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7094 VOIDmode, operands[2], const1_rtx);
7095 cond[1] = operands[4];
7097 if (which_alternative == 0)
7098 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7099 else if (which_alternative == 1)
7101 /* We must provide an alternative for a hi reg because reload
7102 cannot handle output reloads on a jump instruction, but we
7103 can't subtract into that. Fortunately a mov from lo to hi
7104 does not clobber the condition codes. */
7105 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7106 output_asm_insn (\"mov\\t%0, %1\", operands);
7110 /* Similarly, but the target is memory. */
7111 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7112 output_asm_insn (\"str\\t%1, %0\", operands);
7115 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7118 output_asm_insn (\"b%d0\\t%l1\", cond);
7121 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7122 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7124 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7125 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7129 [(set (attr "far_jump")
7131 (ior (and (eq (symbol_ref ("which_alternative"))
7133 (eq_attr "length" "8"))
7134 (eq_attr "length" "10"))
7135 (const_string "yes")
7136 (const_string "no")))
7137 (set_attr_alternative "length"
7141 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7142 (le (minus (match_dup 4) (pc)) (const_int 256)))
7145 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7146 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7151 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7152 (le (minus (match_dup 4) (pc)) (const_int 256)))
7155 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7156 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7161 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7162 (le (minus (match_dup 4) (pc)) (const_int 256)))
7165 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7166 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7171 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7172 (le (minus (match_dup 4) (pc)) (const_int 256)))
7175 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7176 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7181 (define_insn "*addsi3_cbranch"
7184 (match_operator 4 "comparison_operator"
7186 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7187 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7189 (label_ref (match_operand 5 "" ""))
7192 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7193 (plus:SI (match_dup 2) (match_dup 3)))
7194 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7196 && (GET_CODE (operands[4]) == EQ
7197 || GET_CODE (operands[4]) == NE
7198 || GET_CODE (operands[4]) == GE
7199 || GET_CODE (operands[4]) == LT)"
7205 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7206 cond[1] = operands[2];
7207 cond[2] = operands[3];
7209 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7210 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7212 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7214 if (which_alternative >= 3
7215 && which_alternative < 4)
7216 output_asm_insn (\"mov\\t%0, %1\", operands);
7217 else if (which_alternative >= 4)
7218 output_asm_insn (\"str\\t%1, %0\", operands);
7220 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7223 return \"b%d4\\t%l5\";
7225 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7227 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7231 [(set (attr "far_jump")
7233 (ior (and (lt (symbol_ref ("which_alternative"))
7235 (eq_attr "length" "8"))
7236 (eq_attr "length" "10"))
7237 (const_string "yes")
7238 (const_string "no")))
7239 (set (attr "length")
7241 (lt (symbol_ref ("which_alternative"))
7244 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7245 (le (minus (match_dup 5) (pc)) (const_int 256)))
7248 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7249 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7253 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7254 (le (minus (match_dup 5) (pc)) (const_int 256)))
7257 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7258 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7263 (define_insn "*addsi3_cbranch_scratch"
7266 (match_operator 3 "comparison_operator"
7268 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7269 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7271 (label_ref (match_operand 4 "" ""))
7273 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7275 && (GET_CODE (operands[3]) == EQ
7276 || GET_CODE (operands[3]) == NE
7277 || GET_CODE (operands[3]) == GE
7278 || GET_CODE (operands[3]) == LT)"
7281 switch (which_alternative)
7284 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7287 output_asm_insn (\"cmn\t%1, %2\", operands);
7290 if (INTVAL (operands[2]) < 0)
7291 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7293 output_asm_insn (\"add\t%0, %1, %2\", operands);
7296 if (INTVAL (operands[2]) < 0)
7297 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7299 output_asm_insn (\"add\t%0, %0, %2\", operands);
7303 switch (get_attr_length (insn))
7306 return \"b%d3\\t%l4\";
7308 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7310 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7314 [(set (attr "far_jump")
7316 (eq_attr "length" "8")
7317 (const_string "yes")
7318 (const_string "no")))
7319 (set (attr "length")
7321 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7322 (le (minus (match_dup 4) (pc)) (const_int 256)))
7325 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7326 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7331 (define_insn "*subsi3_cbranch"
7334 (match_operator 4 "comparison_operator"
7336 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7337 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7339 (label_ref (match_operand 5 "" ""))
7341 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7342 (minus:SI (match_dup 2) (match_dup 3)))
7343 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7345 && (GET_CODE (operands[4]) == EQ
7346 || GET_CODE (operands[4]) == NE
7347 || GET_CODE (operands[4]) == GE
7348 || GET_CODE (operands[4]) == LT)"
7351 if (which_alternative == 0)
7352 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7353 else if (which_alternative == 1)
7355 /* We must provide an alternative for a hi reg because reload
7356 cannot handle output reloads on a jump instruction, but we
7357 can't subtract into that. Fortunately a mov from lo to hi
7358 does not clobber the condition codes. */
7359 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7360 output_asm_insn (\"mov\\t%0, %1\", operands);
7364 /* Similarly, but the target is memory. */
7365 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7366 output_asm_insn (\"str\\t%1, %0\", operands);
7369 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7372 return \"b%d4\\t%l5\";
7374 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7376 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7380 [(set (attr "far_jump")
7382 (ior (and (eq (symbol_ref ("which_alternative"))
7384 (eq_attr "length" "8"))
7385 (eq_attr "length" "10"))
7386 (const_string "yes")
7387 (const_string "no")))
7388 (set (attr "length")
7390 (eq (symbol_ref ("which_alternative"))
7393 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7394 (le (minus (match_dup 5) (pc)) (const_int 256)))
7397 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7398 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7402 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7403 (le (minus (match_dup 5) (pc)) (const_int 256)))
7406 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7407 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7412 (define_insn "*subsi3_cbranch_scratch"
7415 (match_operator 0 "arm_comparison_operator"
7416 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7417 (match_operand:SI 2 "nonmemory_operand" "l"))
7419 (label_ref (match_operand 3 "" ""))
7422 && (GET_CODE (operands[0]) == EQ
7423 || GET_CODE (operands[0]) == NE
7424 || GET_CODE (operands[0]) == GE
7425 || GET_CODE (operands[0]) == LT)"
7427 output_asm_insn (\"cmp\\t%1, %2\", operands);
7428 switch (get_attr_length (insn))
7430 case 4: return \"b%d0\\t%l3\";
7431 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7432 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7435 [(set (attr "far_jump")
7437 (eq_attr "length" "8")
7438 (const_string "yes")
7439 (const_string "no")))
7440 (set (attr "length")
7442 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7443 (le (minus (match_dup 3) (pc)) (const_int 256)))
7446 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7447 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7452 ;; Comparison and test insns
7454 (define_expand "cmpsi"
7455 [(match_operand:SI 0 "s_register_operand" "")
7456 (match_operand:SI 1 "arm_add_operand" "")]
7459 arm_compare_op0 = operands[0];
7460 arm_compare_op1 = operands[1];
7465 (define_expand "cmpsf"
7466 [(match_operand:SF 0 "s_register_operand" "")
7467 (match_operand:SF 1 "arm_float_compare_operand" "")]
7468 "TARGET_32BIT && TARGET_HARD_FLOAT"
7470 arm_compare_op0 = operands[0];
7471 arm_compare_op1 = operands[1];
7476 (define_expand "cmpdf"
7477 [(match_operand:DF 0 "s_register_operand" "")
7478 (match_operand:DF 1 "arm_float_compare_operand" "")]
7479 "TARGET_32BIT && TARGET_HARD_FLOAT"
7481 arm_compare_op0 = operands[0];
7482 arm_compare_op1 = operands[1];
7487 (define_insn "*arm_cmpsi_insn"
7488 [(set (reg:CC CC_REGNUM)
7489 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7490 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7495 [(set_attr "conds" "set")]
7498 (define_insn "*arm_cmpsi_shiftsi"
7499 [(set (reg:CC CC_REGNUM)
7500 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7501 (match_operator:SI 3 "shift_operator"
7502 [(match_operand:SI 1 "s_register_operand" "r")
7503 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7506 [(set_attr "conds" "set")
7507 (set_attr "shift" "1")
7508 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7509 (const_string "alu_shift")
7510 (const_string "alu_shift_reg")))]
7513 (define_insn "*arm_cmpsi_shiftsi_swp"
7514 [(set (reg:CC_SWP CC_REGNUM)
7515 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7516 [(match_operand:SI 1 "s_register_operand" "r")
7517 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7518 (match_operand:SI 0 "s_register_operand" "r")))]
7521 [(set_attr "conds" "set")
7522 (set_attr "shift" "1")
7523 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7524 (const_string "alu_shift")
7525 (const_string "alu_shift_reg")))]
7528 (define_insn "*arm_cmpsi_negshiftsi_si"
7529 [(set (reg:CC_Z CC_REGNUM)
7531 (neg:SI (match_operator:SI 1 "shift_operator"
7532 [(match_operand:SI 2 "s_register_operand" "r")
7533 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7534 (match_operand:SI 0 "s_register_operand" "r")))]
7537 [(set_attr "conds" "set")
7538 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7539 (const_string "alu_shift")
7540 (const_string "alu_shift_reg")))]
7543 ;; Cirrus SF compare instruction
7544 (define_insn "*cirrus_cmpsf"
7545 [(set (reg:CCFP CC_REGNUM)
7546 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7547 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7548 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7549 "cfcmps%?\\tr15, %V0, %V1"
7550 [(set_attr "type" "mav_farith")
7551 (set_attr "cirrus" "compare")]
7554 ;; Cirrus DF compare instruction
7555 (define_insn "*cirrus_cmpdf"
7556 [(set (reg:CCFP CC_REGNUM)
7557 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7558 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7559 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7560 "cfcmpd%?\\tr15, %V0, %V1"
7561 [(set_attr "type" "mav_farith")
7562 (set_attr "cirrus" "compare")]
7565 ;; Cirrus DI compare instruction
7566 (define_expand "cmpdi"
7567 [(match_operand:DI 0 "cirrus_fp_register" "")
7568 (match_operand:DI 1 "cirrus_fp_register" "")]
7569 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7571 arm_compare_op0 = operands[0];
7572 arm_compare_op1 = operands[1];
7576 (define_insn "*cirrus_cmpdi"
7577 [(set (reg:CC CC_REGNUM)
7578 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7579 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7580 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7581 "cfcmp64%?\\tr15, %V0, %V1"
7582 [(set_attr "type" "mav_farith")
7583 (set_attr "cirrus" "compare")]
7586 ; This insn allows redundant compares to be removed by cse, nothing should
7587 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7588 ; is deleted later on. The match_dup will match the mode here, so that
7589 ; mode changes of the condition codes aren't lost by this even though we don't
7590 ; specify what they are.
7592 (define_insn "*deleted_compare"
7593 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7595 "\\t%@ deleted compare"
7596 [(set_attr "conds" "set")
7597 (set_attr "length" "0")]
7601 ;; Conditional branch insns
7603 (define_expand "beq"
7605 (if_then_else (eq (match_dup 1) (const_int 0))
7606 (label_ref (match_operand 0 "" ""))
7609 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7612 (define_expand "bne"
7614 (if_then_else (ne (match_dup 1) (const_int 0))
7615 (label_ref (match_operand 0 "" ""))
7618 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7621 (define_expand "bgt"
7623 (if_then_else (gt (match_dup 1) (const_int 0))
7624 (label_ref (match_operand 0 "" ""))
7627 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7630 (define_expand "ble"
7632 (if_then_else (le (match_dup 1) (const_int 0))
7633 (label_ref (match_operand 0 "" ""))
7636 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7639 (define_expand "bge"
7641 (if_then_else (ge (match_dup 1) (const_int 0))
7642 (label_ref (match_operand 0 "" ""))
7645 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7648 (define_expand "blt"
7650 (if_then_else (lt (match_dup 1) (const_int 0))
7651 (label_ref (match_operand 0 "" ""))
7654 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7657 (define_expand "bgtu"
7659 (if_then_else (gtu (match_dup 1) (const_int 0))
7660 (label_ref (match_operand 0 "" ""))
7663 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7666 (define_expand "bleu"
7668 (if_then_else (leu (match_dup 1) (const_int 0))
7669 (label_ref (match_operand 0 "" ""))
7672 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7675 (define_expand "bgeu"
7677 (if_then_else (geu (match_dup 1) (const_int 0))
7678 (label_ref (match_operand 0 "" ""))
7681 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7684 (define_expand "bltu"
7686 (if_then_else (ltu (match_dup 1) (const_int 0))
7687 (label_ref (match_operand 0 "" ""))
7690 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7693 (define_expand "bunordered"
7695 (if_then_else (unordered (match_dup 1) (const_int 0))
7696 (label_ref (match_operand 0 "" ""))
7698 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7699 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7703 (define_expand "bordered"
7705 (if_then_else (ordered (match_dup 1) (const_int 0))
7706 (label_ref (match_operand 0 "" ""))
7708 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7709 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7713 (define_expand "bungt"
7715 (if_then_else (ungt (match_dup 1) (const_int 0))
7716 (label_ref (match_operand 0 "" ""))
7718 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7719 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7722 (define_expand "bunlt"
7724 (if_then_else (unlt (match_dup 1) (const_int 0))
7725 (label_ref (match_operand 0 "" ""))
7727 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7728 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7731 (define_expand "bunge"
7733 (if_then_else (unge (match_dup 1) (const_int 0))
7734 (label_ref (match_operand 0 "" ""))
7736 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7737 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7740 (define_expand "bunle"
7742 (if_then_else (unle (match_dup 1) (const_int 0))
7743 (label_ref (match_operand 0 "" ""))
7745 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7746 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7749 ;; The following two patterns need two branch instructions, since there is
7750 ;; no single instruction that will handle all cases.
7751 (define_expand "buneq"
7753 (if_then_else (uneq (match_dup 1) (const_int 0))
7754 (label_ref (match_operand 0 "" ""))
7756 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7757 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7760 (define_expand "bltgt"
7762 (if_then_else (ltgt (match_dup 1) (const_int 0))
7763 (label_ref (match_operand 0 "" ""))
7765 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7766 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7770 ;; Patterns to match conditional branch insns.
7773 ; Special pattern to match UNEQ.
7774 (define_insn "*arm_buneq"
7776 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7777 (label_ref (match_operand 0 "" ""))
7779 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7781 gcc_assert (!arm_ccfsm_state);
7783 return \"bvs\\t%l0\;beq\\t%l0\";
7785 [(set_attr "conds" "jump_clob")
7786 (set_attr "length" "8")]
7789 ; Special pattern to match LTGT.
7790 (define_insn "*arm_bltgt"
7792 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7793 (label_ref (match_operand 0 "" ""))
7795 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7797 gcc_assert (!arm_ccfsm_state);
7799 return \"bmi\\t%l0\;bgt\\t%l0\";
7801 [(set_attr "conds" "jump_clob")
7802 (set_attr "length" "8")]
7805 (define_insn "*arm_cond_branch"
7807 (if_then_else (match_operator 1 "arm_comparison_operator"
7808 [(match_operand 2 "cc_register" "") (const_int 0)])
7809 (label_ref (match_operand 0 "" ""))
7813 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7815 arm_ccfsm_state += 2;
7818 return \"b%d1\\t%l0\";
7820 [(set_attr "conds" "use")
7821 (set_attr "type" "branch")]
7824 ; Special pattern to match reversed UNEQ.
7825 (define_insn "*arm_buneq_reversed"
7827 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7829 (label_ref (match_operand 0 "" ""))))]
7830 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7832 gcc_assert (!arm_ccfsm_state);
7834 return \"bmi\\t%l0\;bgt\\t%l0\";
7836 [(set_attr "conds" "jump_clob")
7837 (set_attr "length" "8")]
7840 ; Special pattern to match reversed LTGT.
7841 (define_insn "*arm_bltgt_reversed"
7843 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7845 (label_ref (match_operand 0 "" ""))))]
7846 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7848 gcc_assert (!arm_ccfsm_state);
7850 return \"bvs\\t%l0\;beq\\t%l0\";
7852 [(set_attr "conds" "jump_clob")
7853 (set_attr "length" "8")]
7856 (define_insn "*arm_cond_branch_reversed"
7858 (if_then_else (match_operator 1 "arm_comparison_operator"
7859 [(match_operand 2 "cc_register" "") (const_int 0)])
7861 (label_ref (match_operand 0 "" ""))))]
7864 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7866 arm_ccfsm_state += 2;
7869 return \"b%D1\\t%l0\";
7871 [(set_attr "conds" "use")
7872 (set_attr "type" "branch")]
7879 (define_expand "seq"
7880 [(set (match_operand:SI 0 "s_register_operand" "")
7881 (eq:SI (match_dup 1) (const_int 0)))]
7883 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7886 (define_expand "sne"
7887 [(set (match_operand:SI 0 "s_register_operand" "")
7888 (ne:SI (match_dup 1) (const_int 0)))]
7890 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7893 (define_expand "sgt"
7894 [(set (match_operand:SI 0 "s_register_operand" "")
7895 (gt:SI (match_dup 1) (const_int 0)))]
7897 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7900 (define_expand "sle"
7901 [(set (match_operand:SI 0 "s_register_operand" "")
7902 (le:SI (match_dup 1) (const_int 0)))]
7904 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7907 (define_expand "sge"
7908 [(set (match_operand:SI 0 "s_register_operand" "")
7909 (ge:SI (match_dup 1) (const_int 0)))]
7911 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7914 (define_expand "slt"
7915 [(set (match_operand:SI 0 "s_register_operand" "")
7916 (lt:SI (match_dup 1) (const_int 0)))]
7918 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7921 (define_expand "sgtu"
7922 [(set (match_operand:SI 0 "s_register_operand" "")
7923 (gtu:SI (match_dup 1) (const_int 0)))]
7925 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7928 (define_expand "sleu"
7929 [(set (match_operand:SI 0 "s_register_operand" "")
7930 (leu:SI (match_dup 1) (const_int 0)))]
7932 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7935 (define_expand "sgeu"
7936 [(set (match_operand:SI 0 "s_register_operand" "")
7937 (geu:SI (match_dup 1) (const_int 0)))]
7939 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7942 (define_expand "sltu"
7943 [(set (match_operand:SI 0 "s_register_operand" "")
7944 (ltu:SI (match_dup 1) (const_int 0)))]
7946 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7949 (define_expand "sunordered"
7950 [(set (match_operand:SI 0 "s_register_operand" "")
7951 (unordered:SI (match_dup 1) (const_int 0)))]
7952 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7953 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7957 (define_expand "sordered"
7958 [(set (match_operand:SI 0 "s_register_operand" "")
7959 (ordered:SI (match_dup 1) (const_int 0)))]
7960 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7961 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7965 (define_expand "sungt"
7966 [(set (match_operand:SI 0 "s_register_operand" "")
7967 (ungt:SI (match_dup 1) (const_int 0)))]
7968 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7969 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7973 (define_expand "sunge"
7974 [(set (match_operand:SI 0 "s_register_operand" "")
7975 (unge:SI (match_dup 1) (const_int 0)))]
7976 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7977 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7981 (define_expand "sunlt"
7982 [(set (match_operand:SI 0 "s_register_operand" "")
7983 (unlt:SI (match_dup 1) (const_int 0)))]
7984 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7985 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7989 (define_expand "sunle"
7990 [(set (match_operand:SI 0 "s_register_operand" "")
7991 (unle:SI (match_dup 1) (const_int 0)))]
7992 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7993 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7997 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7998 ;;; simple ARM instructions.
8000 ; (define_expand "suneq"
8001 ; [(set (match_operand:SI 0 "s_register_operand" "")
8002 ; (uneq:SI (match_dup 1) (const_int 0)))]
8003 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8004 ; "gcc_unreachable ();"
8007 ; (define_expand "sltgt"
8008 ; [(set (match_operand:SI 0 "s_register_operand" "")
8009 ; (ltgt:SI (match_dup 1) (const_int 0)))]
8010 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8011 ; "gcc_unreachable ();"
8014 (define_insn "*mov_scc"
8015 [(set (match_operand:SI 0 "s_register_operand" "=r")
8016 (match_operator:SI 1 "arm_comparison_operator"
8017 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8019 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8020 [(set_attr "conds" "use")
8021 (set_attr "length" "8")]
8024 (define_insn "*mov_negscc"
8025 [(set (match_operand:SI 0 "s_register_operand" "=r")
8026 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8027 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8029 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8030 [(set_attr "conds" "use")
8031 (set_attr "length" "8")]
8034 (define_insn "*mov_notscc"
8035 [(set (match_operand:SI 0 "s_register_operand" "=r")
8036 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8037 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8039 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8040 [(set_attr "conds" "use")
8041 (set_attr "length" "8")]
8044 (define_expand "cstoresi4"
8045 [(set (match_operand:SI 0 "s_register_operand" "")
8046 (match_operator:SI 1 "arm_comparison_operator"
8047 [(match_operand:SI 2 "s_register_operand" "")
8048 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8051 rtx op3, scratch, scratch2;
8053 if (operands[3] == const0_rtx)
8055 switch (GET_CODE (operands[1]))
8058 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8062 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8066 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8067 NULL_RTX, 0, OPTAB_WIDEN);
8068 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8069 NULL_RTX, 0, OPTAB_WIDEN);
8070 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8071 operands[0], 1, OPTAB_WIDEN);
8075 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8077 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8078 NULL_RTX, 1, OPTAB_WIDEN);
8082 scratch = expand_binop (SImode, ashr_optab, operands[2],
8083 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8084 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8085 NULL_RTX, 0, OPTAB_WIDEN);
8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8090 /* LT is handled by generic code. No need for unsigned with 0. */
8097 switch (GET_CODE (operands[1]))
8100 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8101 NULL_RTX, 0, OPTAB_WIDEN);
8102 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8106 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8107 NULL_RTX, 0, OPTAB_WIDEN);
8108 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8112 op3 = force_reg (SImode, operands[3]);
8114 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8115 NULL_RTX, 1, OPTAB_WIDEN);
8116 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8117 NULL_RTX, 0, OPTAB_WIDEN);
8118 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8124 if (!thumb1_cmp_operand (op3, SImode))
8125 op3 = force_reg (SImode, op3);
8126 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8127 NULL_RTX, 0, OPTAB_WIDEN);
8128 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8129 NULL_RTX, 1, OPTAB_WIDEN);
8130 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8135 op3 = force_reg (SImode, operands[3]);
8136 scratch = force_reg (SImode, const0_rtx);
8137 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8143 if (!thumb1_cmp_operand (op3, SImode))
8144 op3 = force_reg (SImode, op3);
8145 scratch = force_reg (SImode, const0_rtx);
8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8152 if (!thumb1_cmp_operand (op3, SImode))
8153 op3 = force_reg (SImode, op3);
8154 scratch = gen_reg_rtx (SImode);
8155 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8156 emit_insn (gen_negsi2 (operands[0], scratch));
8160 op3 = force_reg (SImode, operands[3]);
8161 scratch = gen_reg_rtx (SImode);
8162 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8163 emit_insn (gen_negsi2 (operands[0], scratch));
8166 /* No good sequences for GT, LT. */
8173 (define_expand "cstoresi_eq0_thumb1"
8175 [(set (match_operand:SI 0 "s_register_operand" "")
8176 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8178 (clobber (match_dup:SI 2))])]
8180 "operands[2] = gen_reg_rtx (SImode);"
8183 (define_expand "cstoresi_ne0_thumb1"
8185 [(set (match_operand:SI 0 "s_register_operand" "")
8186 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8188 (clobber (match_dup:SI 2))])]
8190 "operands[2] = gen_reg_rtx (SImode);"
8193 (define_insn "*cstoresi_eq0_thumb1_insn"
8194 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8195 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8197 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8200 neg\\t%0, %1\;adc\\t%0, %0, %1
8201 neg\\t%2, %1\;adc\\t%0, %1, %2"
8202 [(set_attr "length" "4")]
8205 (define_insn "*cstoresi_ne0_thumb1_insn"
8206 [(set (match_operand:SI 0 "s_register_operand" "=l")
8207 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8209 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8211 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8212 [(set_attr "length" "4")]
8215 (define_insn "cstoresi_nltu_thumb1"
8216 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8217 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8218 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8220 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8221 [(set_attr "length" "4")]
8224 ;; Used as part of the expansion of thumb les sequence.
8225 (define_insn "thumb1_addsi3_addgeu"
8226 [(set (match_operand:SI 0 "s_register_operand" "=l")
8227 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8228 (match_operand:SI 2 "s_register_operand" "l"))
8229 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8230 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8232 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8233 [(set_attr "length" "4")]
8237 ;; Conditional move insns
8239 (define_expand "movsicc"
8240 [(set (match_operand:SI 0 "s_register_operand" "")
8241 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8242 (match_operand:SI 2 "arm_not_operand" "")
8243 (match_operand:SI 3 "arm_not_operand" "")))]
8247 enum rtx_code code = GET_CODE (operands[1]);
8250 if (code == UNEQ || code == LTGT)
8253 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8254 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8258 (define_expand "movsfcc"
8259 [(set (match_operand:SF 0 "s_register_operand" "")
8260 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8261 (match_operand:SF 2 "s_register_operand" "")
8262 (match_operand:SF 3 "nonmemory_operand" "")))]
8263 "TARGET_32BIT && TARGET_HARD_FLOAT"
8266 enum rtx_code code = GET_CODE (operands[1]);
8269 if (code == UNEQ || code == LTGT)
8272 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8273 Otherwise, ensure it is a valid FP add operand */
8274 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8275 || (!arm_float_add_operand (operands[3], SFmode)))
8276 operands[3] = force_reg (SFmode, operands[3]);
8278 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8279 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8283 (define_expand "movdfcc"
8284 [(set (match_operand:DF 0 "s_register_operand" "")
8285 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8286 (match_operand:DF 2 "s_register_operand" "")
8287 (match_operand:DF 3 "arm_float_add_operand" "")))]
8288 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8291 enum rtx_code code = GET_CODE (operands[1]);
8294 if (code == UNEQ || code == LTGT)
8297 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8298 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8302 (define_insn "*movsicc_insn"
8303 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8305 (match_operator 3 "arm_comparison_operator"
8306 [(match_operand 4 "cc_register" "") (const_int 0)])
8307 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8308 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8315 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8316 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8317 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8318 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8319 [(set_attr "length" "4,4,4,4,8,8,8,8")
8320 (set_attr "conds" "use")]
8323 (define_insn "*movsfcc_soft_insn"
8324 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8325 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8326 [(match_operand 4 "cc_register" "") (const_int 0)])
8327 (match_operand:SF 1 "s_register_operand" "0,r")
8328 (match_operand:SF 2 "s_register_operand" "r,0")))]
8329 "TARGET_ARM && TARGET_SOFT_FLOAT"
8333 [(set_attr "conds" "use")]
8337 ;; Jump and linkage insns
8339 (define_expand "jump"
8341 (label_ref (match_operand 0 "" "")))]
8346 (define_insn "*arm_jump"
8348 (label_ref (match_operand 0 "" "")))]
8352 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8354 arm_ccfsm_state += 2;
8357 return \"b%?\\t%l0\";
8360 [(set_attr "predicable" "yes")]
8363 (define_insn "*thumb_jump"
8365 (label_ref (match_operand 0 "" "")))]
8368 if (get_attr_length (insn) == 2)
8370 return \"bl\\t%l0\\t%@ far jump\";
8372 [(set (attr "far_jump")
8374 (eq_attr "length" "4")
8375 (const_string "yes")
8376 (const_string "no")))
8377 (set (attr "length")
8379 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8380 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8385 (define_expand "call"
8386 [(parallel [(call (match_operand 0 "memory_operand" "")
8387 (match_operand 1 "general_operand" ""))
8388 (use (match_operand 2 "" ""))
8389 (clobber (reg:SI LR_REGNUM))])]
8395 /* In an untyped call, we can get NULL for operand 2. */
8396 if (operands[2] == NULL_RTX)
8397 operands[2] = const0_rtx;
8399 /* Decide if we should generate indirect calls by loading the
8400 32-bit address of the callee into a register before performing the
8402 callee = XEXP (operands[0], 0);
8403 if (GET_CODE (callee) == SYMBOL_REF
8404 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8406 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8408 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8409 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8414 (define_expand "call_internal"
8415 [(parallel [(call (match_operand 0 "memory_operand" "")
8416 (match_operand 1 "general_operand" ""))
8417 (use (match_operand 2 "" ""))
8418 (clobber (reg:SI LR_REGNUM))])])
8420 (define_insn "*call_reg_armv5"
8421 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8422 (match_operand 1 "" ""))
8423 (use (match_operand 2 "" ""))
8424 (clobber (reg:SI LR_REGNUM))]
8425 "TARGET_ARM && arm_arch5"
8427 [(set_attr "type" "call")]
8430 (define_insn "*call_reg_arm"
8431 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8432 (match_operand 1 "" ""))
8433 (use (match_operand 2 "" ""))
8434 (clobber (reg:SI LR_REGNUM))]
8435 "TARGET_ARM && !arm_arch5"
8437 return output_call (operands);
8439 ;; length is worst case, normally it is only two
8440 [(set_attr "length" "12")
8441 (set_attr "type" "call")]
8444 (define_insn "*call_mem"
8445 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8446 (match_operand 1 "" ""))
8447 (use (match_operand 2 "" ""))
8448 (clobber (reg:SI LR_REGNUM))]
8451 return output_call_mem (operands);
8453 [(set_attr "length" "12")
8454 (set_attr "type" "call")]
8457 (define_insn "*call_reg_thumb1_v5"
8458 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8459 (match_operand 1 "" ""))
8460 (use (match_operand 2 "" ""))
8461 (clobber (reg:SI LR_REGNUM))]
8462 "TARGET_THUMB1 && arm_arch5"
8464 [(set_attr "length" "2")
8465 (set_attr "type" "call")]
8468 (define_insn "*call_reg_thumb1"
8469 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8470 (match_operand 1 "" ""))
8471 (use (match_operand 2 "" ""))
8472 (clobber (reg:SI LR_REGNUM))]
8473 "TARGET_THUMB1 && !arm_arch5"
8476 if (!TARGET_CALLER_INTERWORKING)
8477 return thumb_call_via_reg (operands[0]);
8478 else if (operands[1] == const0_rtx)
8479 return \"bl\\t%__interwork_call_via_%0\";
8480 else if (frame_pointer_needed)
8481 return \"bl\\t%__interwork_r7_call_via_%0\";
8483 return \"bl\\t%__interwork_r11_call_via_%0\";
8485 [(set_attr "type" "call")]
8488 (define_expand "call_value"
8489 [(parallel [(set (match_operand 0 "" "")
8490 (call (match_operand 1 "memory_operand" "")
8491 (match_operand 2 "general_operand" "")))
8492 (use (match_operand 3 "" ""))
8493 (clobber (reg:SI LR_REGNUM))])]
8499 /* In an untyped call, we can get NULL for operand 2. */
8500 if (operands[3] == 0)
8501 operands[3] = const0_rtx;
8503 /* Decide if we should generate indirect calls by loading the
8504 32-bit address of the callee into a register before performing the
8506 callee = XEXP (operands[1], 0);
8507 if (GET_CODE (callee) == SYMBOL_REF
8508 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8510 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8512 pat = gen_call_value_internal (operands[0], operands[1],
8513 operands[2], operands[3]);
8514 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8519 (define_expand "call_value_internal"
8520 [(parallel [(set (match_operand 0 "" "")
8521 (call (match_operand 1 "memory_operand" "")
8522 (match_operand 2 "general_operand" "")))
8523 (use (match_operand 3 "" ""))
8524 (clobber (reg:SI LR_REGNUM))])])
8526 (define_insn "*call_value_reg_armv5"
8527 [(set (match_operand 0 "" "")
8528 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8529 (match_operand 2 "" "")))
8530 (use (match_operand 3 "" ""))
8531 (clobber (reg:SI LR_REGNUM))]
8532 "TARGET_ARM && arm_arch5"
8534 [(set_attr "type" "call")]
8537 (define_insn "*call_value_reg_arm"
8538 [(set (match_operand 0 "" "")
8539 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8540 (match_operand 2 "" "")))
8541 (use (match_operand 3 "" ""))
8542 (clobber (reg:SI LR_REGNUM))]
8543 "TARGET_ARM && !arm_arch5"
8545 return output_call (&operands[1]);
8547 [(set_attr "length" "12")
8548 (set_attr "type" "call")]
8551 (define_insn "*call_value_mem"
8552 [(set (match_operand 0 "" "")
8553 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8554 (match_operand 2 "" "")))
8555 (use (match_operand 3 "" ""))
8556 (clobber (reg:SI LR_REGNUM))]
8557 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8559 return output_call_mem (&operands[1]);
8561 [(set_attr "length" "12")
8562 (set_attr "type" "call")]
8565 (define_insn "*call_value_reg_thumb1_v5"
8566 [(set (match_operand 0 "" "")
8567 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8568 (match_operand 2 "" "")))
8569 (use (match_operand 3 "" ""))
8570 (clobber (reg:SI LR_REGNUM))]
8571 "TARGET_THUMB1 && arm_arch5"
8573 [(set_attr "length" "2")
8574 (set_attr "type" "call")]
8577 (define_insn "*call_value_reg_thumb1"
8578 [(set (match_operand 0 "" "")
8579 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8580 (match_operand 2 "" "")))
8581 (use (match_operand 3 "" ""))
8582 (clobber (reg:SI LR_REGNUM))]
8583 "TARGET_THUMB1 && !arm_arch5"
8586 if (!TARGET_CALLER_INTERWORKING)
8587 return thumb_call_via_reg (operands[1]);
8588 else if (operands[2] == const0_rtx)
8589 return \"bl\\t%__interwork_call_via_%1\";
8590 else if (frame_pointer_needed)
8591 return \"bl\\t%__interwork_r7_call_via_%1\";
8593 return \"bl\\t%__interwork_r11_call_via_%1\";
8595 [(set_attr "type" "call")]
8598 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8599 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8601 (define_insn "*call_symbol"
8602 [(call (mem:SI (match_operand:SI 0 "" ""))
8603 (match_operand 1 "" ""))
8604 (use (match_operand 2 "" ""))
8605 (clobber (reg:SI LR_REGNUM))]
8607 && (GET_CODE (operands[0]) == SYMBOL_REF)
8608 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8611 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8613 [(set_attr "type" "call")]
8616 (define_insn "*call_value_symbol"
8617 [(set (match_operand 0 "" "")
8618 (call (mem:SI (match_operand:SI 1 "" ""))
8619 (match_operand:SI 2 "" "")))
8620 (use (match_operand 3 "" ""))
8621 (clobber (reg:SI LR_REGNUM))]
8623 && (GET_CODE (operands[1]) == SYMBOL_REF)
8624 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8627 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8629 [(set_attr "type" "call")]
8632 (define_insn "*call_insn"
8633 [(call (mem:SI (match_operand:SI 0 "" ""))
8634 (match_operand:SI 1 "" ""))
8635 (use (match_operand 2 "" ""))
8636 (clobber (reg:SI LR_REGNUM))]
8638 && GET_CODE (operands[0]) == SYMBOL_REF
8639 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8641 [(set_attr "length" "4")
8642 (set_attr "type" "call")]
8645 (define_insn "*call_value_insn"
8646 [(set (match_operand 0 "" "")
8647 (call (mem:SI (match_operand 1 "" ""))
8648 (match_operand 2 "" "")))
8649 (use (match_operand 3 "" ""))
8650 (clobber (reg:SI LR_REGNUM))]
8652 && GET_CODE (operands[1]) == SYMBOL_REF
8653 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8655 [(set_attr "length" "4")
8656 (set_attr "type" "call")]
8659 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8660 (define_expand "sibcall"
8661 [(parallel [(call (match_operand 0 "memory_operand" "")
8662 (match_operand 1 "general_operand" ""))
8664 (use (match_operand 2 "" ""))])]
8668 if (operands[2] == NULL_RTX)
8669 operands[2] = const0_rtx;
8673 (define_expand "sibcall_value"
8674 [(parallel [(set (match_operand 0 "" "")
8675 (call (match_operand 1 "memory_operand" "")
8676 (match_operand 2 "general_operand" "")))
8678 (use (match_operand 3 "" ""))])]
8682 if (operands[3] == NULL_RTX)
8683 operands[3] = const0_rtx;
8687 (define_insn "*sibcall_insn"
8688 [(call (mem:SI (match_operand:SI 0 "" "X"))
8689 (match_operand 1 "" ""))
8691 (use (match_operand 2 "" ""))]
8692 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8694 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8696 [(set_attr "type" "call")]
8699 (define_insn "*sibcall_value_insn"
8700 [(set (match_operand 0 "" "")
8701 (call (mem:SI (match_operand:SI 1 "" "X"))
8702 (match_operand 2 "" "")))
8704 (use (match_operand 3 "" ""))]
8705 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8707 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8709 [(set_attr "type" "call")]
8712 ;; Often the return insn will be the same as loading from memory, so set attr
8713 (define_insn "return"
8715 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8718 if (arm_ccfsm_state == 2)
8720 arm_ccfsm_state += 2;
8723 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8725 [(set_attr "type" "load1")
8726 (set_attr "length" "12")
8727 (set_attr "predicable" "yes")]
8730 (define_insn "*cond_return"
8732 (if_then_else (match_operator 0 "arm_comparison_operator"
8733 [(match_operand 1 "cc_register" "") (const_int 0)])
8736 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8739 if (arm_ccfsm_state == 2)
8741 arm_ccfsm_state += 2;
8744 return output_return_instruction (operands[0], TRUE, FALSE);
8746 [(set_attr "conds" "use")
8747 (set_attr "length" "12")
8748 (set_attr "type" "load1")]
8751 (define_insn "*cond_return_inverted"
8753 (if_then_else (match_operator 0 "arm_comparison_operator"
8754 [(match_operand 1 "cc_register" "") (const_int 0)])
8757 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8760 if (arm_ccfsm_state == 2)
8762 arm_ccfsm_state += 2;
8765 return output_return_instruction (operands[0], TRUE, TRUE);
8767 [(set_attr "conds" "use")
8768 (set_attr "length" "12")
8769 (set_attr "type" "load1")]
8772 ;; Generate a sequence of instructions to determine if the processor is
8773 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8776 (define_expand "return_addr_mask"
8778 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8780 (set (match_operand:SI 0 "s_register_operand" "")
8781 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8783 (const_int 67108860)))] ; 0x03fffffc
8786 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8789 (define_insn "*check_arch2"
8790 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8791 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8794 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8795 [(set_attr "length" "8")
8796 (set_attr "conds" "set")]
8799 ;; Call subroutine returning any type.
8801 (define_expand "untyped_call"
8802 [(parallel [(call (match_operand 0 "" "")
8804 (match_operand 1 "" "")
8805 (match_operand 2 "" "")])]
8810 rtx par = gen_rtx_PARALLEL (VOIDmode,
8811 rtvec_alloc (XVECLEN (operands[2], 0)));
8812 rtx addr = gen_reg_rtx (Pmode);
8816 emit_move_insn (addr, XEXP (operands[1], 0));
8817 mem = change_address (operands[1], BLKmode, addr);
8819 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8821 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8823 /* Default code only uses r0 as a return value, but we could
8824 be using anything up to 4 registers. */
8825 if (REGNO (src) == R0_REGNUM)
8826 src = gen_rtx_REG (TImode, R0_REGNUM);
8828 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8830 size += GET_MODE_SIZE (GET_MODE (src));
8833 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8838 for (i = 0; i < XVECLEN (par, 0); i++)
8840 HOST_WIDE_INT offset = 0;
8841 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8844 emit_move_insn (addr, plus_constant (addr, size));
8846 mem = change_address (mem, GET_MODE (reg), NULL);
8847 if (REGNO (reg) == R0_REGNUM)
8849 /* On thumb we have to use a write-back instruction. */
8850 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8851 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8852 size = TARGET_ARM ? 16 : 0;
8856 emit_move_insn (mem, reg);
8857 size = GET_MODE_SIZE (GET_MODE (reg));
8861 /* The optimizer does not know that the call sets the function value
8862 registers we stored in the result block. We avoid problems by
8863 claiming that all hard registers are used and clobbered at this
8865 emit_insn (gen_blockage ());
8871 (define_expand "untyped_return"
8872 [(match_operand:BLK 0 "memory_operand" "")
8873 (match_operand 1 "" "")]
8878 rtx addr = gen_reg_rtx (Pmode);
8882 emit_move_insn (addr, XEXP (operands[0], 0));
8883 mem = change_address (operands[0], BLKmode, addr);
8885 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8887 HOST_WIDE_INT offset = 0;
8888 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8891 emit_move_insn (addr, plus_constant (addr, size));
8893 mem = change_address (mem, GET_MODE (reg), NULL);
8894 if (REGNO (reg) == R0_REGNUM)
8896 /* On thumb we have to use a write-back instruction. */
8897 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8898 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8899 size = TARGET_ARM ? 16 : 0;
8903 emit_move_insn (reg, mem);
8904 size = GET_MODE_SIZE (GET_MODE (reg));
8908 /* Emit USE insns before the return. */
8909 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8910 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8912 /* Construct the return. */
8913 expand_naked_return ();
8919 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8920 ;; all of memory. This blocks insns from being moved across this point.
8922 (define_insn "blockage"
8923 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8926 [(set_attr "length" "0")
8927 (set_attr "type" "block")]
8930 (define_expand "casesi"
8931 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8932 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8933 (match_operand:SI 2 "const_int_operand" "") ; total range
8934 (match_operand:SI 3 "" "") ; table label
8935 (match_operand:SI 4 "" "")] ; Out of range label
8940 if (operands[1] != const0_rtx)
8942 reg = gen_reg_rtx (SImode);
8944 emit_insn (gen_addsi3 (reg, operands[0],
8945 GEN_INT (-INTVAL (operands[1]))));
8949 if (!const_ok_for_arm (INTVAL (operands[2])))
8950 operands[2] = force_reg (SImode, operands[2]);
8954 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8955 operands[3], operands[4]));
8959 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8960 operands[2], operands[3], operands[4]));
8964 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8965 operands[3], operands[4]));
8971 ;; The USE in this pattern is needed to tell flow analysis that this is
8972 ;; a CASESI insn. It has no other purpose.
8973 (define_insn "arm_casesi_internal"
8974 [(parallel [(set (pc)
8976 (leu (match_operand:SI 0 "s_register_operand" "r")
8977 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8978 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8979 (label_ref (match_operand 2 "" ""))))
8980 (label_ref (match_operand 3 "" ""))))
8981 (clobber (reg:CC CC_REGNUM))
8982 (use (label_ref (match_dup 2)))])]
8986 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8987 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8989 [(set_attr "conds" "clob")
8990 (set_attr "length" "12")]
8993 (define_expand "indirect_jump"
8995 (match_operand:SI 0 "s_register_operand" ""))]
8998 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8999 address and use bx. */
9003 tmp = gen_reg_rtx (SImode);
9004 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9010 ;; NB Never uses BX.
9011 (define_insn "*arm_indirect_jump"
9013 (match_operand:SI 0 "s_register_operand" "r"))]
9015 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9016 [(set_attr "predicable" "yes")]
9019 (define_insn "*load_indirect_jump"
9021 (match_operand:SI 0 "memory_operand" "m"))]
9023 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9024 [(set_attr "type" "load1")
9025 (set_attr "pool_range" "4096")
9026 (set_attr "neg_pool_range" "4084")
9027 (set_attr "predicable" "yes")]
9030 ;; NB Never uses BX.
9031 (define_insn "*thumb1_indirect_jump"
9033 (match_operand:SI 0 "register_operand" "l*r"))]
9036 [(set_attr "conds" "clob")
9037 (set_attr "length" "2")]
9047 if (TARGET_UNIFIED_ASM)
9050 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9051 return \"mov\\tr8, r8\";
9053 [(set (attr "length")
9054 (if_then_else (eq_attr "is_thumb" "yes")
9060 ;; Patterns to allow combination of arithmetic, cond code and shifts
9062 (define_insn "*arith_shiftsi"
9063 [(set (match_operand:SI 0 "s_register_operand" "=r")
9064 (match_operator:SI 1 "shiftable_operator"
9065 [(match_operator:SI 3 "shift_operator"
9066 [(match_operand:SI 4 "s_register_operand" "r")
9067 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9068 (match_operand:SI 2 "s_register_operand" "r")]))]
9070 "%i1%?\\t%0, %2, %4%S3"
9071 [(set_attr "predicable" "yes")
9072 (set_attr "shift" "4")
9073 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9074 (const_string "alu_shift")
9075 (const_string "alu_shift_reg")))]
9079 [(set (match_operand:SI 0 "s_register_operand" "")
9080 (match_operator:SI 1 "shiftable_operator"
9081 [(match_operator:SI 2 "shiftable_operator"
9082 [(match_operator:SI 3 "shift_operator"
9083 [(match_operand:SI 4 "s_register_operand" "")
9084 (match_operand:SI 5 "reg_or_int_operand" "")])
9085 (match_operand:SI 6 "s_register_operand" "")])
9086 (match_operand:SI 7 "arm_rhs_operand" "")]))
9087 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9090 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9093 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9096 (define_insn "*arith_shiftsi_compare0"
9097 [(set (reg:CC_NOOV CC_REGNUM)
9098 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9099 [(match_operator:SI 3 "shift_operator"
9100 [(match_operand:SI 4 "s_register_operand" "r")
9101 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9102 (match_operand:SI 2 "s_register_operand" "r")])
9104 (set (match_operand:SI 0 "s_register_operand" "=r")
9105 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9108 "%i1%.\\t%0, %2, %4%S3"
9109 [(set_attr "conds" "set")
9110 (set_attr "shift" "4")
9111 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9112 (const_string "alu_shift")
9113 (const_string "alu_shift_reg")))]
9116 (define_insn "*arith_shiftsi_compare0_scratch"
9117 [(set (reg:CC_NOOV CC_REGNUM)
9118 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9119 [(match_operator:SI 3 "shift_operator"
9120 [(match_operand:SI 4 "s_register_operand" "r")
9121 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9122 (match_operand:SI 2 "s_register_operand" "r")])
9124 (clobber (match_scratch:SI 0 "=r"))]
9126 "%i1%.\\t%0, %2, %4%S3"
9127 [(set_attr "conds" "set")
9128 (set_attr "shift" "4")
9129 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9130 (const_string "alu_shift")
9131 (const_string "alu_shift_reg")))]
9134 (define_insn "*sub_shiftsi"
9135 [(set (match_operand:SI 0 "s_register_operand" "=r")
9136 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9137 (match_operator:SI 2 "shift_operator"
9138 [(match_operand:SI 3 "s_register_operand" "r")
9139 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9141 "sub%?\\t%0, %1, %3%S2"
9142 [(set_attr "predicable" "yes")
9143 (set_attr "shift" "3")
9144 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9145 (const_string "alu_shift")
9146 (const_string "alu_shift_reg")))]
9149 (define_insn "*sub_shiftsi_compare0"
9150 [(set (reg:CC_NOOV CC_REGNUM)
9152 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9153 (match_operator:SI 2 "shift_operator"
9154 [(match_operand:SI 3 "s_register_operand" "r")
9155 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9157 (set (match_operand:SI 0 "s_register_operand" "=r")
9158 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9161 "sub%.\\t%0, %1, %3%S2"
9162 [(set_attr "conds" "set")
9163 (set_attr "shift" "3")
9164 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9165 (const_string "alu_shift")
9166 (const_string "alu_shift_reg")))]
9169 (define_insn "*sub_shiftsi_compare0_scratch"
9170 [(set (reg:CC_NOOV CC_REGNUM)
9172 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9173 (match_operator:SI 2 "shift_operator"
9174 [(match_operand:SI 3 "s_register_operand" "r")
9175 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9177 (clobber (match_scratch:SI 0 "=r"))]
9179 "sub%.\\t%0, %1, %3%S2"
9180 [(set_attr "conds" "set")
9181 (set_attr "shift" "3")
9182 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9183 (const_string "alu_shift")
9184 (const_string "alu_shift_reg")))]
9189 (define_insn "*and_scc"
9190 [(set (match_operand:SI 0 "s_register_operand" "=r")
9191 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9192 [(match_operand 3 "cc_register" "") (const_int 0)])
9193 (match_operand:SI 2 "s_register_operand" "r")))]
9195 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9196 [(set_attr "conds" "use")
9197 (set_attr "length" "8")]
9200 (define_insn "*ior_scc"
9201 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9202 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9203 [(match_operand 3 "cc_register" "") (const_int 0)])
9204 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9208 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9209 [(set_attr "conds" "use")
9210 (set_attr "length" "4,8")]
9213 (define_insn "*compare_scc"
9214 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9215 (match_operator:SI 1 "arm_comparison_operator"
9216 [(match_operand:SI 2 "s_register_operand" "r,r")
9217 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9218 (clobber (reg:CC CC_REGNUM))]
9221 if (operands[3] == const0_rtx)
9223 if (GET_CODE (operands[1]) == LT)
9224 return \"mov\\t%0, %2, lsr #31\";
9226 if (GET_CODE (operands[1]) == GE)
9227 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9229 if (GET_CODE (operands[1]) == EQ)
9230 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9233 if (GET_CODE (operands[1]) == NE)
9235 if (which_alternative == 1)
9236 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9237 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9239 if (which_alternative == 1)
9240 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9242 output_asm_insn (\"cmp\\t%2, %3\", operands);
9243 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9245 [(set_attr "conds" "clob")
9246 (set_attr "length" "12")]
9249 (define_insn "*cond_move"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9251 (if_then_else:SI (match_operator 3 "equality_operator"
9252 [(match_operator 4 "arm_comparison_operator"
9253 [(match_operand 5 "cc_register" "") (const_int 0)])
9255 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9256 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9259 if (GET_CODE (operands[3]) == NE)
9261 if (which_alternative != 1)
9262 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9263 if (which_alternative != 0)
9264 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9267 if (which_alternative != 0)
9268 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9269 if (which_alternative != 1)
9270 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9273 [(set_attr "conds" "use")
9274 (set_attr "length" "4,4,8")]
9277 (define_insn "*cond_arith"
9278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9279 (match_operator:SI 5 "shiftable_operator"
9280 [(match_operator:SI 4 "arm_comparison_operator"
9281 [(match_operand:SI 2 "s_register_operand" "r,r")
9282 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9283 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9284 (clobber (reg:CC CC_REGNUM))]
9287 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9288 return \"%i5\\t%0, %1, %2, lsr #31\";
9290 output_asm_insn (\"cmp\\t%2, %3\", operands);
9291 if (GET_CODE (operands[5]) == AND)
9292 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9293 else if (GET_CODE (operands[5]) == MINUS)
9294 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9295 else if (which_alternative != 0)
9296 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9297 return \"%i5%d4\\t%0, %1, #1\";
9299 [(set_attr "conds" "clob")
9300 (set_attr "length" "12")]
9303 (define_insn "*cond_sub"
9304 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9305 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9306 (match_operator:SI 4 "arm_comparison_operator"
9307 [(match_operand:SI 2 "s_register_operand" "r,r")
9308 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9309 (clobber (reg:CC CC_REGNUM))]
9312 output_asm_insn (\"cmp\\t%2, %3\", operands);
9313 if (which_alternative != 0)
9314 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9315 return \"sub%d4\\t%0, %1, #1\";
9317 [(set_attr "conds" "clob")
9318 (set_attr "length" "8,12")]
9321 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9322 (define_insn "*cmp_ite0"
9323 [(set (match_operand 6 "dominant_cc_register" "")
9326 (match_operator 4 "arm_comparison_operator"
9327 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9328 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9329 (match_operator:SI 5 "arm_comparison_operator"
9330 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9331 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9337 static const char * const opcodes[4][2] =
9339 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9340 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9341 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9342 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9343 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9344 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9345 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9346 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9349 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9351 return opcodes[which_alternative][swap];
9353 [(set_attr "conds" "set")
9354 (set_attr "length" "8")]
9357 (define_insn "*cmp_ite1"
9358 [(set (match_operand 6 "dominant_cc_register" "")
9361 (match_operator 4 "arm_comparison_operator"
9362 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9363 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9364 (match_operator:SI 5 "arm_comparison_operator"
9365 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9366 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9372 static const char * const opcodes[4][2] =
9374 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9375 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9376 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9377 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9378 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9379 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9380 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9381 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9384 comparison_dominates_p (GET_CODE (operands[5]),
9385 reverse_condition (GET_CODE (operands[4])));
9387 return opcodes[which_alternative][swap];
9389 [(set_attr "conds" "set")
9390 (set_attr "length" "8")]
9393 (define_insn "*cmp_and"
9394 [(set (match_operand 6 "dominant_cc_register" "")
9397 (match_operator 4 "arm_comparison_operator"
9398 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9399 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9400 (match_operator:SI 5 "arm_comparison_operator"
9401 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9402 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9407 static const char *const opcodes[4][2] =
9409 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9410 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9411 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9412 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9413 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9414 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9415 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9416 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9419 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9421 return opcodes[which_alternative][swap];
9423 [(set_attr "conds" "set")
9424 (set_attr "predicable" "no")
9425 (set_attr "length" "8")]
9428 (define_insn "*cmp_ior"
9429 [(set (match_operand 6 "dominant_cc_register" "")
9432 (match_operator 4 "arm_comparison_operator"
9433 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9434 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9435 (match_operator:SI 5 "arm_comparison_operator"
9436 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9437 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9442 static const char *const opcodes[4][2] =
9444 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9445 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9446 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9447 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9448 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9449 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9450 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9451 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9454 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9456 return opcodes[which_alternative][swap];
9459 [(set_attr "conds" "set")
9460 (set_attr "length" "8")]
9463 (define_insn_and_split "*ior_scc_scc"
9464 [(set (match_operand:SI 0 "s_register_operand" "=r")
9465 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9466 [(match_operand:SI 1 "s_register_operand" "r")
9467 (match_operand:SI 2 "arm_add_operand" "rIL")])
9468 (match_operator:SI 6 "arm_comparison_operator"
9469 [(match_operand:SI 4 "s_register_operand" "r")
9470 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9471 (clobber (reg:CC CC_REGNUM))]
9473 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9476 "TARGET_ARM && reload_completed"
9480 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9481 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9483 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9485 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9488 [(set_attr "conds" "clob")
9489 (set_attr "length" "16")])
9491 ; If the above pattern is followed by a CMP insn, then the compare is
9492 ; redundant, since we can rework the conditional instruction that follows.
9493 (define_insn_and_split "*ior_scc_scc_cmp"
9494 [(set (match_operand 0 "dominant_cc_register" "")
9495 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9496 [(match_operand:SI 1 "s_register_operand" "r")
9497 (match_operand:SI 2 "arm_add_operand" "rIL")])
9498 (match_operator:SI 6 "arm_comparison_operator"
9499 [(match_operand:SI 4 "s_register_operand" "r")
9500 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9502 (set (match_operand:SI 7 "s_register_operand" "=r")
9503 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9504 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9507 "TARGET_ARM && reload_completed"
9511 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9512 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9514 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9516 [(set_attr "conds" "set")
9517 (set_attr "length" "16")])
9519 (define_insn_and_split "*and_scc_scc"
9520 [(set (match_operand:SI 0 "s_register_operand" "=r")
9521 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9522 [(match_operand:SI 1 "s_register_operand" "r")
9523 (match_operand:SI 2 "arm_add_operand" "rIL")])
9524 (match_operator:SI 6 "arm_comparison_operator"
9525 [(match_operand:SI 4 "s_register_operand" "r")
9526 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9527 (clobber (reg:CC CC_REGNUM))]
9529 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9532 "TARGET_ARM && reload_completed
9533 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9538 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9539 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9541 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9543 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "16")])
9549 ; If the above pattern is followed by a CMP insn, then the compare is
9550 ; redundant, since we can rework the conditional instruction that follows.
9551 (define_insn_and_split "*and_scc_scc_cmp"
9552 [(set (match_operand 0 "dominant_cc_register" "")
9553 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9554 [(match_operand:SI 1 "s_register_operand" "r")
9555 (match_operand:SI 2 "arm_add_operand" "rIL")])
9556 (match_operator:SI 6 "arm_comparison_operator"
9557 [(match_operand:SI 4 "s_register_operand" "r")
9558 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9560 (set (match_operand:SI 7 "s_register_operand" "=r")
9561 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9562 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9565 "TARGET_ARM && reload_completed"
9569 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9570 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9572 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9574 [(set_attr "conds" "set")
9575 (set_attr "length" "16")])
9577 ;; If there is no dominance in the comparison, then we can still save an
9578 ;; instruction in the AND case, since we can know that the second compare
9579 ;; need only zero the value if false (if true, then the value is already
9581 (define_insn_and_split "*and_scc_scc_nodom"
9582 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9583 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9584 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9585 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9586 (match_operator:SI 6 "arm_comparison_operator"
9587 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9588 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9589 (clobber (reg:CC CC_REGNUM))]
9591 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9594 "TARGET_ARM && reload_completed"
9595 [(parallel [(set (match_dup 0)
9596 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9597 (clobber (reg:CC CC_REGNUM))])
9598 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9600 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9603 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9604 operands[4], operands[5]),
9606 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9608 [(set_attr "conds" "clob")
9609 (set_attr "length" "20")])
9612 [(set (reg:CC_NOOV CC_REGNUM)
9613 (compare:CC_NOOV (ior:SI
9614 (and:SI (match_operand:SI 0 "s_register_operand" "")
9616 (match_operator:SI 1 "comparison_operator"
9617 [(match_operand:SI 2 "s_register_operand" "")
9618 (match_operand:SI 3 "arm_add_operand" "")]))
9620 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9623 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9625 (set (reg:CC_NOOV CC_REGNUM)
9626 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9631 [(set (reg:CC_NOOV CC_REGNUM)
9632 (compare:CC_NOOV (ior:SI
9633 (match_operator:SI 1 "comparison_operator"
9634 [(match_operand:SI 2 "s_register_operand" "")
9635 (match_operand:SI 3 "arm_add_operand" "")])
9636 (and:SI (match_operand:SI 0 "s_register_operand" "")
9639 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9642 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9644 (set (reg:CC_NOOV CC_REGNUM)
9645 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9648 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9650 (define_insn "*negscc"
9651 [(set (match_operand:SI 0 "s_register_operand" "=r")
9652 (neg:SI (match_operator 3 "arm_comparison_operator"
9653 [(match_operand:SI 1 "s_register_operand" "r")
9654 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9655 (clobber (reg:CC CC_REGNUM))]
9658 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9659 return \"mov\\t%0, %1, asr #31\";
9661 if (GET_CODE (operands[3]) == NE)
9662 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9664 output_asm_insn (\"cmp\\t%1, %2\", operands);
9665 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9666 return \"mvn%d3\\t%0, #0\";
9668 [(set_attr "conds" "clob")
9669 (set_attr "length" "12")]
9672 (define_insn "movcond"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9677 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9678 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9679 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9680 (clobber (reg:CC CC_REGNUM))]
9683 if (GET_CODE (operands[5]) == LT
9684 && (operands[4] == const0_rtx))
9686 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9688 if (operands[2] == const0_rtx)
9689 return \"and\\t%0, %1, %3, asr #31\";
9690 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9692 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9694 if (operands[1] == const0_rtx)
9695 return \"bic\\t%0, %2, %3, asr #31\";
9696 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9698 /* The only case that falls through to here is when both ops 1 & 2
9702 if (GET_CODE (operands[5]) == GE
9703 && (operands[4] == const0_rtx))
9705 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9707 if (operands[2] == const0_rtx)
9708 return \"bic\\t%0, %1, %3, asr #31\";
9709 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9711 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9713 if (operands[1] == const0_rtx)
9714 return \"and\\t%0, %2, %3, asr #31\";
9715 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9717 /* The only case that falls through to here is when both ops 1 & 2
9720 if (GET_CODE (operands[4]) == CONST_INT
9721 && !const_ok_for_arm (INTVAL (operands[4])))
9722 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9724 output_asm_insn (\"cmp\\t%3, %4\", operands);
9725 if (which_alternative != 0)
9726 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9727 if (which_alternative != 1)
9728 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9731 [(set_attr "conds" "clob")
9732 (set_attr "length" "8,8,12")]
9735 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9737 (define_insn "*ifcompare_plus_move"
9738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9739 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9740 [(match_operand:SI 4 "s_register_operand" "r,r")
9741 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9743 (match_operand:SI 2 "s_register_operand" "r,r")
9744 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9745 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9746 (clobber (reg:CC CC_REGNUM))]
9749 [(set_attr "conds" "clob")
9750 (set_attr "length" "8,12")]
9753 (define_insn "*if_plus_move"
9754 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9756 (match_operator 4 "arm_comparison_operator"
9757 [(match_operand 5 "cc_register" "") (const_int 0)])
9759 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9760 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9761 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9765 sub%d4\\t%0, %2, #%n3
9766 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9767 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9768 [(set_attr "conds" "use")
9769 (set_attr "length" "4,4,8,8")
9770 (set_attr "type" "*,*,*,*")]
9773 (define_insn "*ifcompare_move_plus"
9774 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9775 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9776 [(match_operand:SI 4 "s_register_operand" "r,r")
9777 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9778 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9780 (match_operand:SI 2 "s_register_operand" "r,r")
9781 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9782 (clobber (reg:CC CC_REGNUM))]
9785 [(set_attr "conds" "clob")
9786 (set_attr "length" "8,12")]
9789 (define_insn "*if_move_plus"
9790 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9792 (match_operator 4 "arm_comparison_operator"
9793 [(match_operand 5 "cc_register" "") (const_int 0)])
9794 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9796 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9797 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9801 sub%D4\\t%0, %2, #%n3
9802 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9803 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9804 [(set_attr "conds" "use")
9805 (set_attr "length" "4,4,8,8")
9806 (set_attr "type" "*,*,*,*")]
9809 (define_insn "*ifcompare_arith_arith"
9810 [(set (match_operand:SI 0 "s_register_operand" "=r")
9811 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9812 [(match_operand:SI 5 "s_register_operand" "r")
9813 (match_operand:SI 6 "arm_add_operand" "rIL")])
9814 (match_operator:SI 8 "shiftable_operator"
9815 [(match_operand:SI 1 "s_register_operand" "r")
9816 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9817 (match_operator:SI 7 "shiftable_operator"
9818 [(match_operand:SI 3 "s_register_operand" "r")
9819 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9820 (clobber (reg:CC CC_REGNUM))]
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "12")]
9827 (define_insn "*if_arith_arith"
9828 [(set (match_operand:SI 0 "s_register_operand" "=r")
9829 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9830 [(match_operand 8 "cc_register" "") (const_int 0)])
9831 (match_operator:SI 6 "shiftable_operator"
9832 [(match_operand:SI 1 "s_register_operand" "r")
9833 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9834 (match_operator:SI 7 "shiftable_operator"
9835 [(match_operand:SI 3 "s_register_operand" "r")
9836 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9838 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9839 [(set_attr "conds" "use")
9840 (set_attr "length" "8")]
9843 (define_insn "*ifcompare_arith_move"
9844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9845 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9846 [(match_operand:SI 2 "s_register_operand" "r,r")
9847 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9848 (match_operator:SI 7 "shiftable_operator"
9849 [(match_operand:SI 4 "s_register_operand" "r,r")
9850 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9851 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9852 (clobber (reg:CC CC_REGNUM))]
9855 /* If we have an operation where (op x 0) is the identity operation and
9856 the conditional operator is LT or GE and we are comparing against zero and
9857 everything is in registers then we can do this in two instructions. */
9858 if (operands[3] == const0_rtx
9859 && GET_CODE (operands[7]) != AND
9860 && GET_CODE (operands[5]) == REG
9861 && GET_CODE (operands[1]) == REG
9862 && REGNO (operands[1]) == REGNO (operands[4])
9863 && REGNO (operands[4]) != REGNO (operands[0]))
9865 if (GET_CODE (operands[6]) == LT)
9866 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9867 else if (GET_CODE (operands[6]) == GE)
9868 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9870 if (GET_CODE (operands[3]) == CONST_INT
9871 && !const_ok_for_arm (INTVAL (operands[3])))
9872 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9874 output_asm_insn (\"cmp\\t%2, %3\", operands);
9875 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9876 if (which_alternative != 0)
9877 return \"mov%D6\\t%0, %1\";
9880 [(set_attr "conds" "clob")
9881 (set_attr "length" "8,12")]
9884 (define_insn "*if_arith_move"
9885 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9886 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9887 [(match_operand 6 "cc_register" "") (const_int 0)])
9888 (match_operator:SI 5 "shiftable_operator"
9889 [(match_operand:SI 2 "s_register_operand" "r,r")
9890 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9891 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9895 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9896 [(set_attr "conds" "use")
9897 (set_attr "length" "4,8")
9898 (set_attr "type" "*,*")]
9901 (define_insn "*ifcompare_move_arith"
9902 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9903 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9904 [(match_operand:SI 4 "s_register_operand" "r,r")
9905 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9906 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9907 (match_operator:SI 7 "shiftable_operator"
9908 [(match_operand:SI 2 "s_register_operand" "r,r")
9909 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9910 (clobber (reg:CC CC_REGNUM))]
9913 /* If we have an operation where (op x 0) is the identity operation and
9914 the conditional operator is LT or GE and we are comparing against zero and
9915 everything is in registers then we can do this in two instructions */
9916 if (operands[5] == const0_rtx
9917 && GET_CODE (operands[7]) != AND
9918 && GET_CODE (operands[3]) == REG
9919 && GET_CODE (operands[1]) == REG
9920 && REGNO (operands[1]) == REGNO (operands[2])
9921 && REGNO (operands[2]) != REGNO (operands[0]))
9923 if (GET_CODE (operands[6]) == GE)
9924 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9925 else if (GET_CODE (operands[6]) == LT)
9926 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9929 if (GET_CODE (operands[5]) == CONST_INT
9930 && !const_ok_for_arm (INTVAL (operands[5])))
9931 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9933 output_asm_insn (\"cmp\\t%4, %5\", operands);
9935 if (which_alternative != 0)
9936 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9937 return \"%I7%D6\\t%0, %2, %3\";
9939 [(set_attr "conds" "clob")
9940 (set_attr "length" "8,12")]
9943 (define_insn "*if_move_arith"
9944 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9946 (match_operator 4 "arm_comparison_operator"
9947 [(match_operand 6 "cc_register" "") (const_int 0)])
9948 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9949 (match_operator:SI 5 "shiftable_operator"
9950 [(match_operand:SI 2 "s_register_operand" "r,r")
9951 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9955 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9956 [(set_attr "conds" "use")
9957 (set_attr "length" "4,8")
9958 (set_attr "type" "*,*")]
9961 (define_insn "*ifcompare_move_not"
9962 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9964 (match_operator 5 "arm_comparison_operator"
9965 [(match_operand:SI 3 "s_register_operand" "r,r")
9966 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9967 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9969 (match_operand:SI 2 "s_register_operand" "r,r"))))
9970 (clobber (reg:CC CC_REGNUM))]
9973 [(set_attr "conds" "clob")
9974 (set_attr "length" "8,12")]
9977 (define_insn "*if_move_not"
9978 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9980 (match_operator 4 "arm_comparison_operator"
9981 [(match_operand 3 "cc_register" "") (const_int 0)])
9982 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9983 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9987 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9988 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9989 [(set_attr "conds" "use")
9990 (set_attr "length" "4,8,8")]
9993 (define_insn "*ifcompare_not_move"
9994 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9996 (match_operator 5 "arm_comparison_operator"
9997 [(match_operand:SI 3 "s_register_operand" "r,r")
9998 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10000 (match_operand:SI 2 "s_register_operand" "r,r"))
10001 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10002 (clobber (reg:CC CC_REGNUM))]
10005 [(set_attr "conds" "clob")
10006 (set_attr "length" "8,12")]
10009 (define_insn "*if_not_move"
10010 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10012 (match_operator 4 "arm_comparison_operator"
10013 [(match_operand 3 "cc_register" "") (const_int 0)])
10014 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10015 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10019 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10020 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10021 [(set_attr "conds" "use")
10022 (set_attr "length" "4,8,8")]
10025 (define_insn "*ifcompare_shift_move"
10026 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10028 (match_operator 6 "arm_comparison_operator"
10029 [(match_operand:SI 4 "s_register_operand" "r,r")
10030 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10031 (match_operator:SI 7 "shift_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r,r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10034 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10035 (clobber (reg:CC CC_REGNUM))]
10038 [(set_attr "conds" "clob")
10039 (set_attr "length" "8,12")]
10042 (define_insn "*if_shift_move"
10043 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10045 (match_operator 5 "arm_comparison_operator"
10046 [(match_operand 6 "cc_register" "") (const_int 0)])
10047 (match_operator:SI 4 "shift_operator"
10048 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10049 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10050 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10054 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10055 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10056 [(set_attr "conds" "use")
10057 (set_attr "shift" "2")
10058 (set_attr "length" "4,8,8")
10059 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10060 (const_string "alu_shift")
10061 (const_string "alu_shift_reg")))]
10064 (define_insn "*ifcompare_move_shift"
10065 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10067 (match_operator 6 "arm_comparison_operator"
10068 [(match_operand:SI 4 "s_register_operand" "r,r")
10069 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10070 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10071 (match_operator:SI 7 "shift_operator"
10072 [(match_operand:SI 2 "s_register_operand" "r,r")
10073 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10074 (clobber (reg:CC CC_REGNUM))]
10077 [(set_attr "conds" "clob")
10078 (set_attr "length" "8,12")]
10081 (define_insn "*if_move_shift"
10082 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10084 (match_operator 5 "arm_comparison_operator"
10085 [(match_operand 6 "cc_register" "") (const_int 0)])
10086 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10087 (match_operator:SI 4 "shift_operator"
10088 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10089 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10093 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10094 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10095 [(set_attr "conds" "use")
10096 (set_attr "shift" "2")
10097 (set_attr "length" "4,8,8")
10098 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10099 (const_string "alu_shift")
10100 (const_string "alu_shift_reg")))]
10103 (define_insn "*ifcompare_shift_shift"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r")
10106 (match_operator 7 "arm_comparison_operator"
10107 [(match_operand:SI 5 "s_register_operand" "r")
10108 (match_operand:SI 6 "arm_add_operand" "rIL")])
10109 (match_operator:SI 8 "shift_operator"
10110 [(match_operand:SI 1 "s_register_operand" "r")
10111 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10112 (match_operator:SI 9 "shift_operator"
10113 [(match_operand:SI 3 "s_register_operand" "r")
10114 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10115 (clobber (reg:CC CC_REGNUM))]
10118 [(set_attr "conds" "clob")
10119 (set_attr "length" "12")]
10122 (define_insn "*if_shift_shift"
10123 [(set (match_operand:SI 0 "s_register_operand" "=r")
10125 (match_operator 5 "arm_comparison_operator"
10126 [(match_operand 8 "cc_register" "") (const_int 0)])
10127 (match_operator:SI 6 "shift_operator"
10128 [(match_operand:SI 1 "s_register_operand" "r")
10129 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10130 (match_operator:SI 7 "shift_operator"
10131 [(match_operand:SI 3 "s_register_operand" "r")
10132 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10134 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10135 [(set_attr "conds" "use")
10136 (set_attr "shift" "1")
10137 (set_attr "length" "8")
10138 (set (attr "type") (if_then_else
10139 (and (match_operand 2 "const_int_operand" "")
10140 (match_operand 4 "const_int_operand" ""))
10141 (const_string "alu_shift")
10142 (const_string "alu_shift_reg")))]
10145 (define_insn "*ifcompare_not_arith"
10146 [(set (match_operand:SI 0 "s_register_operand" "=r")
10148 (match_operator 6 "arm_comparison_operator"
10149 [(match_operand:SI 4 "s_register_operand" "r")
10150 (match_operand:SI 5 "arm_add_operand" "rIL")])
10151 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10152 (match_operator:SI 7 "shiftable_operator"
10153 [(match_operand:SI 2 "s_register_operand" "r")
10154 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10155 (clobber (reg:CC CC_REGNUM))]
10158 [(set_attr "conds" "clob")
10159 (set_attr "length" "12")]
10162 (define_insn "*if_not_arith"
10163 [(set (match_operand:SI 0 "s_register_operand" "=r")
10165 (match_operator 5 "arm_comparison_operator"
10166 [(match_operand 4 "cc_register" "") (const_int 0)])
10167 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10168 (match_operator:SI 6 "shiftable_operator"
10169 [(match_operand:SI 2 "s_register_operand" "r")
10170 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10172 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10173 [(set_attr "conds" "use")
10174 (set_attr "length" "8")]
10177 (define_insn "*ifcompare_arith_not"
10178 [(set (match_operand:SI 0 "s_register_operand" "=r")
10180 (match_operator 6 "arm_comparison_operator"
10181 [(match_operand:SI 4 "s_register_operand" "r")
10182 (match_operand:SI 5 "arm_add_operand" "rIL")])
10183 (match_operator:SI 7 "shiftable_operator"
10184 [(match_operand:SI 2 "s_register_operand" "r")
10185 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10186 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10187 (clobber (reg:CC CC_REGNUM))]
10190 [(set_attr "conds" "clob")
10191 (set_attr "length" "12")]
10194 (define_insn "*if_arith_not"
10195 [(set (match_operand:SI 0 "s_register_operand" "=r")
10197 (match_operator 5 "arm_comparison_operator"
10198 [(match_operand 4 "cc_register" "") (const_int 0)])
10199 (match_operator:SI 6 "shiftable_operator"
10200 [(match_operand:SI 2 "s_register_operand" "r")
10201 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10202 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10204 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10205 [(set_attr "conds" "use")
10206 (set_attr "length" "8")]
10209 (define_insn "*ifcompare_neg_move"
10210 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10212 (match_operator 5 "arm_comparison_operator"
10213 [(match_operand:SI 3 "s_register_operand" "r,r")
10214 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10215 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10216 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10217 (clobber (reg:CC CC_REGNUM))]
10220 [(set_attr "conds" "clob")
10221 (set_attr "length" "8,12")]
10224 (define_insn "*if_neg_move"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10227 (match_operator 4 "arm_comparison_operator"
10228 [(match_operand 3 "cc_register" "") (const_int 0)])
10229 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10230 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10233 rsb%d4\\t%0, %2, #0
10234 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10235 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10236 [(set_attr "conds" "use")
10237 (set_attr "length" "4,8,8")]
10240 (define_insn "*ifcompare_move_neg"
10241 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10243 (match_operator 5 "arm_comparison_operator"
10244 [(match_operand:SI 3 "s_register_operand" "r,r")
10245 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10246 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10247 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10248 (clobber (reg:CC CC_REGNUM))]
10251 [(set_attr "conds" "clob")
10252 (set_attr "length" "8,12")]
10255 (define_insn "*if_move_neg"
10256 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10258 (match_operator 4 "arm_comparison_operator"
10259 [(match_operand 3 "cc_register" "") (const_int 0)])
10260 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10261 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10264 rsb%D4\\t%0, %2, #0
10265 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10266 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10267 [(set_attr "conds" "use")
10268 (set_attr "length" "4,8,8")]
10271 (define_insn "*arith_adjacentmem"
10272 [(set (match_operand:SI 0 "s_register_operand" "=r")
10273 (match_operator:SI 1 "shiftable_operator"
10274 [(match_operand:SI 2 "memory_operand" "m")
10275 (match_operand:SI 3 "memory_operand" "m")]))
10276 (clobber (match_scratch:SI 4 "=r"))]
10277 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10283 HOST_WIDE_INT val1 = 0, val2 = 0;
10285 if (REGNO (operands[0]) > REGNO (operands[4]))
10287 ldm[1] = operands[4];
10288 ldm[2] = operands[0];
10292 ldm[1] = operands[0];
10293 ldm[2] = operands[4];
10296 base_reg = XEXP (operands[2], 0);
10298 if (!REG_P (base_reg))
10300 val1 = INTVAL (XEXP (base_reg, 1));
10301 base_reg = XEXP (base_reg, 0);
10304 if (!REG_P (XEXP (operands[3], 0)))
10305 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10307 arith[0] = operands[0];
10308 arith[3] = operands[1];
10322 if (val1 !=0 && val2 != 0)
10326 if (val1 == 4 || val2 == 4)
10327 /* Other val must be 8, since we know they are adjacent and neither
10329 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10330 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10332 ldm[0] = ops[0] = operands[4];
10334 ops[2] = GEN_INT (val1);
10335 output_add_immediate (ops);
10337 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10339 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10343 /* Offset is out of range for a single add, so use two ldr. */
10346 ops[2] = GEN_INT (val1);
10347 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10349 ops[2] = GEN_INT (val2);
10350 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10353 else if (val1 != 0)
10356 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10358 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10363 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10365 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10367 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10370 [(set_attr "length" "12")
10371 (set_attr "predicable" "yes")
10372 (set_attr "type" "load1")]
10375 ; This pattern is never tried by combine, so do it as a peephole
10378 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10379 (match_operand:SI 1 "arm_general_register_operand" ""))
10380 (set (reg:CC CC_REGNUM)
10381 (compare:CC (match_dup 1) (const_int 0)))]
10383 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10384 (set (match_dup 0) (match_dup 1))])]
10388 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10389 ; reversed, check that the memory references aren't volatile.
10392 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10393 (match_operand:SI 4 "memory_operand" "m"))
10394 (set (match_operand:SI 1 "s_register_operand" "=rk")
10395 (match_operand:SI 5 "memory_operand" "m"))
10396 (set (match_operand:SI 2 "s_register_operand" "=rk")
10397 (match_operand:SI 6 "memory_operand" "m"))
10398 (set (match_operand:SI 3 "s_register_operand" "=rk")
10399 (match_operand:SI 7 "memory_operand" "m"))]
10400 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10402 return emit_ldm_seq (operands, 4);
10407 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10408 (match_operand:SI 3 "memory_operand" "m"))
10409 (set (match_operand:SI 1 "s_register_operand" "=rk")
10410 (match_operand:SI 4 "memory_operand" "m"))
10411 (set (match_operand:SI 2 "s_register_operand" "=rk")
10412 (match_operand:SI 5 "memory_operand" "m"))]
10413 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10415 return emit_ldm_seq (operands, 3);
10420 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10421 (match_operand:SI 2 "memory_operand" "m"))
10422 (set (match_operand:SI 1 "s_register_operand" "=rk")
10423 (match_operand:SI 3 "memory_operand" "m"))]
10424 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10426 return emit_ldm_seq (operands, 2);
10431 [(set (match_operand:SI 4 "memory_operand" "=m")
10432 (match_operand:SI 0 "s_register_operand" "rk"))
10433 (set (match_operand:SI 5 "memory_operand" "=m")
10434 (match_operand:SI 1 "s_register_operand" "rk"))
10435 (set (match_operand:SI 6 "memory_operand" "=m")
10436 (match_operand:SI 2 "s_register_operand" "rk"))
10437 (set (match_operand:SI 7 "memory_operand" "=m")
10438 (match_operand:SI 3 "s_register_operand" "rk"))]
10439 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10441 return emit_stm_seq (operands, 4);
10446 [(set (match_operand:SI 3 "memory_operand" "=m")
10447 (match_operand:SI 0 "s_register_operand" "rk"))
10448 (set (match_operand:SI 4 "memory_operand" "=m")
10449 (match_operand:SI 1 "s_register_operand" "rk"))
10450 (set (match_operand:SI 5 "memory_operand" "=m")
10451 (match_operand:SI 2 "s_register_operand" "rk"))]
10452 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10454 return emit_stm_seq (operands, 3);
10459 [(set (match_operand:SI 2 "memory_operand" "=m")
10460 (match_operand:SI 0 "s_register_operand" "rk"))
10461 (set (match_operand:SI 3 "memory_operand" "=m")
10462 (match_operand:SI 1 "s_register_operand" "rk"))]
10463 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10465 return emit_stm_seq (operands, 2);
10470 [(set (match_operand:SI 0 "s_register_operand" "")
10471 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10473 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10474 [(match_operand:SI 3 "s_register_operand" "")
10475 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10476 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10478 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10479 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10484 ;; This split can be used because CC_Z mode implies that the following
10485 ;; branch will be an equality, or an unsigned inequality, so the sign
10486 ;; extension is not needed.
10489 [(set (reg:CC_Z CC_REGNUM)
10491 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10493 (match_operand 1 "const_int_operand" "")))
10494 (clobber (match_scratch:SI 2 ""))]
10496 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10497 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10498 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10499 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10501 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10504 ;; ??? Check the patterns above for Thumb-2 usefulness
10506 (define_expand "prologue"
10507 [(clobber (const_int 0))]
10510 arm_expand_prologue ();
10512 thumb1_expand_prologue ();
10517 (define_expand "epilogue"
10518 [(clobber (const_int 0))]
10521 if (crtl->calls_eh_return)
10522 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10524 thumb1_expand_epilogue ();
10525 else if (USE_RETURN_INSN (FALSE))
10527 emit_jump_insn (gen_return ());
10530 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10532 gen_rtx_RETURN (VOIDmode)),
10533 VUNSPEC_EPILOGUE));
10538 ;; Note - although unspec_volatile's USE all hard registers,
10539 ;; USEs are ignored after relaod has completed. Thus we need
10540 ;; to add an unspec of the link register to ensure that flow
10541 ;; does not think that it is unused by the sibcall branch that
10542 ;; will replace the standard function epilogue.
10543 (define_insn "sibcall_epilogue"
10544 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10545 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10548 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10549 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10550 return arm_output_epilogue (next_nonnote_insn (insn));
10552 ;; Length is absolute worst case
10553 [(set_attr "length" "44")
10554 (set_attr "type" "block")
10555 ;; We don't clobber the conditions, but the potential length of this
10556 ;; operation is sufficient to make conditionalizing the sequence
10557 ;; unlikely to be profitable.
10558 (set_attr "conds" "clob")]
10561 (define_insn "*epilogue_insns"
10562 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10566 return arm_output_epilogue (NULL);
10567 else /* TARGET_THUMB1 */
10568 return thumb_unexpanded_epilogue ();
10570 ; Length is absolute worst case
10571 [(set_attr "length" "44")
10572 (set_attr "type" "block")
10573 ;; We don't clobber the conditions, but the potential length of this
10574 ;; operation is sufficient to make conditionalizing the sequence
10575 ;; unlikely to be profitable.
10576 (set_attr "conds" "clob")]
10579 (define_expand "eh_epilogue"
10580 [(use (match_operand:SI 0 "register_operand" ""))
10581 (use (match_operand:SI 1 "register_operand" ""))
10582 (use (match_operand:SI 2 "register_operand" ""))]
10586 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10587 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10589 rtx ra = gen_rtx_REG (Pmode, 2);
10591 emit_move_insn (ra, operands[2]);
10594 /* This is a hack -- we may have crystalized the function type too
10596 cfun->machine->func_type = 0;
10600 ;; This split is only used during output to reduce the number of patterns
10601 ;; that need assembler instructions adding to them. We allowed the setting
10602 ;; of the conditions to be implicit during rtl generation so that
10603 ;; the conditional compare patterns would work. However this conflicts to
10604 ;; some extent with the conditional data operations, so we have to split them
10607 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10608 ;; conditional execution sufficient?
10611 [(set (match_operand:SI 0 "s_register_operand" "")
10612 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10613 [(match_operand 2 "" "") (match_operand 3 "" "")])
10615 (match_operand 4 "" "")))
10616 (clobber (reg:CC CC_REGNUM))]
10617 "TARGET_ARM && reload_completed"
10618 [(set (match_dup 5) (match_dup 6))
10619 (cond_exec (match_dup 7)
10620 (set (match_dup 0) (match_dup 4)))]
10623 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10624 operands[2], operands[3]);
10625 enum rtx_code rc = GET_CODE (operands[1]);
10627 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10628 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10629 if (mode == CCFPmode || mode == CCFPEmode)
10630 rc = reverse_condition_maybe_unordered (rc);
10632 rc = reverse_condition (rc);
10634 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10639 [(set (match_operand:SI 0 "s_register_operand" "")
10640 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10641 [(match_operand 2 "" "") (match_operand 3 "" "")])
10642 (match_operand 4 "" "")
10644 (clobber (reg:CC CC_REGNUM))]
10645 "TARGET_ARM && reload_completed"
10646 [(set (match_dup 5) (match_dup 6))
10647 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10648 (set (match_dup 0) (match_dup 4)))]
10651 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10652 operands[2], operands[3]);
10654 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10655 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10660 [(set (match_operand:SI 0 "s_register_operand" "")
10661 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10662 [(match_operand 2 "" "") (match_operand 3 "" "")])
10663 (match_operand 4 "" "")
10664 (match_operand 5 "" "")))
10665 (clobber (reg:CC CC_REGNUM))]
10666 "TARGET_ARM && reload_completed"
10667 [(set (match_dup 6) (match_dup 7))
10668 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10669 (set (match_dup 0) (match_dup 4)))
10670 (cond_exec (match_dup 8)
10671 (set (match_dup 0) (match_dup 5)))]
10674 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10675 operands[2], operands[3]);
10676 enum rtx_code rc = GET_CODE (operands[1]);
10678 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10679 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10680 if (mode == CCFPmode || mode == CCFPEmode)
10681 rc = reverse_condition_maybe_unordered (rc);
10683 rc = reverse_condition (rc);
10685 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10690 [(set (match_operand:SI 0 "s_register_operand" "")
10691 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10692 [(match_operand:SI 2 "s_register_operand" "")
10693 (match_operand:SI 3 "arm_add_operand" "")])
10694 (match_operand:SI 4 "arm_rhs_operand" "")
10696 (match_operand:SI 5 "s_register_operand" ""))))
10697 (clobber (reg:CC CC_REGNUM))]
10698 "TARGET_ARM && reload_completed"
10699 [(set (match_dup 6) (match_dup 7))
10700 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10701 (set (match_dup 0) (match_dup 4)))
10702 (cond_exec (match_dup 8)
10703 (set (match_dup 0) (not:SI (match_dup 5))))]
10706 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10707 operands[2], operands[3]);
10708 enum rtx_code rc = GET_CODE (operands[1]);
10710 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10711 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10712 if (mode == CCFPmode || mode == CCFPEmode)
10713 rc = reverse_condition_maybe_unordered (rc);
10715 rc = reverse_condition (rc);
10717 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10721 (define_insn "*cond_move_not"
10722 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10723 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10724 [(match_operand 3 "cc_register" "") (const_int 0)])
10725 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10727 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10731 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10732 [(set_attr "conds" "use")
10733 (set_attr "length" "4,8")]
10736 ;; The next two patterns occur when an AND operation is followed by a
10737 ;; scc insn sequence
10739 (define_insn "*sign_extract_onebit"
10740 [(set (match_operand:SI 0 "s_register_operand" "=r")
10741 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10743 (match_operand:SI 2 "const_int_operand" "n")))
10744 (clobber (reg:CC CC_REGNUM))]
10747 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10748 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10749 return \"mvnne\\t%0, #0\";
10751 [(set_attr "conds" "clob")
10752 (set_attr "length" "8")]
10755 (define_insn "*not_signextract_onebit"
10756 [(set (match_operand:SI 0 "s_register_operand" "=r")
10758 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10760 (match_operand:SI 2 "const_int_operand" "n"))))
10761 (clobber (reg:CC CC_REGNUM))]
10764 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10765 output_asm_insn (\"tst\\t%1, %2\", operands);
10766 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10767 return \"movne\\t%0, #0\";
10769 [(set_attr "conds" "clob")
10770 (set_attr "length" "12")]
10772 ;; ??? The above patterns need auditing for Thumb-2
10774 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10775 ;; expressions. For simplicity, the first register is also in the unspec
10777 (define_insn "*push_multi"
10778 [(match_parallel 2 "multi_register_push"
10779 [(set (match_operand:BLK 0 "memory_operand" "=m")
10780 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10781 UNSPEC_PUSH_MULT))])]
10785 int num_saves = XVECLEN (operands[2], 0);
10787 /* For the StrongARM at least it is faster to
10788 use STR to store only a single register.
10789 In Thumb mode always use push, and the assembler will pick
10790 something appropriate. */
10791 if (num_saves == 1 && TARGET_ARM)
10792 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10799 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10801 strcpy (pattern, \"push\\t{%1\");
10803 for (i = 1; i < num_saves; i++)
10805 strcat (pattern, \", %|\");
10807 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10810 strcat (pattern, \"}\");
10811 output_asm_insn (pattern, operands);
10816 [(set_attr "type" "store4")]
10819 (define_insn "stack_tie"
10820 [(set (mem:BLK (scratch))
10821 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10822 (match_operand:SI 1 "s_register_operand" "rk")]
10826 [(set_attr "length" "0")]
10829 ;; Similarly for the floating point registers
10830 (define_insn "*push_fp_multi"
10831 [(match_parallel 2 "multi_register_push"
10832 [(set (match_operand:BLK 0 "memory_operand" "=m")
10833 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10834 UNSPEC_PUSH_MULT))])]
10835 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10840 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10841 output_asm_insn (pattern, operands);
10844 [(set_attr "type" "f_store")]
10847 ;; Special patterns for dealing with the constant pool
10849 (define_insn "align_4"
10850 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10853 assemble_align (32);
10858 (define_insn "align_8"
10859 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10862 assemble_align (64);
10867 (define_insn "consttable_end"
10868 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10871 making_const_table = FALSE;
10876 (define_insn "consttable_1"
10877 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10880 making_const_table = TRUE;
10881 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10882 assemble_zeros (3);
10885 [(set_attr "length" "4")]
10888 (define_insn "consttable_2"
10889 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10892 making_const_table = TRUE;
10893 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10894 assemble_zeros (2);
10897 [(set_attr "length" "4")]
10900 (define_insn "consttable_4"
10901 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10905 making_const_table = TRUE;
10906 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10911 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10912 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10916 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10917 mark_symbol_refs_as_used (operands[0]);
10922 [(set_attr "length" "4")]
10925 (define_insn "consttable_8"
10926 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10930 making_const_table = TRUE;
10931 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10936 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10937 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10941 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10946 [(set_attr "length" "8")]
10949 (define_insn "consttable_16"
10950 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10954 making_const_table = TRUE;
10955 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10960 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10961 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10965 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10970 [(set_attr "length" "16")]
10973 ;; Miscellaneous Thumb patterns
10975 (define_expand "tablejump"
10976 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10977 (use (label_ref (match_operand 1 "" "")))])]
10982 /* Hopefully, CSE will eliminate this copy. */
10983 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10984 rtx reg2 = gen_reg_rtx (SImode);
10986 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10987 operands[0] = reg2;
10992 ;; NB never uses BX.
10993 (define_insn "*thumb1_tablejump"
10994 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10995 (use (label_ref (match_operand 1 "" "")))]
10998 [(set_attr "length" "2")]
11001 ;; V5 Instructions,
11003 (define_insn "clzsi2"
11004 [(set (match_operand:SI 0 "s_register_operand" "=r")
11005 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11006 "TARGET_32BIT && arm_arch5"
11008 [(set_attr "predicable" "yes")
11009 (set_attr "insn" "clz")])
11011 ;; V5E instructions.
11013 (define_insn "prefetch"
11014 [(prefetch (match_operand:SI 0 "address_operand" "p")
11015 (match_operand:SI 1 "" "")
11016 (match_operand:SI 2 "" ""))]
11017 "TARGET_32BIT && arm_arch5e"
11020 ;; General predication pattern
11023 [(match_operator 0 "arm_comparison_operator"
11024 [(match_operand 1 "cc_register" "")
11030 (define_insn "prologue_use"
11031 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11033 "%@ %0 needed for prologue"
11037 ;; Patterns for exception handling
11039 (define_expand "eh_return"
11040 [(use (match_operand 0 "general_operand" ""))]
11045 emit_insn (gen_arm_eh_return (operands[0]));
11047 emit_insn (gen_thumb_eh_return (operands[0]));
11052 ;; We can't expand this before we know where the link register is stored.
11053 (define_insn_and_split "arm_eh_return"
11054 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11056 (clobber (match_scratch:SI 1 "=&r"))]
11059 "&& reload_completed"
11063 arm_set_return_address (operands[0], operands[1]);
11068 (define_insn_and_split "thumb_eh_return"
11069 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11071 (clobber (match_scratch:SI 1 "=&l"))]
11074 "&& reload_completed"
11078 thumb_set_return_address (operands[0], operands[1]);
11086 (define_insn "load_tp_hard"
11087 [(set (match_operand:SI 0 "register_operand" "=r")
11088 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11090 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11091 [(set_attr "predicable" "yes")]
11094 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11095 (define_insn "load_tp_soft"
11096 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11097 (clobber (reg:SI LR_REGNUM))
11098 (clobber (reg:SI IP_REGNUM))
11099 (clobber (reg:CC CC_REGNUM))]
11101 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11102 [(set_attr "conds" "clob")]
11105 ;; Load the FPA co-processor patterns
11107 ;; Load the Maverick co-processor patterns
11108 (include "cirrus.md")
11109 ;; Vector bits common to IWMMXT and Neon
11110 (include "vec-common.md")
11111 ;; Load the Intel Wireless Multimedia Extension patterns
11112 (include "iwmmxt.md")
11113 ;; Load the VFP co-processor patterns
11115 ;; Thumb-2 patterns
11116 (include "thumb2.md")
11118 (include "neon.md")