1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 ;; and Martin Simmons (@harleqn.co.uk).
6 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 ;; This file is part of GCC.
10 ;; GCC is free software; you can redistribute it and/or modify it
11 ;; under the terms of the GNU General Public License as published
12 ;; by the Free Software Foundation; either version 2, or (at your
13 ;; option) any later version.
15 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
16 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 ;; License for more details.
20 ;; You should have received a copy of the GNU General Public License
21 ;; along with GCC; see the file COPYING. If not, write to
22 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
23 ;; Boston, MA 02110-1301, USA.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
56 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
57 ; operand 0 is the result,
58 ; operand 1 the parameter.
59 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
60 ; operand 0 is the result,
61 ; operand 1 the parameter.
62 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
63 ; operand 0 is the first register,
64 ; subsequent registers are in parallel (use ...)
66 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
67 ; usage, that is, we will add the pic_register
68 ; value to it before trying to dereference it.
69 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
70 ; GLOBAL_OFFSET_TABLE. The operation is fully
71 ; described by the RTL but must be wrapped to
72 ; prevent combine from trying to rip it apart.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 (UNSPEC_STACK_ALIGN 20) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 22) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
103 ;; UNSPEC_VOLATILE Usage:
106 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
108 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
109 ; instruction epilogue sequence that isn't expanded
110 ; into normal RTL. Used for both normal and sibcall
112 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
113 ; for inlined constants.
114 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
116 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
118 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
120 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
122 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
124 (VUNSPEC_TMRC 8) ; Used by the iWMMXt TMRC instruction.
125 (VUNSPEC_TMCR 9) ; Used by the iWMMXt TMCR instruction.
126 (VUNSPEC_ALIGN8 10) ; 8-byte alignment version of VUNSPEC_ALIGN
127 (VUNSPEC_WCMP_EQ 11) ; Used by the iWMMXt WCMPEQ instructions
128 (VUNSPEC_WCMP_GTU 12) ; Used by the iWMMXt WCMPGTU instructions
129 (VUNSPEC_WCMP_GT 13) ; Used by the iwMMXT WCMPGT instructions
130 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
135 ;;---------------------------------------------------------------------------
138 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
139 ; generating ARM code. This is used to control the length of some insn
140 ; patterns that share the same RTL in both ARM and Thumb code.
141 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
143 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
144 ; scheduling decisions for the load unit and the multiplier.
145 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
147 ; IS_XSCALE is set to 'yes' when compiling for XScale.
148 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
150 ;; Operand number of an input operand that is shifted. Zero if the
151 ;; given instruction does not shift one of its input operands.
152 (define_attr "shift" "" (const_int 0))
154 ; Floating Point Unit. If we only have floating point emulation, then there
155 ; is no point in scheduling the floating point insns. (Well, for best
156 ; performance we should try and group them together).
157 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
158 (const (symbol_ref "arm_fpu_attr")))
160 ; LENGTH of an instruction (in bytes)
161 (define_attr "length" "" (const_int 4))
163 ; POOL_RANGE is how far away from a constant pool entry that this insn
164 ; can be placed. If the distance is zero, then this insn will never
165 ; reference the pool.
166 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
167 ; before its address.
168 (define_attr "pool_range" "" (const_int 0))
169 (define_attr "neg_pool_range" "" (const_int 0))
171 ; An assembler sequence may clobber the condition codes without us knowing.
172 ; If such an insn references the pool, then we have no way of knowing how,
173 ; so use the most conservative value for pool_range.
174 (define_asm_attributes
175 [(set_attr "conds" "clob")
176 (set_attr "length" "4")
177 (set_attr "pool_range" "250")])
179 ;; The instruction used to implement a particular pattern. This
180 ;; information is used by pipeline descriptions to provide accurate
181 ;; scheduling information.
184 "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,other"
185 (const_string "other"))
187 ; TYPE attribute is used to detect floating point instructions which, if
188 ; running on a co-processor can run in parallel with other, basic instructions
189 ; If write-buffer scheduling is enabled then it can also be used in the
190 ; scheduling of writes.
192 ; Classification of each insn
193 ; alu any alu instruction that doesn't hit memory or fp
194 ; regs or have a shifted source operand
195 ; alu_shift any data instruction that doesn't hit memory or fp
196 ; regs, but has a source operand shifted by a constant
197 ; alu_shift_reg any data instruction that doesn't hit memory or fp
198 ; regs, but has a source operand shifted by a register value
199 ; mult a multiply instruction
200 ; block blockage insn, this blocks all functional units
201 ; float a floating point arithmetic operation (subject to expansion)
202 ; fdivd DFmode floating point division
203 ; fdivs SFmode floating point division
204 ; fmul Floating point multiply
205 ; ffmul Fast floating point multiply
206 ; farith Floating point arithmetic (4 cycle)
207 ; ffarith Fast floating point arithmetic (2 cycle)
208 ; float_em a floating point arithmetic operation that is normally emulated
209 ; even on a machine with an fpa.
210 ; f_load a floating point load from memory
211 ; f_store a floating point store to memory
212 ; f_load[sd] single/double load from memory
213 ; f_store[sd] single/double store to memory
214 ; f_flag a transfer of co-processor flags to the CPSR
215 ; f_mem_r a transfer of a floating point register to a real reg via mem
216 ; r_mem_f the reverse of f_mem_r
217 ; f_2_r fast transfer float to arm (no memory needed)
218 ; r_2_f fast transfer arm to float
219 ; f_cvt convert floating<->integral
221 ; call a subroutine call
222 ; load_byte load byte(s) from memory to arm registers
223 ; load1 load 1 word from memory to arm registers
224 ; load2 load 2 words from memory to arm registers
225 ; load3 load 3 words from memory to arm registers
226 ; load4 load 4 words from memory to arm registers
227 ; store store 1 word to memory from arm registers
228 ; store2 store 2 words
229 ; store3 store 3 words
230 ; store4 store 4 (or more) words
231 ; Additions for Cirrus Maverick co-processor:
232 ; mav_farith Floating point arithmetic (4 cycle)
233 ; mav_dmult Double multiplies (7 cycle)
236 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
238 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
239 (const_string "mult")
240 (const_string "alu")))
242 ; Load scheduling, set from the arm_ld_sched variable
243 ; initialized by arm_override_options()
244 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
246 ; condition codes: this one is used by final_prescan_insn to speed up
247 ; conditionalizing instructions. It saves having to scan the rtl to see if
248 ; it uses or alters the condition codes.
250 ; USE means that the condition codes are used by the insn in the process of
251 ; outputting code, this means (at present) that we can't use the insn in
254 ; SET means that the purpose of the insn is to set the condition codes in a
255 ; well defined manner.
257 ; CLOB means that the condition codes are altered in an undefined manner, if
258 ; they are altered at all
260 ; JUMP_CLOB is used when the condition cannot be represented by a single
261 ; instruction (UNEQ and LTGT). These cannot be predicated.
263 ; NOCOND means that the condition codes are neither altered nor affect the
264 ; output of this insn
266 (define_attr "conds" "use,set,clob,jump_clob,nocond"
267 (if_then_else (eq_attr "type" "call")
268 (const_string "clob")
269 (const_string "nocond")))
271 ; Predicable means that the insn can be conditionally executed based on
272 ; an automatically added predicate (additional patterns are generated by
273 ; gen...). We default to 'no' because no Thumb patterns match this rule
274 ; and not all ARM patterns do.
275 (define_attr "predicable" "no,yes" (const_string "no"))
277 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
278 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
279 ; suffer blockages enough to warrant modelling this (and it can adversely
280 ; affect the schedule).
281 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
283 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
284 ; to stall the processor. Used with model_wbuf above.
285 (define_attr "write_conflict" "no,yes"
286 (if_then_else (eq_attr "type"
287 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
289 (const_string "no")))
291 ; Classify the insns into those that take one cycle and those that take more
292 ; than one on the main cpu execution unit.
293 (define_attr "core_cycles" "single,multi"
294 (if_then_else (eq_attr "type"
295 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
296 (const_string "single")
297 (const_string "multi")))
299 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
300 ;; distant label. Only applicable to Thumb code.
301 (define_attr "far_jump" "yes,no" (const_string "no"))
304 ;; The number of machine instructions this pattern expands to.
305 ;; Used for Thumb-2 conditional execution.
306 (define_attr "ce_count" "" (const_int 1))
308 ;;---------------------------------------------------------------------------
311 ; A list of modes that are exactly 64 bits in size. We use this to expand
312 ; some splits that are the same for all modes when operating on ARM
314 (define_mode_macro ANY64 [DI DF V8QI V4HI V2SI V2SF])
316 ;;---------------------------------------------------------------------------
319 (include "predicates.md")
320 (include "constraints.md")
322 ;;---------------------------------------------------------------------------
323 ;; Pipeline descriptions
325 ;; Processor type. This is created automatically from arm-cores.def.
326 (include "arm-tune.md")
328 ;; True if the generic scheduling description should be used.
330 (define_attr "generic_sched" "yes,no"
332 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs")
334 (const_string "yes"))))
336 (define_attr "generic_vfp" "yes,no"
338 (and (eq_attr "fpu" "vfp")
339 (eq_attr "tune" "!arm1020e,arm1022e"))
341 (const_string "no"))))
343 (include "arm-generic.md")
344 (include "arm926ejs.md")
345 (include "arm1020e.md")
346 (include "arm1026ejs.md")
347 (include "arm1136jfs.md")
350 ;;---------------------------------------------------------------------------
355 ;; Note: For DImode insns, there is normally no reason why operands should
356 ;; not be in the same register, what we don't want is for something being
357 ;; written to partially overlap something that is an input.
358 ;; Cirrus 64bit additions should not be split because we have a native
359 ;; 64bit addition instructions.
361 (define_expand "adddi3"
363 [(set (match_operand:DI 0 "s_register_operand" "")
364 (plus:DI (match_operand:DI 1 "s_register_operand" "")
365 (match_operand:DI 2 "s_register_operand" "")))
366 (clobber (reg:CC CC_REGNUM))])]
369 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
371 if (!cirrus_fp_register (operands[0], DImode))
372 operands[0] = force_reg (DImode, operands[0]);
373 if (!cirrus_fp_register (operands[1], DImode))
374 operands[1] = force_reg (DImode, operands[1]);
375 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
381 if (GET_CODE (operands[1]) != REG)
382 operands[1] = force_reg (SImode, operands[1]);
383 if (GET_CODE (operands[2]) != REG)
384 operands[2] = force_reg (SImode, operands[2]);
389 (define_insn "*thumb1_adddi3"
390 [(set (match_operand:DI 0 "register_operand" "=l")
391 (plus:DI (match_operand:DI 1 "register_operand" "%0")
392 (match_operand:DI 2 "register_operand" "l")))
393 (clobber (reg:CC CC_REGNUM))
396 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
397 [(set_attr "length" "4")]
400 (define_insn_and_split "*arm_adddi3"
401 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
402 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
403 (match_operand:DI 2 "s_register_operand" "r, 0")))
404 (clobber (reg:CC CC_REGNUM))]
405 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
407 "TARGET_32BIT && reload_completed"
408 [(parallel [(set (reg:CC_C CC_REGNUM)
409 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
411 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
412 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
413 (plus:SI (match_dup 4) (match_dup 5))))]
416 operands[3] = gen_highpart (SImode, operands[0]);
417 operands[0] = gen_lowpart (SImode, operands[0]);
418 operands[4] = gen_highpart (SImode, operands[1]);
419 operands[1] = gen_lowpart (SImode, operands[1]);
420 operands[5] = gen_highpart (SImode, operands[2]);
421 operands[2] = gen_lowpart (SImode, operands[2]);
423 [(set_attr "conds" "clob")
424 (set_attr "length" "8")]
427 (define_insn_and_split "*adddi_sesidi_di"
428 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
429 (plus:DI (sign_extend:DI
430 (match_operand:SI 2 "s_register_operand" "r,r"))
431 (match_operand:DI 1 "s_register_operand" "r,0")))
432 (clobber (reg:CC CC_REGNUM))]
433 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
435 "TARGET_32BIT && reload_completed"
436 [(parallel [(set (reg:CC_C CC_REGNUM)
437 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
439 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
440 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
441 (plus:SI (ashiftrt:SI (match_dup 2)
446 operands[3] = gen_highpart (SImode, operands[0]);
447 operands[0] = gen_lowpart (SImode, operands[0]);
448 operands[4] = gen_highpart (SImode, operands[1]);
449 operands[1] = gen_lowpart (SImode, operands[1]);
450 operands[2] = gen_lowpart (SImode, operands[2]);
452 [(set_attr "conds" "clob")
453 (set_attr "length" "8")]
456 (define_insn_and_split "*adddi_zesidi_di"
457 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
458 (plus:DI (zero_extend:DI
459 (match_operand:SI 2 "s_register_operand" "r,r"))
460 (match_operand:DI 1 "s_register_operand" "r,0")))
461 (clobber (reg:CC CC_REGNUM))]
462 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
464 "TARGET_32BIT && reload_completed"
465 [(parallel [(set (reg:CC_C CC_REGNUM)
466 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
468 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
469 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
470 (plus:SI (match_dup 4) (const_int 0))))]
473 operands[3] = gen_highpart (SImode, operands[0]);
474 operands[0] = gen_lowpart (SImode, operands[0]);
475 operands[4] = gen_highpart (SImode, operands[1]);
476 operands[1] = gen_lowpart (SImode, operands[1]);
477 operands[2] = gen_lowpart (SImode, operands[2]);
479 [(set_attr "conds" "clob")
480 (set_attr "length" "8")]
483 (define_expand "addsi3"
484 [(set (match_operand:SI 0 "s_register_operand" "")
485 (plus:SI (match_operand:SI 1 "s_register_operand" "")
486 (match_operand:SI 2 "reg_or_int_operand" "")))]
489 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
491 arm_split_constant (PLUS, SImode, NULL_RTX,
492 INTVAL (operands[2]), operands[0], operands[1],
493 optimize && can_create_pseudo_p ());
499 ; If there is a scratch available, this will be faster than synthesizing the
502 [(match_scratch:SI 3 "r")
503 (set (match_operand:SI 0 "arm_general_register_operand" "")
504 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
505 (match_operand:SI 2 "const_int_operand" "")))]
507 !(const_ok_for_arm (INTVAL (operands[2]))
508 || const_ok_for_arm (-INTVAL (operands[2])))
509 && const_ok_for_arm (~INTVAL (operands[2]))"
510 [(set (match_dup 3) (match_dup 2))
511 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
515 (define_insn_and_split "*arm_addsi3"
516 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
517 (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
518 (match_operand:SI 2 "reg_or_int_operand" "rI,L,?n")))]
525 GET_CODE (operands[2]) == CONST_INT
526 && !(const_ok_for_arm (INTVAL (operands[2]))
527 || const_ok_for_arm (-INTVAL (operands[2])))"
528 [(clobber (const_int 0))]
530 arm_split_constant (PLUS, SImode, curr_insn,
531 INTVAL (operands[2]), operands[0],
535 [(set_attr "length" "4,4,16")
536 (set_attr "predicable" "yes")]
539 ;; Register group 'k' is a single register group containing only the stack
540 ;; register. Trying to reload it will always fail catastrophically,
541 ;; so never allow those alternatives to match if reloading is needed.
543 (define_insn "*thumb1_addsi3"
544 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*r,*h,l,!k")
545 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
546 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,*r,!M,!O")))]
549 static const char * const asms[] =
551 \"add\\t%0, %0, %2\",
552 \"sub\\t%0, %0, #%n2\",
553 \"add\\t%0, %1, %2\",
554 \"add\\t%0, %0, %2\",
555 \"add\\t%0, %0, %2\",
556 \"add\\t%0, %1, %2\",
559 if ((which_alternative == 2 || which_alternative == 6)
560 && GET_CODE (operands[2]) == CONST_INT
561 && INTVAL (operands[2]) < 0)
562 return \"sub\\t%0, %1, #%n2\";
563 return asms[which_alternative];
565 [(set_attr "length" "2")]
568 ;; Reloading and elimination of the frame pointer can
569 ;; sometimes cause this optimization to be missed.
571 [(set (match_operand:SI 0 "arm_general_register_operand" "")
572 (match_operand:SI 1 "const_int_operand" ""))
574 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
576 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
577 && (INTVAL (operands[1]) & 3) == 0"
578 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
582 ;; ??? Make Thumb-2 variants which prefer low regs
583 (define_insn "*addsi3_compare0"
584 [(set (reg:CC_NOOV CC_REGNUM)
586 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
587 (match_operand:SI 2 "arm_add_operand" "rI,L"))
589 (set (match_operand:SI 0 "s_register_operand" "=r,r")
590 (plus:SI (match_dup 1) (match_dup 2)))]
594 sub%.\\t%0, %1, #%n2"
595 [(set_attr "conds" "set")]
598 (define_insn "*addsi3_compare0_scratch"
599 [(set (reg:CC_NOOV CC_REGNUM)
601 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
602 (match_operand:SI 1 "arm_add_operand" "rI,L"))
608 [(set_attr "conds" "set")]
611 (define_insn "*compare_negsi_si"
612 [(set (reg:CC_Z CC_REGNUM)
614 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
615 (match_operand:SI 1 "s_register_operand" "r")))]
618 [(set_attr "conds" "set")]
621 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
622 ;; addend is a constant.
623 (define_insn "*cmpsi2_addneg"
624 [(set (reg:CC CC_REGNUM)
626 (match_operand:SI 1 "s_register_operand" "r,r")
627 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
628 (set (match_operand:SI 0 "s_register_operand" "=r,r")
629 (plus:SI (match_dup 1)
630 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
631 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
634 add%.\\t%0, %1, #%n2"
635 [(set_attr "conds" "set")]
638 ;; Convert the sequence
640 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
644 ;; bcs dest ((unsigned)rn >= 1)
645 ;; similarly for the beq variant using bcc.
646 ;; This is a common looping idiom (while (n--))
648 [(set (match_operand:SI 0 "arm_general_register_operand" "")
649 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
651 (set (match_operand 2 "cc_register" "")
652 (compare (match_dup 0) (const_int -1)))
654 (if_then_else (match_operator 3 "equality_operator"
655 [(match_dup 2) (const_int 0)])
656 (match_operand 4 "" "")
657 (match_operand 5 "" "")))]
658 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
662 (match_dup 1) (const_int 1)))
663 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
665 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
668 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
669 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
672 operands[2], const0_rtx);"
675 ;; The next four insns work because they compare the result with one of
676 ;; the operands, and we know that the use of the condition code is
677 ;; either GEU or LTU, so we can use the carry flag from the addition
678 ;; instead of doing the compare a second time.
679 (define_insn "*addsi3_compare_op1"
680 [(set (reg:CC_C CC_REGNUM)
682 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
683 (match_operand:SI 2 "arm_add_operand" "rI,L"))
685 (set (match_operand:SI 0 "s_register_operand" "=r,r")
686 (plus:SI (match_dup 1) (match_dup 2)))]
690 sub%.\\t%0, %1, #%n2"
691 [(set_attr "conds" "set")]
694 (define_insn "*addsi3_compare_op2"
695 [(set (reg:CC_C CC_REGNUM)
697 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
698 (match_operand:SI 2 "arm_add_operand" "rI,L"))
700 (set (match_operand:SI 0 "s_register_operand" "=r,r")
701 (plus:SI (match_dup 1) (match_dup 2)))]
705 sub%.\\t%0, %1, #%n2"
706 [(set_attr "conds" "set")]
709 (define_insn "*compare_addsi2_op0"
710 [(set (reg:CC_C CC_REGNUM)
712 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
713 (match_operand:SI 1 "arm_add_operand" "rI,L"))
719 [(set_attr "conds" "set")]
722 (define_insn "*compare_addsi2_op1"
723 [(set (reg:CC_C CC_REGNUM)
725 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
726 (match_operand:SI 1 "arm_add_operand" "rI,L"))
732 [(set_attr "conds" "set")]
735 (define_insn "*addsi3_carryin"
736 [(set (match_operand:SI 0 "s_register_operand" "=r")
737 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
738 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
739 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
742 [(set_attr "conds" "use")]
745 (define_insn "*addsi3_carryin_shift"
746 [(set (match_operand:SI 0 "s_register_operand" "=r")
747 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
749 (match_operator:SI 2 "shift_operator"
750 [(match_operand:SI 3 "s_register_operand" "r")
751 (match_operand:SI 4 "reg_or_int_operand" "rM")])
752 (match_operand:SI 1 "s_register_operand" "r"))))]
754 "adc%?\\t%0, %1, %3%S2"
755 [(set_attr "conds" "use")
756 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
757 (const_string "alu_shift")
758 (const_string "alu_shift_reg")))]
761 (define_insn "*addsi3_carryin_alt1"
762 [(set (match_operand:SI 0 "s_register_operand" "=r")
763 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
764 (match_operand:SI 2 "arm_rhs_operand" "rI"))
765 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
768 [(set_attr "conds" "use")]
771 (define_insn "*addsi3_carryin_alt2"
772 [(set (match_operand:SI 0 "s_register_operand" "=r")
773 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
774 (match_operand:SI 1 "s_register_operand" "r"))
775 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
778 [(set_attr "conds" "use")]
781 (define_insn "*addsi3_carryin_alt3"
782 [(set (match_operand:SI 0 "s_register_operand" "=r")
783 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
784 (match_operand:SI 2 "arm_rhs_operand" "rI"))
785 (match_operand:SI 1 "s_register_operand" "r")))]
788 [(set_attr "conds" "use")]
791 (define_expand "incscc"
792 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
793 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
794 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
795 (match_operand:SI 1 "s_register_operand" "0,?r")))]
800 (define_insn "*arm_incscc"
801 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
803 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
804 (match_operand:SI 1 "s_register_operand" "0,?r")))]
808 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
809 [(set_attr "conds" "use")
810 (set_attr "length" "4,8")]
813 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
815 [(set (match_operand:SI 0 "s_register_operand" "")
816 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
817 (match_operand:SI 2 "s_register_operand" ""))
819 (clobber (match_operand:SI 3 "s_register_operand" ""))]
821 [(set (match_dup 3) (match_dup 1))
822 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
824 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
827 (define_expand "addsf3"
828 [(set (match_operand:SF 0 "s_register_operand" "")
829 (plus:SF (match_operand:SF 1 "s_register_operand" "")
830 (match_operand:SF 2 "arm_float_add_operand" "")))]
831 "TARGET_32BIT && TARGET_HARD_FLOAT"
834 && !cirrus_fp_register (operands[2], SFmode))
835 operands[2] = force_reg (SFmode, operands[2]);
838 (define_expand "adddf3"
839 [(set (match_operand:DF 0 "s_register_operand" "")
840 (plus:DF (match_operand:DF 1 "s_register_operand" "")
841 (match_operand:DF 2 "arm_float_add_operand" "")))]
842 "TARGET_32BIT && TARGET_HARD_FLOAT"
845 && !cirrus_fp_register (operands[2], DFmode))
846 operands[2] = force_reg (DFmode, operands[2]);
849 (define_expand "subdi3"
851 [(set (match_operand:DI 0 "s_register_operand" "")
852 (minus:DI (match_operand:DI 1 "s_register_operand" "")
853 (match_operand:DI 2 "s_register_operand" "")))
854 (clobber (reg:CC CC_REGNUM))])]
857 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
859 && cirrus_fp_register (operands[0], DImode)
860 && cirrus_fp_register (operands[1], DImode))
862 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
868 if (GET_CODE (operands[1]) != REG)
869 operands[1] = force_reg (SImode, operands[1]);
870 if (GET_CODE (operands[2]) != REG)
871 operands[2] = force_reg (SImode, operands[2]);
876 (define_insn "*arm_subdi3"
877 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
878 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
879 (match_operand:DI 2 "s_register_operand" "r,0,0")))
880 (clobber (reg:CC CC_REGNUM))]
882 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
883 [(set_attr "conds" "clob")
884 (set_attr "length" "8")]
887 (define_insn "*thumb_subdi3"
888 [(set (match_operand:DI 0 "register_operand" "=l")
889 (minus:DI (match_operand:DI 1 "register_operand" "0")
890 (match_operand:DI 2 "register_operand" "l")))
891 (clobber (reg:CC CC_REGNUM))]
893 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
894 [(set_attr "length" "4")]
897 (define_insn "*subdi_di_zesidi"
898 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
899 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
901 (match_operand:SI 2 "s_register_operand" "r,r"))))
902 (clobber (reg:CC CC_REGNUM))]
904 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
905 [(set_attr "conds" "clob")
906 (set_attr "length" "8")]
909 (define_insn "*subdi_di_sesidi"
910 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
911 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
913 (match_operand:SI 2 "s_register_operand" "r,r"))))
914 (clobber (reg:CC CC_REGNUM))]
916 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
917 [(set_attr "conds" "clob")
918 (set_attr "length" "8")]
921 (define_insn "*subdi_zesidi_di"
922 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
923 (minus:DI (zero_extend:DI
924 (match_operand:SI 2 "s_register_operand" "r,r"))
925 (match_operand:DI 1 "s_register_operand" "?r,0")))
926 (clobber (reg:CC CC_REGNUM))]
928 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
929 [(set_attr "conds" "clob")
930 (set_attr "length" "8")]
933 (define_insn "*subdi_sesidi_di"
934 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
935 (minus:DI (sign_extend:DI
936 (match_operand:SI 2 "s_register_operand" "r,r"))
937 (match_operand:DI 1 "s_register_operand" "?r,0")))
938 (clobber (reg:CC CC_REGNUM))]
940 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
941 [(set_attr "conds" "clob")
942 (set_attr "length" "8")]
945 (define_insn "*subdi_zesidi_zesidi"
946 [(set (match_operand:DI 0 "s_register_operand" "=r")
947 (minus:DI (zero_extend:DI
948 (match_operand:SI 1 "s_register_operand" "r"))
950 (match_operand:SI 2 "s_register_operand" "r"))))
951 (clobber (reg:CC CC_REGNUM))]
953 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
954 [(set_attr "conds" "clob")
955 (set_attr "length" "8")]
958 (define_expand "subsi3"
959 [(set (match_operand:SI 0 "s_register_operand" "")
960 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
961 (match_operand:SI 2 "s_register_operand" "")))]
964 if (GET_CODE (operands[1]) == CONST_INT)
968 arm_split_constant (MINUS, SImode, NULL_RTX,
969 INTVAL (operands[1]), operands[0],
970 operands[2], optimize && can_create_pseudo_p ());
973 else /* TARGET_THUMB1 */
974 operands[1] = force_reg (SImode, operands[1]);
979 (define_insn "*thumb1_subsi3_insn"
980 [(set (match_operand:SI 0 "register_operand" "=l")
981 (minus:SI (match_operand:SI 1 "register_operand" "l")
982 (match_operand:SI 2 "register_operand" "l")))]
985 [(set_attr "length" "2")]
988 ; ??? Check Thumb-2 split length
989 (define_insn_and_split "*arm_subsi3_insn"
990 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
991 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,?n")
992 (match_operand:SI 2 "s_register_operand" "r,r")))]
998 && GET_CODE (operands[1]) == CONST_INT
999 && !const_ok_for_arm (INTVAL (operands[1]))"
1000 [(clobber (const_int 0))]
1002 arm_split_constant (MINUS, SImode, curr_insn,
1003 INTVAL (operands[1]), operands[0], operands[2], 0);
1006 [(set_attr "length" "4,16")
1007 (set_attr "predicable" "yes")]
1011 [(match_scratch:SI 3 "r")
1012 (set (match_operand:SI 0 "arm_general_register_operand" "")
1013 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1014 (match_operand:SI 2 "arm_general_register_operand" "")))]
1016 && !const_ok_for_arm (INTVAL (operands[1]))
1017 && const_ok_for_arm (~INTVAL (operands[1]))"
1018 [(set (match_dup 3) (match_dup 1))
1019 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1023 (define_insn "*subsi3_compare0"
1024 [(set (reg:CC_NOOV CC_REGNUM)
1026 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1027 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1029 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1030 (minus:SI (match_dup 1) (match_dup 2)))]
1035 [(set_attr "conds" "set")]
1038 (define_expand "decscc"
1039 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1040 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1041 (match_operator:SI 2 "arm_comparison_operator"
1042 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1047 (define_insn "*arm_decscc"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1049 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1050 (match_operator:SI 2 "arm_comparison_operator"
1051 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1055 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1056 [(set_attr "conds" "use")
1057 (set_attr "length" "*,8")]
1060 (define_expand "subsf3"
1061 [(set (match_operand:SF 0 "s_register_operand" "")
1062 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1063 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1064 "TARGET_32BIT && TARGET_HARD_FLOAT"
1066 if (TARGET_MAVERICK)
1068 if (!cirrus_fp_register (operands[1], SFmode))
1069 operands[1] = force_reg (SFmode, operands[1]);
1070 if (!cirrus_fp_register (operands[2], SFmode))
1071 operands[2] = force_reg (SFmode, operands[2]);
1075 (define_expand "subdf3"
1076 [(set (match_operand:DF 0 "s_register_operand" "")
1077 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1078 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1079 "TARGET_32BIT && TARGET_HARD_FLOAT"
1081 if (TARGET_MAVERICK)
1083 if (!cirrus_fp_register (operands[1], DFmode))
1084 operands[1] = force_reg (DFmode, operands[1]);
1085 if (!cirrus_fp_register (operands[2], DFmode))
1086 operands[2] = force_reg (DFmode, operands[2]);
1091 ;; Multiplication insns
1093 (define_expand "mulsi3"
1094 [(set (match_operand:SI 0 "s_register_operand" "")
1095 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1096 (match_operand:SI 1 "s_register_operand" "")))]
1101 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1102 (define_insn "*arm_mulsi3"
1103 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1104 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1105 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1106 "TARGET_32BIT && !arm_arch6"
1107 "mul%?\\t%0, %2, %1"
1108 [(set_attr "insn" "mul")
1109 (set_attr "predicable" "yes")]
1112 (define_insn "*arm_mulsi3_v6"
1113 [(set (match_operand:SI 0 "s_register_operand" "=r")
1114 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1115 (match_operand:SI 2 "s_register_operand" "r")))]
1116 "TARGET_32BIT && arm_arch6"
1117 "mul%?\\t%0, %1, %2"
1118 [(set_attr "insn" "mul")
1119 (set_attr "predicable" "yes")]
1122 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1123 ; 1 and 2; are the same, because reload will make operand 0 match
1124 ; operand 1 without realizing that this conflicts with operand 2. We fix
1125 ; this by adding another alternative to match this case, and then `reload'
1126 ; it ourselves. This alternative must come first.
1127 (define_insn "*thumb_mulsi3"
1128 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1129 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1130 (match_operand:SI 2 "register_operand" "l,l,l")))]
1131 "TARGET_THUMB1 && !arm_arch6"
1133 if (which_alternative < 2)
1134 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1136 return \"mul\\t%0, %2\";
1138 [(set_attr "length" "4,4,2")
1139 (set_attr "insn" "mul")]
1142 (define_insn "*thumb_mulsi3_v6"
1143 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1144 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1145 (match_operand:SI 2 "register_operand" "l,0,0")))]
1146 "TARGET_THUMB1 && arm_arch6"
1151 [(set_attr "length" "2")
1152 (set_attr "insn" "mul")]
1155 (define_insn "*mulsi3_compare0"
1156 [(set (reg:CC_NOOV CC_REGNUM)
1157 (compare:CC_NOOV (mult:SI
1158 (match_operand:SI 2 "s_register_operand" "r,r")
1159 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1161 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1162 (mult:SI (match_dup 2) (match_dup 1)))]
1163 "TARGET_ARM && !arm_arch6"
1164 "mul%.\\t%0, %2, %1"
1165 [(set_attr "conds" "set")
1166 (set_attr "insn" "muls")]
1169 (define_insn "*mulsi3_compare0_v6"
1170 [(set (reg:CC_NOOV CC_REGNUM)
1171 (compare:CC_NOOV (mult:SI
1172 (match_operand:SI 2 "s_register_operand" "r")
1173 (match_operand:SI 1 "s_register_operand" "r"))
1175 (set (match_operand:SI 0 "s_register_operand" "=r")
1176 (mult:SI (match_dup 2) (match_dup 1)))]
1177 "TARGET_ARM && arm_arch6 && optimize_size"
1178 "mul%.\\t%0, %2, %1"
1179 [(set_attr "conds" "set")
1180 (set_attr "insn" "muls")]
1183 (define_insn "*mulsi_compare0_scratch"
1184 [(set (reg:CC_NOOV CC_REGNUM)
1185 (compare:CC_NOOV (mult:SI
1186 (match_operand:SI 2 "s_register_operand" "r,r")
1187 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1189 (clobber (match_scratch:SI 0 "=&r,&r"))]
1190 "TARGET_ARM && !arm_arch6"
1191 "mul%.\\t%0, %2, %1"
1192 [(set_attr "conds" "set")
1193 (set_attr "insn" "muls")]
1196 (define_insn "*mulsi_compare0_scratch_v6"
1197 [(set (reg:CC_NOOV CC_REGNUM)
1198 (compare:CC_NOOV (mult:SI
1199 (match_operand:SI 2 "s_register_operand" "r")
1200 (match_operand:SI 1 "s_register_operand" "r"))
1202 (clobber (match_scratch:SI 0 "=r"))]
1203 "TARGET_ARM && arm_arch6 && optimize_size"
1204 "mul%.\\t%0, %2, %1"
1205 [(set_attr "conds" "set")
1206 (set_attr "insn" "muls")]
1209 ;; Unnamed templates to match MLA instruction.
1211 (define_insn "*mulsi3addsi"
1212 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1214 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1215 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1216 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1217 "TARGET_32BIT && !arm_arch6"
1218 "mla%?\\t%0, %2, %1, %3"
1219 [(set_attr "insn" "mla")
1220 (set_attr "predicable" "yes")]
1223 (define_insn "*mulsi3addsi_v6"
1224 [(set (match_operand:SI 0 "s_register_operand" "=r")
1226 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1227 (match_operand:SI 1 "s_register_operand" "r"))
1228 (match_operand:SI 3 "s_register_operand" "r")))]
1229 "TARGET_32BIT && arm_arch6"
1230 "mla%?\\t%0, %2, %1, %3"
1231 [(set_attr "insn" "mla")
1232 (set_attr "predicable" "yes")]
1235 (define_insn "*mulsi3addsi_compare0"
1236 [(set (reg:CC_NOOV CC_REGNUM)
1239 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1240 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1241 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1243 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1244 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1246 "TARGET_ARM && arm_arch6"
1247 "mla%.\\t%0, %2, %1, %3"
1248 [(set_attr "conds" "set")
1249 (set_attr "insn" "mlas")]
1252 (define_insn "*mulsi3addsi_compare0_v6"
1253 [(set (reg:CC_NOOV CC_REGNUM)
1256 (match_operand:SI 2 "s_register_operand" "r")
1257 (match_operand:SI 1 "s_register_operand" "r"))
1258 (match_operand:SI 3 "s_register_operand" "r"))
1260 (set (match_operand:SI 0 "s_register_operand" "=r")
1261 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1263 "TARGET_ARM && arm_arch6 && optimize_size"
1264 "mla%.\\t%0, %2, %1, %3"
1265 [(set_attr "conds" "set")
1266 (set_attr "insn" "mlas")]
1269 (define_insn "*mulsi3addsi_compare0_scratch"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1273 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1274 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1275 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1277 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1278 "TARGET_ARM && !arm_arch6"
1279 "mla%.\\t%0, %2, %1, %3"
1280 [(set_attr "conds" "set")
1281 (set_attr "insn" "mlas")]
1284 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1285 [(set (reg:CC_NOOV CC_REGNUM)
1288 (match_operand:SI 2 "s_register_operand" "r")
1289 (match_operand:SI 1 "s_register_operand" "r"))
1290 (match_operand:SI 3 "s_register_operand" "r"))
1292 (clobber (match_scratch:SI 0 "=r"))]
1293 "TARGET_ARM && arm_arch6 && optimize_size"
1294 "mla%.\\t%0, %2, %1, %3"
1295 [(set_attr "conds" "set")
1296 (set_attr "insn" "mlas")]
1299 (define_insn "*mulsi3subsi"
1300 [(set (match_operand:SI 0 "s_register_operand" "=r")
1302 (match_operand:SI 3 "s_register_operand" "r")
1303 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1304 (match_operand:SI 1 "s_register_operand" "r"))))]
1305 "TARGET_32BIT && arm_arch_thumb2"
1306 "mls%?\\t%0, %2, %1, %3"
1307 [(set_attr "insn" "mla")
1308 (set_attr "predicable" "yes")]
1311 ;; Unnamed template to match long long multiply-accumulate (smlal)
1313 (define_insn "*mulsidi3adddi"
1314 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1317 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1318 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1319 (match_operand:DI 1 "s_register_operand" "0")))]
1320 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1321 "smlal%?\\t%Q0, %R0, %3, %2"
1322 [(set_attr "insn" "smlal")
1323 (set_attr "predicable" "yes")]
1326 (define_insn "*mulsidi3adddi_v6"
1327 [(set (match_operand:DI 0 "s_register_operand" "=r")
1330 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1331 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1332 (match_operand:DI 1 "s_register_operand" "0")))]
1333 "TARGET_32BIT && arm_arch6"
1334 "smlal%?\\t%Q0, %R0, %3, %2"
1335 [(set_attr "insn" "smlal")
1336 (set_attr "predicable" "yes")]
1339 (define_insn "mulsidi3"
1340 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1342 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1343 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1344 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1345 "smull%?\\t%Q0, %R0, %1, %2"
1346 [(set_attr "insn" "smull")
1347 (set_attr "predicable" "yes")]
1350 (define_insn "mulsidi3_v6"
1351 [(set (match_operand:DI 0 "s_register_operand" "=r")
1353 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1354 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1355 "TARGET_32BIT && arm_arch6"
1356 "smull%?\\t%Q0, %R0, %1, %2"
1357 [(set_attr "insn" "smull")
1358 (set_attr "predicable" "yes")]
1361 (define_insn "umulsidi3"
1362 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1364 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1365 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1366 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1367 "umull%?\\t%Q0, %R0, %1, %2"
1368 [(set_attr "insn" "umull")
1369 (set_attr "predicable" "yes")]
1372 (define_insn "umulsidi3_v6"
1373 [(set (match_operand:DI 0 "s_register_operand" "=r")
1375 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1376 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1377 "TARGET_32BIT && arm_arch6"
1378 "umull%?\\t%Q0, %R0, %1, %2"
1379 [(set_attr "insn" "umull")
1380 (set_attr "predicable" "yes")]
1383 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1385 (define_insn "*umulsidi3adddi"
1386 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1389 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1390 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1391 (match_operand:DI 1 "s_register_operand" "0")))]
1392 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1393 "umlal%?\\t%Q0, %R0, %3, %2"
1394 [(set_attr "insn" "umlal")
1395 (set_attr "predicable" "yes")]
1398 (define_insn "*umulsidi3adddi_v6"
1399 [(set (match_operand:DI 0 "s_register_operand" "=r")
1402 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1403 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1404 (match_operand:DI 1 "s_register_operand" "0")))]
1405 "TARGET_32BIT && arm_arch6"
1406 "umlal%?\\t%Q0, %R0, %3, %2"
1407 [(set_attr "insn" "umlal")
1408 (set_attr "predicable" "yes")]
1411 (define_insn "smulsi3_highpart"
1412 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1416 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1417 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1419 (clobber (match_scratch:SI 3 "=&r,&r"))]
1420 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1421 "smull%?\\t%3, %0, %2, %1"
1422 [(set_attr "insn" "smull")
1423 (set_attr "predicable" "yes")]
1426 (define_insn "smulsi3_highpart_v6"
1427 [(set (match_operand:SI 0 "s_register_operand" "=r")
1431 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1432 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1434 (clobber (match_scratch:SI 3 "=r"))]
1435 "TARGET_32BIT && arm_arch6"
1436 "smull%?\\t%3, %0, %2, %1"
1437 [(set_attr "insn" "smull")
1438 (set_attr "predicable" "yes")]
1441 (define_insn "umulsi3_highpart"
1442 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1446 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1447 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1449 (clobber (match_scratch:SI 3 "=&r,&r"))]
1450 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1451 "umull%?\\t%3, %0, %2, %1"
1452 [(set_attr "insn" "umull")
1453 (set_attr "predicable" "yes")]
1456 (define_insn "umulsi3_highpart_v6"
1457 [(set (match_operand:SI 0 "s_register_operand" "=r")
1461 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1462 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1464 (clobber (match_scratch:SI 3 "=r"))]
1465 "TARGET_32BIT && arm_arch6"
1466 "umull%?\\t%3, %0, %2, %1"
1467 [(set_attr "insn" "umull")
1468 (set_attr "predicable" "yes")]
1471 (define_insn "mulhisi3"
1472 [(set (match_operand:SI 0 "s_register_operand" "=r")
1473 (mult:SI (sign_extend:SI
1474 (match_operand:HI 1 "s_register_operand" "%r"))
1476 (match_operand:HI 2 "s_register_operand" "r"))))]
1477 "TARGET_DSP_MULTIPLY"
1478 "smulbb%?\\t%0, %1, %2"
1479 [(set_attr "insn" "smulxy")
1480 (set_attr "predicable" "yes")]
1483 (define_insn "*mulhisi3tb"
1484 [(set (match_operand:SI 0 "s_register_operand" "=r")
1485 (mult:SI (ashiftrt:SI
1486 (match_operand:SI 1 "s_register_operand" "r")
1489 (match_operand:HI 2 "s_register_operand" "r"))))]
1490 "TARGET_DSP_MULTIPLY"
1491 "smultb%?\\t%0, %1, %2"
1492 [(set_attr "insn" "smulxy")
1493 (set_attr "predicable" "yes")]
1496 (define_insn "*mulhisi3bt"
1497 [(set (match_operand:SI 0 "s_register_operand" "=r")
1498 (mult:SI (sign_extend:SI
1499 (match_operand:HI 1 "s_register_operand" "r"))
1501 (match_operand:SI 2 "s_register_operand" "r")
1503 "TARGET_DSP_MULTIPLY"
1504 "smulbt%?\\t%0, %1, %2"
1505 [(set_attr "insn" "smulxy")
1506 (set_attr "predicable" "yes")]
1509 (define_insn "*mulhisi3tt"
1510 [(set (match_operand:SI 0 "s_register_operand" "=r")
1511 (mult:SI (ashiftrt:SI
1512 (match_operand:SI 1 "s_register_operand" "r")
1515 (match_operand:SI 2 "s_register_operand" "r")
1517 "TARGET_DSP_MULTIPLY"
1518 "smultt%?\\t%0, %1, %2"
1519 [(set_attr "insn" "smulxy")
1520 (set_attr "predicable" "yes")]
1523 (define_insn "*mulhisi3addsi"
1524 [(set (match_operand:SI 0 "s_register_operand" "=r")
1525 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1526 (mult:SI (sign_extend:SI
1527 (match_operand:HI 2 "s_register_operand" "%r"))
1529 (match_operand:HI 3 "s_register_operand" "r")))))]
1530 "TARGET_DSP_MULTIPLY"
1531 "smlabb%?\\t%0, %2, %3, %1"
1532 [(set_attr "insn" "smlaxy")
1533 (set_attr "predicable" "yes")]
1536 (define_insn "*mulhidi3adddi"
1537 [(set (match_operand:DI 0 "s_register_operand" "=r")
1539 (match_operand:DI 1 "s_register_operand" "0")
1540 (mult:DI (sign_extend:DI
1541 (match_operand:HI 2 "s_register_operand" "%r"))
1543 (match_operand:HI 3 "s_register_operand" "r")))))]
1544 "TARGET_DSP_MULTIPLY"
1545 "smlalbb%?\\t%Q0, %R0, %2, %3"
1546 [(set_attr "insn" "smlalxy")
1547 (set_attr "predicable" "yes")])
1549 (define_expand "mulsf3"
1550 [(set (match_operand:SF 0 "s_register_operand" "")
1551 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1552 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1553 "TARGET_32BIT && TARGET_HARD_FLOAT"
1556 && !cirrus_fp_register (operands[2], SFmode))
1557 operands[2] = force_reg (SFmode, operands[2]);
1560 (define_expand "muldf3"
1561 [(set (match_operand:DF 0 "s_register_operand" "")
1562 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1563 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1564 "TARGET_32BIT && TARGET_HARD_FLOAT"
1567 && !cirrus_fp_register (operands[2], DFmode))
1568 operands[2] = force_reg (DFmode, operands[2]);
1573 (define_expand "divsf3"
1574 [(set (match_operand:SF 0 "s_register_operand" "")
1575 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1576 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1577 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1580 (define_expand "divdf3"
1581 [(set (match_operand:DF 0 "s_register_operand" "")
1582 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1583 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1584 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1589 (define_expand "modsf3"
1590 [(set (match_operand:SF 0 "s_register_operand" "")
1591 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1592 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1593 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1596 (define_expand "moddf3"
1597 [(set (match_operand:DF 0 "s_register_operand" "")
1598 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1599 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1600 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1603 ;; Boolean and,ior,xor insns
1605 ;; Split up double word logical operations
1607 ;; Split up simple DImode logical operations. Simply perform the logical
1608 ;; operation on the upper and lower halves of the registers.
1610 [(set (match_operand:DI 0 "s_register_operand" "")
1611 (match_operator:DI 6 "logical_binary_operator"
1612 [(match_operand:DI 1 "s_register_operand" "")
1613 (match_operand:DI 2 "s_register_operand" "")]))]
1614 "TARGET_32BIT && reload_completed
1615 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1616 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1617 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1620 operands[3] = gen_highpart (SImode, operands[0]);
1621 operands[0] = gen_lowpart (SImode, operands[0]);
1622 operands[4] = gen_highpart (SImode, operands[1]);
1623 operands[1] = gen_lowpart (SImode, operands[1]);
1624 operands[5] = gen_highpart (SImode, operands[2]);
1625 operands[2] = gen_lowpart (SImode, operands[2]);
1630 [(set (match_operand:DI 0 "s_register_operand" "")
1631 (match_operator:DI 6 "logical_binary_operator"
1632 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1633 (match_operand:DI 1 "s_register_operand" "")]))]
1634 "TARGET_32BIT && reload_completed"
1635 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1636 (set (match_dup 3) (match_op_dup:SI 6
1637 [(ashiftrt:SI (match_dup 2) (const_int 31))
1641 operands[3] = gen_highpart (SImode, operands[0]);
1642 operands[0] = gen_lowpart (SImode, operands[0]);
1643 operands[4] = gen_highpart (SImode, operands[1]);
1644 operands[1] = gen_lowpart (SImode, operands[1]);
1645 operands[5] = gen_highpart (SImode, operands[2]);
1646 operands[2] = gen_lowpart (SImode, operands[2]);
1650 ;; The zero extend of operand 2 means we can just copy the high part of
1651 ;; operand1 into operand0.
1653 [(set (match_operand:DI 0 "s_register_operand" "")
1655 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1656 (match_operand:DI 1 "s_register_operand" "")))]
1657 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1658 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1659 (set (match_dup 3) (match_dup 4))]
1662 operands[4] = gen_highpart (SImode, operands[1]);
1663 operands[3] = gen_highpart (SImode, operands[0]);
1664 operands[0] = gen_lowpart (SImode, operands[0]);
1665 operands[1] = gen_lowpart (SImode, operands[1]);
1669 ;; The zero extend of operand 2 means we can just copy the high part of
1670 ;; operand1 into operand0.
1672 [(set (match_operand:DI 0 "s_register_operand" "")
1674 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1675 (match_operand:DI 1 "s_register_operand" "")))]
1676 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1677 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1678 (set (match_dup 3) (match_dup 4))]
1681 operands[4] = gen_highpart (SImode, operands[1]);
1682 operands[3] = gen_highpart (SImode, operands[0]);
1683 operands[0] = gen_lowpart (SImode, operands[0]);
1684 operands[1] = gen_lowpart (SImode, operands[1]);
1688 (define_insn "anddi3"
1689 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1690 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1691 (match_operand:DI 2 "s_register_operand" "r,r")))]
1692 "TARGET_32BIT && ! TARGET_IWMMXT"
1694 [(set_attr "length" "8")]
1697 (define_insn_and_split "*anddi_zesidi_di"
1698 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1699 (and:DI (zero_extend:DI
1700 (match_operand:SI 2 "s_register_operand" "r,r"))
1701 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1704 "TARGET_32BIT && reload_completed"
1705 ; The zero extend of operand 2 clears the high word of the output
1707 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1708 (set (match_dup 3) (const_int 0))]
1711 operands[3] = gen_highpart (SImode, operands[0]);
1712 operands[0] = gen_lowpart (SImode, operands[0]);
1713 operands[1] = gen_lowpart (SImode, operands[1]);
1715 [(set_attr "length" "8")]
1718 (define_insn "*anddi_sesdi_di"
1719 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1720 (and:DI (sign_extend:DI
1721 (match_operand:SI 2 "s_register_operand" "r,r"))
1722 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1725 [(set_attr "length" "8")]
1728 (define_expand "andsi3"
1729 [(set (match_operand:SI 0 "s_register_operand" "")
1730 (and:SI (match_operand:SI 1 "s_register_operand" "")
1731 (match_operand:SI 2 "reg_or_int_operand" "")))]
1736 if (GET_CODE (operands[2]) == CONST_INT)
1738 arm_split_constant (AND, SImode, NULL_RTX,
1739 INTVAL (operands[2]), operands[0],
1740 operands[1], optimize && can_create_pseudo_p ());
1745 else /* TARGET_THUMB1 */
1747 if (GET_CODE (operands[2]) != CONST_INT)
1748 operands[2] = force_reg (SImode, operands[2]);
1753 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1755 operands[2] = force_reg (SImode,
1756 GEN_INT (~INTVAL (operands[2])));
1758 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1763 for (i = 9; i <= 31; i++)
1765 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1767 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1771 else if ((((HOST_WIDE_INT) 1) << i) - 1
1772 == ~INTVAL (operands[2]))
1774 rtx shift = GEN_INT (i);
1775 rtx reg = gen_reg_rtx (SImode);
1777 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1778 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1784 operands[2] = force_reg (SImode, operands[2]);
1790 ; ??? Check split length for Thumb-2
1791 (define_insn_and_split "*arm_andsi3_insn"
1792 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1793 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1794 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1798 bic%?\\t%0, %1, #%B2
1801 && GET_CODE (operands[2]) == CONST_INT
1802 && !(const_ok_for_arm (INTVAL (operands[2]))
1803 || const_ok_for_arm (~INTVAL (operands[2])))"
1804 [(clobber (const_int 0))]
1806 arm_split_constant (AND, SImode, curr_insn,
1807 INTVAL (operands[2]), operands[0], operands[1], 0);
1810 [(set_attr "length" "4,4,16")
1811 (set_attr "predicable" "yes")]
1814 (define_insn "*thumb1_andsi3_insn"
1815 [(set (match_operand:SI 0 "register_operand" "=l")
1816 (and:SI (match_operand:SI 1 "register_operand" "%0")
1817 (match_operand:SI 2 "register_operand" "l")))]
1820 [(set_attr "length" "2")]
1823 (define_insn "*andsi3_compare0"
1824 [(set (reg:CC_NOOV CC_REGNUM)
1826 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1827 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1829 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1830 (and:SI (match_dup 1) (match_dup 2)))]
1834 bic%.\\t%0, %1, #%B2"
1835 [(set_attr "conds" "set")]
1838 (define_insn "*andsi3_compare0_scratch"
1839 [(set (reg:CC_NOOV CC_REGNUM)
1841 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1842 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1844 (clobber (match_scratch:SI 2 "=X,r"))]
1848 bic%.\\t%2, %0, #%B1"
1849 [(set_attr "conds" "set")]
1852 (define_insn "*zeroextractsi_compare0_scratch"
1853 [(set (reg:CC_NOOV CC_REGNUM)
1854 (compare:CC_NOOV (zero_extract:SI
1855 (match_operand:SI 0 "s_register_operand" "r")
1856 (match_operand 1 "const_int_operand" "n")
1857 (match_operand 2 "const_int_operand" "n"))
1860 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1861 && INTVAL (operands[1]) > 0
1862 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1863 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1865 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1866 << INTVAL (operands[2]));
1867 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1870 [(set_attr "conds" "set")]
1873 (define_insn_and_split "*ne_zeroextractsi"
1874 [(set (match_operand:SI 0 "s_register_operand" "=r")
1875 (ne:SI (zero_extract:SI
1876 (match_operand:SI 1 "s_register_operand" "r")
1877 (match_operand:SI 2 "const_int_operand" "n")
1878 (match_operand:SI 3 "const_int_operand" "n"))
1880 (clobber (reg:CC CC_REGNUM))]
1882 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1883 && INTVAL (operands[2]) > 0
1884 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1885 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1888 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1889 && INTVAL (operands[2]) > 0
1890 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1891 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1892 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1893 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1895 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1897 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1898 (match_dup 0) (const_int 1)))]
1900 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1901 << INTVAL (operands[3]));
1903 [(set_attr "conds" "clob")
1904 (set (attr "length")
1905 (if_then_else (eq_attr "is_thumb" "yes")
1910 (define_insn_and_split "*ne_zeroextractsi_shifted"
1911 [(set (match_operand:SI 0 "s_register_operand" "=r")
1912 (ne:SI (zero_extract:SI
1913 (match_operand:SI 1 "s_register_operand" "r")
1914 (match_operand:SI 2 "const_int_operand" "n")
1917 (clobber (reg:CC CC_REGNUM))]
1921 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1922 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1924 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1926 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1927 (match_dup 0) (const_int 1)))]
1929 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1931 [(set_attr "conds" "clob")
1932 (set_attr "length" "8")]
1935 (define_insn_and_split "*ite_ne_zeroextractsi"
1936 [(set (match_operand:SI 0 "s_register_operand" "=r")
1937 (if_then_else:SI (ne (zero_extract:SI
1938 (match_operand:SI 1 "s_register_operand" "r")
1939 (match_operand:SI 2 "const_int_operand" "n")
1940 (match_operand:SI 3 "const_int_operand" "n"))
1942 (match_operand:SI 4 "arm_not_operand" "rIK")
1944 (clobber (reg:CC CC_REGNUM))]
1946 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1947 && INTVAL (operands[2]) > 0
1948 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1949 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1950 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1953 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1954 && INTVAL (operands[2]) > 0
1955 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1956 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
1957 && !reg_overlap_mentioned_p (operands[0], operands[4])"
1958 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1959 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1961 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1963 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1964 (match_dup 0) (match_dup 4)))]
1966 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1967 << INTVAL (operands[3]));
1969 [(set_attr "conds" "clob")
1970 (set_attr "length" "8")]
1973 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
1974 [(set (match_operand:SI 0 "s_register_operand" "=r")
1975 (if_then_else:SI (ne (zero_extract:SI
1976 (match_operand:SI 1 "s_register_operand" "r")
1977 (match_operand:SI 2 "const_int_operand" "n")
1980 (match_operand:SI 3 "arm_not_operand" "rIK")
1982 (clobber (reg:CC CC_REGNUM))]
1983 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1985 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
1986 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1987 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1989 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1991 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1992 (match_dup 0) (match_dup 3)))]
1994 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1996 [(set_attr "conds" "clob")
1997 (set_attr "length" "8")]
2001 [(set (match_operand:SI 0 "s_register_operand" "")
2002 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2003 (match_operand:SI 2 "const_int_operand" "")
2004 (match_operand:SI 3 "const_int_operand" "")))
2005 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2007 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2008 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2010 HOST_WIDE_INT temp = INTVAL (operands[2]);
2012 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2013 operands[3] = GEN_INT (32 - temp);
2017 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2019 [(set (match_operand:SI 0 "s_register_operand" "")
2020 (match_operator:SI 1 "shiftable_operator"
2021 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2022 (match_operand:SI 3 "const_int_operand" "")
2023 (match_operand:SI 4 "const_int_operand" ""))
2024 (match_operand:SI 5 "s_register_operand" "")]))
2025 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2027 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2030 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2033 HOST_WIDE_INT temp = INTVAL (operands[3]);
2035 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2036 operands[4] = GEN_INT (32 - temp);
2041 [(set (match_operand:SI 0 "s_register_operand" "")
2042 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2043 (match_operand:SI 2 "const_int_operand" "")
2044 (match_operand:SI 3 "const_int_operand" "")))]
2046 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2047 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2049 HOST_WIDE_INT temp = INTVAL (operands[2]);
2051 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2052 operands[3] = GEN_INT (32 - temp);
2057 [(set (match_operand:SI 0 "s_register_operand" "")
2058 (match_operator:SI 1 "shiftable_operator"
2059 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2060 (match_operand:SI 3 "const_int_operand" "")
2061 (match_operand:SI 4 "const_int_operand" ""))
2062 (match_operand:SI 5 "s_register_operand" "")]))
2063 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2065 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2068 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2071 HOST_WIDE_INT temp = INTVAL (operands[3]);
2073 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2074 operands[4] = GEN_INT (32 - temp);
2078 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2079 ;;; represented by the bitfield, then this will produce incorrect results.
2080 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2081 ;;; which have a real bit-field insert instruction, the truncation happens
2082 ;;; in the bit-field insert instruction itself. Since arm does not have a
2083 ;;; bit-field insert instruction, we would have to emit code here to truncate
2084 ;;; the value before we insert. This loses some of the advantage of having
2085 ;;; this insv pattern, so this pattern needs to be reevalutated.
2087 ; ??? Use Thumb-2 bitfield insert/extract instructions
2088 (define_expand "insv"
2089 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2090 (match_operand:SI 1 "general_operand" "")
2091 (match_operand:SI 2 "general_operand" ""))
2092 (match_operand:SI 3 "reg_or_int_operand" ""))]
2096 int start_bit = INTVAL (operands[2]);
2097 int width = INTVAL (operands[1]);
2098 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2099 rtx target, subtarget;
2101 target = operands[0];
2102 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2103 subreg as the final target. */
2104 if (GET_CODE (target) == SUBREG)
2106 subtarget = gen_reg_rtx (SImode);
2107 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2108 < GET_MODE_SIZE (SImode))
2109 target = SUBREG_REG (target);
2114 if (GET_CODE (operands[3]) == CONST_INT)
2116 /* Since we are inserting a known constant, we may be able to
2117 reduce the number of bits that we have to clear so that
2118 the mask becomes simple. */
2119 /* ??? This code does not check to see if the new mask is actually
2120 simpler. It may not be. */
2121 rtx op1 = gen_reg_rtx (SImode);
2122 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2123 start of this pattern. */
2124 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2125 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2127 emit_insn (gen_andsi3 (op1, operands[0],
2128 gen_int_mode (~mask2, SImode)));
2129 emit_insn (gen_iorsi3 (subtarget, op1,
2130 gen_int_mode (op3_value << start_bit, SImode)));
2132 else if (start_bit == 0
2133 && !(const_ok_for_arm (mask)
2134 || const_ok_for_arm (~mask)))
2136 /* A Trick, since we are setting the bottom bits in the word,
2137 we can shift operand[3] up, operand[0] down, OR them together
2138 and rotate the result back again. This takes 3 insns, and
2139 the third might be mergeable into another op. */
2140 /* The shift up copes with the possibility that operand[3] is
2141 wider than the bitfield. */
2142 rtx op0 = gen_reg_rtx (SImode);
2143 rtx op1 = gen_reg_rtx (SImode);
2145 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2146 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2147 emit_insn (gen_iorsi3 (op1, op1, op0));
2148 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2150 else if ((width + start_bit == 32)
2151 && !(const_ok_for_arm (mask)
2152 || const_ok_for_arm (~mask)))
2154 /* Similar trick, but slightly less efficient. */
2156 rtx op0 = gen_reg_rtx (SImode);
2157 rtx op1 = gen_reg_rtx (SImode);
2159 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2160 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2161 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2162 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2166 rtx op0 = gen_int_mode (mask, SImode);
2167 rtx op1 = gen_reg_rtx (SImode);
2168 rtx op2 = gen_reg_rtx (SImode);
2170 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2172 rtx tmp = gen_reg_rtx (SImode);
2174 emit_insn (gen_movsi (tmp, op0));
2178 /* Mask out any bits in operand[3] that are not needed. */
2179 emit_insn (gen_andsi3 (op1, operands[3], op0));
2181 if (GET_CODE (op0) == CONST_INT
2182 && (const_ok_for_arm (mask << start_bit)
2183 || const_ok_for_arm (~(mask << start_bit))))
2185 op0 = gen_int_mode (~(mask << start_bit), SImode);
2186 emit_insn (gen_andsi3 (op2, operands[0], op0));
2190 if (GET_CODE (op0) == CONST_INT)
2192 rtx tmp = gen_reg_rtx (SImode);
2194 emit_insn (gen_movsi (tmp, op0));
2199 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2201 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2205 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2207 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2210 if (subtarget != target)
2212 /* If TARGET is still a SUBREG, then it must be wider than a word,
2213 so we must be careful only to set the subword we were asked to. */
2214 if (GET_CODE (target) == SUBREG)
2215 emit_move_insn (target, subtarget);
2217 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2224 ; constants for op 2 will never be given to these patterns.
2225 (define_insn_and_split "*anddi_notdi_di"
2226 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2227 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2228 (match_operand:DI 2 "s_register_operand" "0,r")))]
2231 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2232 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2233 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2236 operands[3] = gen_highpart (SImode, operands[0]);
2237 operands[0] = gen_lowpart (SImode, operands[0]);
2238 operands[4] = gen_highpart (SImode, operands[1]);
2239 operands[1] = gen_lowpart (SImode, operands[1]);
2240 operands[5] = gen_highpart (SImode, operands[2]);
2241 operands[2] = gen_lowpart (SImode, operands[2]);
2243 [(set_attr "length" "8")
2244 (set_attr "predicable" "yes")]
2247 (define_insn_and_split "*anddi_notzesidi_di"
2248 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2249 (and:DI (not:DI (zero_extend:DI
2250 (match_operand:SI 2 "s_register_operand" "r,r")))
2251 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2254 bic%?\\t%Q0, %Q1, %2
2256 ; (not (zero_extend ...)) allows us to just copy the high word from
2257 ; operand1 to operand0.
2260 && operands[0] != operands[1]"
2261 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2262 (set (match_dup 3) (match_dup 4))]
2265 operands[3] = gen_highpart (SImode, operands[0]);
2266 operands[0] = gen_lowpart (SImode, operands[0]);
2267 operands[4] = gen_highpart (SImode, operands[1]);
2268 operands[1] = gen_lowpart (SImode, operands[1]);
2270 [(set_attr "length" "4,8")
2271 (set_attr "predicable" "yes")]
2274 (define_insn_and_split "*anddi_notsesidi_di"
2275 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2276 (and:DI (not:DI (sign_extend:DI
2277 (match_operand:SI 2 "s_register_operand" "r,r")))
2278 (match_operand:DI 1 "s_register_operand" "0,r")))]
2281 "TARGET_32BIT && reload_completed"
2282 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2283 (set (match_dup 3) (and:SI (not:SI
2284 (ashiftrt:SI (match_dup 2) (const_int 31)))
2288 operands[3] = gen_highpart (SImode, operands[0]);
2289 operands[0] = gen_lowpart (SImode, operands[0]);
2290 operands[4] = gen_highpart (SImode, operands[1]);
2291 operands[1] = gen_lowpart (SImode, operands[1]);
2293 [(set_attr "length" "8")
2294 (set_attr "predicable" "yes")]
2297 (define_insn "andsi_notsi_si"
2298 [(set (match_operand:SI 0 "s_register_operand" "=r")
2299 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2300 (match_operand:SI 1 "s_register_operand" "r")))]
2302 "bic%?\\t%0, %1, %2"
2303 [(set_attr "predicable" "yes")]
2306 (define_insn "bicsi3"
2307 [(set (match_operand:SI 0 "register_operand" "=l")
2308 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2309 (match_operand:SI 2 "register_operand" "0")))]
2312 [(set_attr "length" "2")]
2315 (define_insn "andsi_not_shiftsi_si"
2316 [(set (match_operand:SI 0 "s_register_operand" "=r")
2317 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2318 [(match_operand:SI 2 "s_register_operand" "r")
2319 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2320 (match_operand:SI 1 "s_register_operand" "r")))]
2322 "bic%?\\t%0, %1, %2%S4"
2323 [(set_attr "predicable" "yes")
2324 (set_attr "shift" "2")
2325 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2326 (const_string "alu_shift")
2327 (const_string "alu_shift_reg")))]
2330 (define_insn "*andsi_notsi_si_compare0"
2331 [(set (reg:CC_NOOV CC_REGNUM)
2333 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2334 (match_operand:SI 1 "s_register_operand" "r"))
2336 (set (match_operand:SI 0 "s_register_operand" "=r")
2337 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2339 "bic%.\\t%0, %1, %2"
2340 [(set_attr "conds" "set")]
2343 (define_insn "*andsi_notsi_si_compare0_scratch"
2344 [(set (reg:CC_NOOV CC_REGNUM)
2346 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2347 (match_operand:SI 1 "s_register_operand" "r"))
2349 (clobber (match_scratch:SI 0 "=r"))]
2351 "bic%.\\t%0, %1, %2"
2352 [(set_attr "conds" "set")]
2355 (define_insn "iordi3"
2356 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2357 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2358 (match_operand:DI 2 "s_register_operand" "r,r")))]
2359 "TARGET_32BIT && ! TARGET_IWMMXT"
2361 [(set_attr "length" "8")
2362 (set_attr "predicable" "yes")]
2365 (define_insn "*iordi_zesidi_di"
2366 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2367 (ior:DI (zero_extend:DI
2368 (match_operand:SI 2 "s_register_operand" "r,r"))
2369 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2372 orr%?\\t%Q0, %Q1, %2
2374 [(set_attr "length" "4,8")
2375 (set_attr "predicable" "yes")]
2378 (define_insn "*iordi_sesidi_di"
2379 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2380 (ior:DI (sign_extend:DI
2381 (match_operand:SI 2 "s_register_operand" "r,r"))
2382 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2385 [(set_attr "length" "8")
2386 (set_attr "predicable" "yes")]
2389 (define_expand "iorsi3"
2390 [(set (match_operand:SI 0 "s_register_operand" "")
2391 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2392 (match_operand:SI 2 "reg_or_int_operand" "")))]
2395 if (GET_CODE (operands[2]) == CONST_INT)
2399 arm_split_constant (IOR, SImode, NULL_RTX,
2400 INTVAL (operands[2]), operands[0], operands[1],
2401 optimize && can_create_pseudo_p ());
2404 else /* TARGET_THUMB1 */
2405 operands [2] = force_reg (SImode, operands [2]);
2410 (define_insn_and_split "*arm_iorsi3"
2411 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2412 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2413 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2419 && GET_CODE (operands[2]) == CONST_INT
2420 && !const_ok_for_arm (INTVAL (operands[2]))"
2421 [(clobber (const_int 0))]
2423 arm_split_constant (IOR, SImode, curr_insn,
2424 INTVAL (operands[2]), operands[0], operands[1], 0);
2427 [(set_attr "length" "4,16")
2428 (set_attr "predicable" "yes")]
2431 (define_insn "*thumb1_iorsi3"
2432 [(set (match_operand:SI 0 "register_operand" "=l")
2433 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2434 (match_operand:SI 2 "register_operand" "l")))]
2437 [(set_attr "length" "2")]
2441 [(match_scratch:SI 3 "r")
2442 (set (match_operand:SI 0 "arm_general_register_operand" "")
2443 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2444 (match_operand:SI 2 "const_int_operand" "")))]
2446 && !const_ok_for_arm (INTVAL (operands[2]))
2447 && const_ok_for_arm (~INTVAL (operands[2]))"
2448 [(set (match_dup 3) (match_dup 2))
2449 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2453 (define_insn "*iorsi3_compare0"
2454 [(set (reg:CC_NOOV CC_REGNUM)
2455 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2456 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2458 (set (match_operand:SI 0 "s_register_operand" "=r")
2459 (ior:SI (match_dup 1) (match_dup 2)))]
2461 "orr%.\\t%0, %1, %2"
2462 [(set_attr "conds" "set")]
2465 (define_insn "*iorsi3_compare0_scratch"
2466 [(set (reg:CC_NOOV CC_REGNUM)
2467 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2468 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2470 (clobber (match_scratch:SI 0 "=r"))]
2472 "orr%.\\t%0, %1, %2"
2473 [(set_attr "conds" "set")]
2476 (define_insn "xordi3"
2477 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2478 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2479 (match_operand:DI 2 "s_register_operand" "r,r")))]
2480 "TARGET_32BIT && !TARGET_IWMMXT"
2482 [(set_attr "length" "8")
2483 (set_attr "predicable" "yes")]
2486 (define_insn "*xordi_zesidi_di"
2487 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2488 (xor:DI (zero_extend:DI
2489 (match_operand:SI 2 "s_register_operand" "r,r"))
2490 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2493 eor%?\\t%Q0, %Q1, %2
2495 [(set_attr "length" "4,8")
2496 (set_attr "predicable" "yes")]
2499 (define_insn "*xordi_sesidi_di"
2500 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2501 (xor:DI (sign_extend:DI
2502 (match_operand:SI 2 "s_register_operand" "r,r"))
2503 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2506 [(set_attr "length" "8")
2507 (set_attr "predicable" "yes")]
2510 (define_expand "xorsi3"
2511 [(set (match_operand:SI 0 "s_register_operand" "")
2512 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2513 (match_operand:SI 2 "arm_rhs_operand" "")))]
2516 if (GET_CODE (operands[2]) == CONST_INT)
2517 operands[2] = force_reg (SImode, operands[2]);
2521 (define_insn "*arm_xorsi3"
2522 [(set (match_operand:SI 0 "s_register_operand" "=r")
2523 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2524 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2526 "eor%?\\t%0, %1, %2"
2527 [(set_attr "predicable" "yes")]
2530 (define_insn "*thumb1_xorsi3"
2531 [(set (match_operand:SI 0 "register_operand" "=l")
2532 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2533 (match_operand:SI 2 "register_operand" "l")))]
2536 [(set_attr "length" "2")]
2539 (define_insn "*xorsi3_compare0"
2540 [(set (reg:CC_NOOV CC_REGNUM)
2541 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2542 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2544 (set (match_operand:SI 0 "s_register_operand" "=r")
2545 (xor:SI (match_dup 1) (match_dup 2)))]
2547 "eor%.\\t%0, %1, %2"
2548 [(set_attr "conds" "set")]
2551 (define_insn "*xorsi3_compare0_scratch"
2552 [(set (reg:CC_NOOV CC_REGNUM)
2553 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2554 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2558 [(set_attr "conds" "set")]
2561 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2562 ; (NOT D) we can sometimes merge the final NOT into one of the following
2566 [(set (match_operand:SI 0 "s_register_operand" "")
2567 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2568 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2569 (match_operand:SI 3 "arm_rhs_operand" "")))
2570 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2572 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2573 (not:SI (match_dup 3))))
2574 (set (match_dup 0) (not:SI (match_dup 4)))]
2578 (define_insn "*andsi_iorsi3_notsi"
2579 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2580 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2581 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2582 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2584 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2585 [(set_attr "length" "8")
2586 (set_attr "ce_count" "2")
2587 (set_attr "predicable" "yes")]
2590 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2591 ; insns are available?
2593 [(set (match_operand:SI 0 "s_register_operand" "")
2594 (match_operator:SI 1 "logical_binary_operator"
2595 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2596 (match_operand:SI 3 "const_int_operand" "")
2597 (match_operand:SI 4 "const_int_operand" ""))
2598 (match_operator:SI 9 "logical_binary_operator"
2599 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2600 (match_operand:SI 6 "const_int_operand" ""))
2601 (match_operand:SI 7 "s_register_operand" "")])]))
2602 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2604 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2605 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2608 [(ashift:SI (match_dup 2) (match_dup 4))
2612 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2615 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2619 [(set (match_operand:SI 0 "s_register_operand" "")
2620 (match_operator:SI 1 "logical_binary_operator"
2621 [(match_operator:SI 9 "logical_binary_operator"
2622 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2623 (match_operand:SI 6 "const_int_operand" ""))
2624 (match_operand:SI 7 "s_register_operand" "")])
2625 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2626 (match_operand:SI 3 "const_int_operand" "")
2627 (match_operand:SI 4 "const_int_operand" ""))]))
2628 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2630 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2631 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2634 [(ashift:SI (match_dup 2) (match_dup 4))
2638 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2641 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2645 [(set (match_operand:SI 0 "s_register_operand" "")
2646 (match_operator:SI 1 "logical_binary_operator"
2647 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2648 (match_operand:SI 3 "const_int_operand" "")
2649 (match_operand:SI 4 "const_int_operand" ""))
2650 (match_operator:SI 9 "logical_binary_operator"
2651 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2652 (match_operand:SI 6 "const_int_operand" ""))
2653 (match_operand:SI 7 "s_register_operand" "")])]))
2654 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2656 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2657 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2660 [(ashift:SI (match_dup 2) (match_dup 4))
2664 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2667 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2671 [(set (match_operand:SI 0 "s_register_operand" "")
2672 (match_operator:SI 1 "logical_binary_operator"
2673 [(match_operator:SI 9 "logical_binary_operator"
2674 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2675 (match_operand:SI 6 "const_int_operand" ""))
2676 (match_operand:SI 7 "s_register_operand" "")])
2677 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2678 (match_operand:SI 3 "const_int_operand" "")
2679 (match_operand:SI 4 "const_int_operand" ""))]))
2680 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2682 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2683 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2686 [(ashift:SI (match_dup 2) (match_dup 4))
2690 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2693 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2697 ;; Minimum and maximum insns
2699 (define_expand "smaxsi3"
2701 (set (match_operand:SI 0 "s_register_operand" "")
2702 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2703 (match_operand:SI 2 "arm_rhs_operand" "")))
2704 (clobber (reg:CC CC_REGNUM))])]
2707 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2709 /* No need for a clobber of the condition code register here. */
2710 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2711 gen_rtx_SMAX (SImode, operands[1],
2717 (define_insn "*smax_0"
2718 [(set (match_operand:SI 0 "s_register_operand" "=r")
2719 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2722 "bic%?\\t%0, %1, %1, asr #31"
2723 [(set_attr "predicable" "yes")]
2726 (define_insn "*smax_m1"
2727 [(set (match_operand:SI 0 "s_register_operand" "=r")
2728 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2731 "orr%?\\t%0, %1, %1, asr #31"
2732 [(set_attr "predicable" "yes")]
2735 (define_insn "*arm_smax_insn"
2736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2737 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2738 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2739 (clobber (reg:CC CC_REGNUM))]
2742 cmp\\t%1, %2\;movlt\\t%0, %2
2743 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2744 [(set_attr "conds" "clob")
2745 (set_attr "length" "8,12")]
2748 (define_expand "sminsi3"
2750 (set (match_operand:SI 0 "s_register_operand" "")
2751 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2752 (match_operand:SI 2 "arm_rhs_operand" "")))
2753 (clobber (reg:CC CC_REGNUM))])]
2756 if (operands[2] == const0_rtx)
2758 /* No need for a clobber of the condition code register here. */
2759 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2760 gen_rtx_SMIN (SImode, operands[1],
2766 (define_insn "*smin_0"
2767 [(set (match_operand:SI 0 "s_register_operand" "=r")
2768 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2771 "and%?\\t%0, %1, %1, asr #31"
2772 [(set_attr "predicable" "yes")]
2775 (define_insn "*arm_smin_insn"
2776 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2777 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2778 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2779 (clobber (reg:CC CC_REGNUM))]
2782 cmp\\t%1, %2\;movge\\t%0, %2
2783 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2784 [(set_attr "conds" "clob")
2785 (set_attr "length" "8,12")]
2788 (define_expand "umaxsi3"
2790 (set (match_operand:SI 0 "s_register_operand" "")
2791 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2792 (match_operand:SI 2 "arm_rhs_operand" "")))
2793 (clobber (reg:CC CC_REGNUM))])]
2798 (define_insn "*arm_umaxsi3"
2799 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2800 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2801 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2802 (clobber (reg:CC CC_REGNUM))]
2805 cmp\\t%1, %2\;movcc\\t%0, %2
2806 cmp\\t%1, %2\;movcs\\t%0, %1
2807 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2808 [(set_attr "conds" "clob")
2809 (set_attr "length" "8,8,12")]
2812 (define_expand "uminsi3"
2814 (set (match_operand:SI 0 "s_register_operand" "")
2815 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2816 (match_operand:SI 2 "arm_rhs_operand" "")))
2817 (clobber (reg:CC CC_REGNUM))])]
2822 (define_insn "*arm_uminsi3"
2823 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2824 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2825 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2826 (clobber (reg:CC CC_REGNUM))]
2829 cmp\\t%1, %2\;movcs\\t%0, %2
2830 cmp\\t%1, %2\;movcc\\t%0, %1
2831 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2832 [(set_attr "conds" "clob")
2833 (set_attr "length" "8,8,12")]
2836 (define_insn "*store_minmaxsi"
2837 [(set (match_operand:SI 0 "memory_operand" "=m")
2838 (match_operator:SI 3 "minmax_operator"
2839 [(match_operand:SI 1 "s_register_operand" "r")
2840 (match_operand:SI 2 "s_register_operand" "r")]))
2841 (clobber (reg:CC CC_REGNUM))]
2844 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2845 operands[1], operands[2]);
2846 output_asm_insn (\"cmp\\t%1, %2\", operands);
2848 output_asm_insn (\"ite\t%d3\", operands);
2849 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2850 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2853 [(set_attr "conds" "clob")
2854 (set (attr "length")
2855 (if_then_else (eq_attr "is_thumb" "yes")
2858 (set_attr "type" "store1")]
2861 ; Reject the frame pointer in operand[1], since reloading this after
2862 ; it has been eliminated can cause carnage.
2863 (define_insn "*minmax_arithsi"
2864 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2865 (match_operator:SI 4 "shiftable_operator"
2866 [(match_operator:SI 5 "minmax_operator"
2867 [(match_operand:SI 2 "s_register_operand" "r,r")
2868 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2869 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2870 (clobber (reg:CC CC_REGNUM))]
2871 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2874 enum rtx_code code = GET_CODE (operands[4]);
2877 if (which_alternative != 0 || operands[3] != const0_rtx
2878 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2883 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2884 operands[2], operands[3]);
2885 output_asm_insn (\"cmp\\t%2, %3\", operands);
2889 output_asm_insn (\"ite\\t%d5\", operands);
2891 output_asm_insn (\"it\\t%d5\", operands);
2893 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
2895 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
2898 [(set_attr "conds" "clob")
2899 (set (attr "length")
2900 (if_then_else (eq_attr "is_thumb" "yes")
2906 ;; Shift and rotation insns
2908 (define_expand "ashldi3"
2909 [(set (match_operand:DI 0 "s_register_operand" "")
2910 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
2911 (match_operand:SI 2 "reg_or_int_operand" "")))]
2914 if (GET_CODE (operands[2]) == CONST_INT)
2916 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2918 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
2921 /* Ideally we shouldn't fail here if we could know that operands[1]
2922 ends up already living in an iwmmxt register. Otherwise it's
2923 cheaper to have the alternate code being generated than moving
2924 values to iwmmxt regs and back. */
2927 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
2932 (define_insn "arm_ashldi3_1bit"
2933 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2934 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2936 (clobber (reg:CC CC_REGNUM))]
2938 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
2939 [(set_attr "conds" "clob")
2940 (set_attr "length" "8")]
2943 (define_expand "ashlsi3"
2944 [(set (match_operand:SI 0 "s_register_operand" "")
2945 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
2946 (match_operand:SI 2 "arm_rhs_operand" "")))]
2949 if (GET_CODE (operands[2]) == CONST_INT
2950 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
2952 emit_insn (gen_movsi (operands[0], const0_rtx));
2958 (define_insn "*thumb1_ashlsi3"
2959 [(set (match_operand:SI 0 "register_operand" "=l,l")
2960 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
2961 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
2964 [(set_attr "length" "2")]
2967 (define_expand "ashrdi3"
2968 [(set (match_operand:DI 0 "s_register_operand" "")
2969 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
2970 (match_operand:SI 2 "reg_or_int_operand" "")))]
2973 if (GET_CODE (operands[2]) == CONST_INT)
2975 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
2977 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
2980 /* Ideally we shouldn't fail here if we could know that operands[1]
2981 ends up already living in an iwmmxt register. Otherwise it's
2982 cheaper to have the alternate code being generated than moving
2983 values to iwmmxt regs and back. */
2986 else if (!TARGET_REALLY_IWMMXT)
2991 (define_insn "arm_ashrdi3_1bit"
2992 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
2993 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
2995 (clobber (reg:CC CC_REGNUM))]
2997 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
2998 [(set_attr "conds" "clob")
2999 (set_attr "length" "8")]
3002 (define_expand "ashrsi3"
3003 [(set (match_operand:SI 0 "s_register_operand" "")
3004 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3005 (match_operand:SI 2 "arm_rhs_operand" "")))]
3008 if (GET_CODE (operands[2]) == CONST_INT
3009 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3010 operands[2] = GEN_INT (31);
3014 (define_insn "*thumb1_ashrsi3"
3015 [(set (match_operand:SI 0 "register_operand" "=l,l")
3016 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3017 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3020 [(set_attr "length" "2")]
3023 (define_expand "lshrdi3"
3024 [(set (match_operand:DI 0 "s_register_operand" "")
3025 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3026 (match_operand:SI 2 "reg_or_int_operand" "")))]
3029 if (GET_CODE (operands[2]) == CONST_INT)
3031 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3033 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3036 /* Ideally we shouldn't fail here if we could know that operands[1]
3037 ends up already living in an iwmmxt register. Otherwise it's
3038 cheaper to have the alternate code being generated than moving
3039 values to iwmmxt regs and back. */
3042 else if (!TARGET_REALLY_IWMMXT)
3047 (define_insn "arm_lshrdi3_1bit"
3048 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3049 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3051 (clobber (reg:CC CC_REGNUM))]
3053 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3054 [(set_attr "conds" "clob")
3055 (set_attr "length" "8")]
3058 (define_expand "lshrsi3"
3059 [(set (match_operand:SI 0 "s_register_operand" "")
3060 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3061 (match_operand:SI 2 "arm_rhs_operand" "")))]
3064 if (GET_CODE (operands[2]) == CONST_INT
3065 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3067 emit_insn (gen_movsi (operands[0], const0_rtx));
3073 (define_insn "*thumb1_lshrsi3"
3074 [(set (match_operand:SI 0 "register_operand" "=l,l")
3075 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3076 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3079 [(set_attr "length" "2")]
3082 (define_expand "rotlsi3"
3083 [(set (match_operand:SI 0 "s_register_operand" "")
3084 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3085 (match_operand:SI 2 "reg_or_int_operand" "")))]
3088 if (GET_CODE (operands[2]) == CONST_INT)
3089 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3092 rtx reg = gen_reg_rtx (SImode);
3093 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3099 (define_expand "rotrsi3"
3100 [(set (match_operand:SI 0 "s_register_operand" "")
3101 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3102 (match_operand:SI 2 "arm_rhs_operand" "")))]
3107 if (GET_CODE (operands[2]) == CONST_INT
3108 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3109 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3111 else /* TARGET_THUMB1 */
3113 if (GET_CODE (operands [2]) == CONST_INT)
3114 operands [2] = force_reg (SImode, operands[2]);
3119 (define_insn "*thumb1_rotrsi3"
3120 [(set (match_operand:SI 0 "register_operand" "=l")
3121 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3122 (match_operand:SI 2 "register_operand" "l")))]
3125 [(set_attr "length" "2")]
3128 (define_insn "*arm_shiftsi3"
3129 [(set (match_operand:SI 0 "s_register_operand" "=r")
3130 (match_operator:SI 3 "shift_operator"
3131 [(match_operand:SI 1 "s_register_operand" "r")
3132 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3134 "* return arm_output_shift(operands, 0);"
3135 [(set_attr "predicable" "yes")
3136 (set_attr "shift" "1")
3137 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3138 (const_string "alu_shift")
3139 (const_string "alu_shift_reg")))]
3142 (define_insn "*shiftsi3_compare0"
3143 [(set (reg:CC_NOOV CC_REGNUM)
3144 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3145 [(match_operand:SI 1 "s_register_operand" "r")
3146 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3148 (set (match_operand:SI 0 "s_register_operand" "=r")
3149 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3151 "* return arm_output_shift(operands, 1);"
3152 [(set_attr "conds" "set")
3153 (set_attr "shift" "1")
3154 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3155 (const_string "alu_shift")
3156 (const_string "alu_shift_reg")))]
3159 (define_insn "*shiftsi3_compare0_scratch"
3160 [(set (reg:CC_NOOV CC_REGNUM)
3161 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3162 [(match_operand:SI 1 "s_register_operand" "r")
3163 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3165 (clobber (match_scratch:SI 0 "=r"))]
3167 "* return arm_output_shift(operands, 1);"
3168 [(set_attr "conds" "set")
3169 (set_attr "shift" "1")]
3172 (define_insn "*arm_notsi_shiftsi"
3173 [(set (match_operand:SI 0 "s_register_operand" "=r")
3174 (not:SI (match_operator:SI 3 "shift_operator"
3175 [(match_operand:SI 1 "s_register_operand" "r")
3176 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3179 [(set_attr "predicable" "yes")
3180 (set_attr "shift" "1")
3181 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3182 (const_string "alu_shift")
3183 (const_string "alu_shift_reg")))]
3186 (define_insn "*arm_notsi_shiftsi_compare0"
3187 [(set (reg:CC_NOOV CC_REGNUM)
3188 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3189 [(match_operand:SI 1 "s_register_operand" "r")
3190 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3192 (set (match_operand:SI 0 "s_register_operand" "=r")
3193 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3196 [(set_attr "conds" "set")
3197 (set_attr "shift" "1")
3198 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3199 (const_string "alu_shift")
3200 (const_string "alu_shift_reg")))]
3203 (define_insn "*arm_not_shiftsi_compare0_scratch"
3204 [(set (reg:CC_NOOV CC_REGNUM)
3205 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3206 [(match_operand:SI 1 "s_register_operand" "r")
3207 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3209 (clobber (match_scratch:SI 0 "=r"))]
3212 [(set_attr "conds" "set")
3213 (set_attr "shift" "1")
3214 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3215 (const_string "alu_shift")
3216 (const_string "alu_shift_reg")))]
3219 ;; We don't really have extzv, but defining this using shifts helps
3220 ;; to reduce register pressure later on.
3222 (define_expand "extzv"
3224 (ashift:SI (match_operand:SI 1 "register_operand" "")
3225 (match_operand:SI 2 "const_int_operand" "")))
3226 (set (match_operand:SI 0 "register_operand" "")
3227 (lshiftrt:SI (match_dup 4)
3228 (match_operand:SI 3 "const_int_operand" "")))]
3232 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3233 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3235 operands[3] = GEN_INT (rshift);
3239 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3243 operands[2] = GEN_INT (lshift);
3244 operands[4] = gen_reg_rtx (SImode);
3249 ;; Unary arithmetic insns
3251 (define_expand "negdi2"
3253 [(set (match_operand:DI 0 "s_register_operand" "")
3254 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3255 (clobber (reg:CC CC_REGNUM))])]
3260 if (GET_CODE (operands[1]) != REG)
3261 operands[1] = force_reg (SImode, operands[1]);
3266 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3267 ;; The second alternative is to allow the common case of a *full* overlap.
3268 (define_insn "*arm_negdi2"
3269 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3270 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3271 (clobber (reg:CC CC_REGNUM))]
3273 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3274 [(set_attr "conds" "clob")
3275 (set_attr "length" "8")]
3278 (define_insn "*thumb1_negdi2"
3279 [(set (match_operand:DI 0 "register_operand" "=&l")
3280 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3281 (clobber (reg:CC CC_REGNUM))]
3283 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3284 [(set_attr "length" "6")]
3287 (define_expand "negsi2"
3288 [(set (match_operand:SI 0 "s_register_operand" "")
3289 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3294 (define_insn "*arm_negsi2"
3295 [(set (match_operand:SI 0 "s_register_operand" "=r")
3296 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3298 "rsb%?\\t%0, %1, #0"
3299 [(set_attr "predicable" "yes")]
3302 (define_insn "*thumb1_negsi2"
3303 [(set (match_operand:SI 0 "register_operand" "=l")
3304 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3307 [(set_attr "length" "2")]
3310 (define_expand "negsf2"
3311 [(set (match_operand:SF 0 "s_register_operand" "")
3312 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3313 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3317 (define_expand "negdf2"
3318 [(set (match_operand:DF 0 "s_register_operand" "")
3319 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3320 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3323 ;; abssi2 doesn't really clobber the condition codes if a different register
3324 ;; is being set. To keep things simple, assume during rtl manipulations that
3325 ;; it does, but tell the final scan operator the truth. Similarly for
3328 (define_expand "abssi2"
3330 [(set (match_operand:SI 0 "s_register_operand" "")
3331 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3332 (clobber (match_dup 2))])]
3336 operands[2] = gen_rtx_SCRATCH (SImode);
3338 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3341 (define_insn "*arm_abssi2"
3342 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3343 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3344 (clobber (reg:CC CC_REGNUM))]
3347 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3348 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3349 [(set_attr "conds" "clob,*")
3350 (set_attr "shift" "1")
3351 ;; predicable can't be set based on the variant, so left as no
3352 (set_attr "length" "8")]
3355 (define_insn_and_split "*thumb1_abssi2"
3356 [(set (match_operand:SI 0 "s_register_operand" "=l")
3357 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3358 (clobber (match_scratch:SI 2 "=&l"))]
3361 "TARGET_THUMB1 && reload_completed"
3362 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3363 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3364 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3366 [(set_attr "length" "6")]
3369 (define_insn "*arm_neg_abssi2"
3370 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3371 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3372 (clobber (reg:CC CC_REGNUM))]
3375 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3376 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3377 [(set_attr "conds" "clob,*")
3378 (set_attr "shift" "1")
3379 ;; predicable can't be set based on the variant, so left as no
3380 (set_attr "length" "8")]
3383 (define_insn_and_split "*thumb1_neg_abssi2"
3384 [(set (match_operand:SI 0 "s_register_operand" "=l")
3385 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3386 (clobber (match_scratch:SI 2 "=&l"))]
3389 "TARGET_THUMB1 && reload_completed"
3390 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3391 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3392 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3394 [(set_attr "length" "6")]
3397 (define_expand "abssf2"
3398 [(set (match_operand:SF 0 "s_register_operand" "")
3399 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3400 "TARGET_32BIT && TARGET_HARD_FLOAT"
3403 (define_expand "absdf2"
3404 [(set (match_operand:DF 0 "s_register_operand" "")
3405 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3406 "TARGET_32BIT && TARGET_HARD_FLOAT"
3409 (define_expand "sqrtsf2"
3410 [(set (match_operand:SF 0 "s_register_operand" "")
3411 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3412 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3415 (define_expand "sqrtdf2"
3416 [(set (match_operand:DF 0 "s_register_operand" "")
3417 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3418 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3421 (define_insn_and_split "one_cmpldi2"
3422 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3423 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3426 "TARGET_32BIT && reload_completed"
3427 [(set (match_dup 0) (not:SI (match_dup 1)))
3428 (set (match_dup 2) (not:SI (match_dup 3)))]
3431 operands[2] = gen_highpart (SImode, operands[0]);
3432 operands[0] = gen_lowpart (SImode, operands[0]);
3433 operands[3] = gen_highpart (SImode, operands[1]);
3434 operands[1] = gen_lowpart (SImode, operands[1]);
3436 [(set_attr "length" "8")
3437 (set_attr "predicable" "yes")]
3440 (define_expand "one_cmplsi2"
3441 [(set (match_operand:SI 0 "s_register_operand" "")
3442 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3447 (define_insn "*arm_one_cmplsi2"
3448 [(set (match_operand:SI 0 "s_register_operand" "=r")
3449 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3452 [(set_attr "predicable" "yes")]
3455 (define_insn "*thumb1_one_cmplsi2"
3456 [(set (match_operand:SI 0 "register_operand" "=l")
3457 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3460 [(set_attr "length" "2")]
3463 (define_insn "*notsi_compare0"
3464 [(set (reg:CC_NOOV CC_REGNUM)
3465 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3467 (set (match_operand:SI 0 "s_register_operand" "=r")
3468 (not:SI (match_dup 1)))]
3471 [(set_attr "conds" "set")]
3474 (define_insn "*notsi_compare0_scratch"
3475 [(set (reg:CC_NOOV CC_REGNUM)
3476 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3478 (clobber (match_scratch:SI 0 "=r"))]
3481 [(set_attr "conds" "set")]
3484 ;; Fixed <--> Floating conversion insns
3486 (define_expand "floatsisf2"
3487 [(set (match_operand:SF 0 "s_register_operand" "")
3488 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3489 "TARGET_32BIT && TARGET_HARD_FLOAT"
3491 if (TARGET_MAVERICK)
3493 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3498 (define_expand "floatsidf2"
3499 [(set (match_operand:DF 0 "s_register_operand" "")
3500 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3501 "TARGET_32BIT && TARGET_HARD_FLOAT"
3503 if (TARGET_MAVERICK)
3505 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3510 (define_expand "fix_truncsfsi2"
3511 [(set (match_operand:SI 0 "s_register_operand" "")
3512 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3513 "TARGET_32BIT && TARGET_HARD_FLOAT"
3515 if (TARGET_MAVERICK)
3517 if (!cirrus_fp_register (operands[0], SImode))
3518 operands[0] = force_reg (SImode, operands[0]);
3519 if (!cirrus_fp_register (operands[1], SFmode))
3520 operands[1] = force_reg (SFmode, operands[0]);
3521 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3526 (define_expand "fix_truncdfsi2"
3527 [(set (match_operand:SI 0 "s_register_operand" "")
3528 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3529 "TARGET_32BIT && TARGET_HARD_FLOAT"
3531 if (TARGET_MAVERICK)
3533 if (!cirrus_fp_register (operands[1], DFmode))
3534 operands[1] = force_reg (DFmode, operands[0]);
3535 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3542 (define_expand "truncdfsf2"
3543 [(set (match_operand:SF 0 "s_register_operand" "")
3545 (match_operand:DF 1 "s_register_operand" "")))]
3546 "TARGET_32BIT && TARGET_HARD_FLOAT"
3550 ;; Zero and sign extension instructions.
3552 (define_expand "zero_extendsidi2"
3553 [(set (match_operand:DI 0 "s_register_operand" "")
3554 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3559 (define_insn "*arm_zero_extendsidi2"
3560 [(set (match_operand:DI 0 "s_register_operand" "=r")
3561 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3564 if (REGNO (operands[1])
3565 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3566 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3567 return \"mov%?\\t%R0, #0\";
3569 [(set_attr "length" "8")
3570 (set_attr "predicable" "yes")]
3573 (define_expand "zero_extendqidi2"
3574 [(set (match_operand:DI 0 "s_register_operand" "")
3575 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3580 (define_insn "*arm_zero_extendqidi2"
3581 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3582 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3585 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3586 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3587 [(set_attr "length" "8")
3588 (set_attr "predicable" "yes")
3589 (set_attr "type" "*,load_byte")
3590 (set_attr "pool_range" "*,4092")
3591 (set_attr "neg_pool_range" "*,4084")]
3594 (define_expand "extendsidi2"
3595 [(set (match_operand:DI 0 "s_register_operand" "")
3596 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3601 (define_insn "*arm_extendsidi2"
3602 [(set (match_operand:DI 0 "s_register_operand" "=r")
3603 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3606 if (REGNO (operands[1])
3607 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3608 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3609 return \"mov%?\\t%R0, %Q0, asr #31\";
3611 [(set_attr "length" "8")
3612 (set_attr "shift" "1")
3613 (set_attr "predicable" "yes")]
3616 (define_expand "zero_extendhisi2"
3618 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3620 (set (match_operand:SI 0 "s_register_operand" "")
3621 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3625 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3627 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3628 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3632 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3634 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3638 if (!s_register_operand (operands[1], HImode))
3639 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3643 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3644 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3648 operands[1] = gen_lowpart (SImode, operands[1]);
3649 operands[2] = gen_reg_rtx (SImode);
3653 (define_insn "*thumb1_zero_extendhisi2"
3654 [(set (match_operand:SI 0 "register_operand" "=l")
3655 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3656 "TARGET_THUMB1 && !arm_arch6"
3658 rtx mem = XEXP (operands[1], 0);
3660 if (GET_CODE (mem) == CONST)
3661 mem = XEXP (mem, 0);
3663 if (GET_CODE (mem) == LABEL_REF)
3664 return \"ldr\\t%0, %1\";
3666 if (GET_CODE (mem) == PLUS)
3668 rtx a = XEXP (mem, 0);
3669 rtx b = XEXP (mem, 1);
3671 /* This can happen due to bugs in reload. */
3672 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3675 ops[0] = operands[0];
3678 output_asm_insn (\"mov %0, %1\", ops);
3680 XEXP (mem, 0) = operands[0];
3683 else if ( GET_CODE (a) == LABEL_REF
3684 && GET_CODE (b) == CONST_INT)
3685 return \"ldr\\t%0, %1\";
3688 return \"ldrh\\t%0, %1\";
3690 [(set_attr "length" "4")
3691 (set_attr "type" "load_byte")
3692 (set_attr "pool_range" "60")]
3695 (define_insn "*thumb1_zero_extendhisi2_v6"
3696 [(set (match_operand:SI 0 "register_operand" "=l,l")
3697 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3698 "TARGET_THUMB1 && arm_arch6"
3702 if (which_alternative == 0)
3703 return \"uxth\\t%0, %1\";
3705 mem = XEXP (operands[1], 0);
3707 if (GET_CODE (mem) == CONST)
3708 mem = XEXP (mem, 0);
3710 if (GET_CODE (mem) == LABEL_REF)
3711 return \"ldr\\t%0, %1\";
3713 if (GET_CODE (mem) == PLUS)
3715 rtx a = XEXP (mem, 0);
3716 rtx b = XEXP (mem, 1);
3718 /* This can happen due to bugs in reload. */
3719 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3722 ops[0] = operands[0];
3725 output_asm_insn (\"mov %0, %1\", ops);
3727 XEXP (mem, 0) = operands[0];
3730 else if ( GET_CODE (a) == LABEL_REF
3731 && GET_CODE (b) == CONST_INT)
3732 return \"ldr\\t%0, %1\";
3735 return \"ldrh\\t%0, %1\";
3737 [(set_attr "length" "2,4")
3738 (set_attr "type" "alu_shift,load_byte")
3739 (set_attr "pool_range" "*,60")]
3742 (define_insn "*arm_zero_extendhisi2"
3743 [(set (match_operand:SI 0 "s_register_operand" "=r")
3744 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3745 "TARGET_ARM && arm_arch4 && !arm_arch6"
3747 [(set_attr "type" "load_byte")
3748 (set_attr "predicable" "yes")
3749 (set_attr "pool_range" "256")
3750 (set_attr "neg_pool_range" "244")]
3753 (define_insn "*arm_zero_extendhisi2_v6"
3754 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3755 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3756 "TARGET_ARM && arm_arch6"
3760 [(set_attr "type" "alu_shift,load_byte")
3761 (set_attr "predicable" "yes")
3762 (set_attr "pool_range" "*,256")
3763 (set_attr "neg_pool_range" "*,244")]
3766 (define_insn "*arm_zero_extendhisi2addsi"
3767 [(set (match_operand:SI 0 "s_register_operand" "=r")
3768 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3769 (match_operand:SI 2 "s_register_operand" "r")))]
3771 "uxtah%?\\t%0, %2, %1"
3772 [(set_attr "type" "alu_shift")
3773 (set_attr "predicable" "yes")]
3776 (define_expand "zero_extendqisi2"
3777 [(set (match_operand:SI 0 "s_register_operand" "")
3778 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3781 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3785 emit_insn (gen_andsi3 (operands[0],
3786 gen_lowpart (SImode, operands[1]),
3789 else /* TARGET_THUMB */
3791 rtx temp = gen_reg_rtx (SImode);
3794 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3795 operands[1] = gen_lowpart (SImode, operands[1]);
3798 ops[1] = operands[1];
3799 ops[2] = GEN_INT (24);
3801 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3802 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3804 ops[0] = operands[0];
3806 ops[2] = GEN_INT (24);
3808 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3809 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3816 (define_insn "*thumb1_zero_extendqisi2"
3817 [(set (match_operand:SI 0 "register_operand" "=l")
3818 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3819 "TARGET_THUMB1 && !arm_arch6"
3821 [(set_attr "length" "2")
3822 (set_attr "type" "load_byte")
3823 (set_attr "pool_range" "32")]
3826 (define_insn "*thumb1_zero_extendqisi2_v6"
3827 [(set (match_operand:SI 0 "register_operand" "=l,l")
3828 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3829 "TARGET_THUMB1 && arm_arch6"
3833 [(set_attr "length" "2,2")
3834 (set_attr "type" "alu_shift,load_byte")
3835 (set_attr "pool_range" "*,32")]
3838 (define_insn "*arm_zero_extendqisi2"
3839 [(set (match_operand:SI 0 "s_register_operand" "=r")
3840 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3841 "TARGET_ARM && !arm_arch6"
3842 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3843 [(set_attr "type" "load_byte")
3844 (set_attr "predicable" "yes")
3845 (set_attr "pool_range" "4096")
3846 (set_attr "neg_pool_range" "4084")]
3849 (define_insn "*arm_zero_extendqisi2_v6"
3850 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3851 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3852 "TARGET_ARM && arm_arch6"
3855 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3856 [(set_attr "type" "alu_shift,load_byte")
3857 (set_attr "predicable" "yes")
3858 (set_attr "pool_range" "*,4096")
3859 (set_attr "neg_pool_range" "*,4084")]
3862 (define_insn "*arm_zero_extendqisi2addsi"
3863 [(set (match_operand:SI 0 "s_register_operand" "=r")
3864 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
3865 (match_operand:SI 2 "s_register_operand" "r")))]
3867 "uxtab%?\\t%0, %2, %1"
3868 [(set_attr "predicable" "yes")
3869 (set_attr "type" "alu_shift")]
3873 [(set (match_operand:SI 0 "s_register_operand" "")
3874 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
3875 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3876 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
3877 [(set (match_dup 2) (match_dup 1))
3878 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3883 [(set (match_operand:SI 0 "s_register_operand" "")
3884 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
3885 (clobber (match_operand:SI 2 "s_register_operand" ""))]
3886 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
3887 [(set (match_dup 2) (match_dup 1))
3888 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
3892 (define_insn "*compareqi_eq0"
3893 [(set (reg:CC_Z CC_REGNUM)
3894 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
3898 [(set_attr "conds" "set")]
3901 (define_expand "extendhisi2"
3903 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3905 (set (match_operand:SI 0 "s_register_operand" "")
3906 (ashiftrt:SI (match_dup 2)
3911 if (GET_CODE (operands[1]) == MEM)
3915 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3920 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3921 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3926 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3928 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
3932 if (!s_register_operand (operands[1], HImode))
3933 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3938 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
3940 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3941 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
3946 operands[1] = gen_lowpart (SImode, operands[1]);
3947 operands[2] = gen_reg_rtx (SImode);
3951 (define_insn "thumb1_extendhisi2"
3952 [(set (match_operand:SI 0 "register_operand" "=l")
3953 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
3954 (clobber (match_scratch:SI 2 "=&l"))]
3955 "TARGET_THUMB1 && !arm_arch6"
3959 rtx mem = XEXP (operands[1], 0);
3961 /* This code used to try to use 'V', and fix the address only if it was
3962 offsettable, but this fails for e.g. REG+48 because 48 is outside the
3963 range of QImode offsets, and offsettable_address_p does a QImode
3966 if (GET_CODE (mem) == CONST)
3967 mem = XEXP (mem, 0);
3969 if (GET_CODE (mem) == LABEL_REF)
3970 return \"ldr\\t%0, %1\";
3972 if (GET_CODE (mem) == PLUS)
3974 rtx a = XEXP (mem, 0);
3975 rtx b = XEXP (mem, 1);
3977 if (GET_CODE (a) == LABEL_REF
3978 && GET_CODE (b) == CONST_INT)
3979 return \"ldr\\t%0, %1\";
3981 if (GET_CODE (b) == REG)
3982 return \"ldrsh\\t%0, %1\";
3990 ops[2] = const0_rtx;
3993 gcc_assert (GET_CODE (ops[1]) == REG);
3995 ops[0] = operands[0];
3996 ops[3] = operands[2];
3997 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4000 [(set_attr "length" "4")
4001 (set_attr "type" "load_byte")
4002 (set_attr "pool_range" "1020")]
4005 ;; We used to have an early-clobber on the scratch register here.
4006 ;; However, there's a bug somewhere in reload which means that this
4007 ;; can be partially ignored during spill allocation if the memory
4008 ;; address also needs reloading; this causes us to die later on when
4009 ;; we try to verify the operands. Fortunately, we don't really need
4010 ;; the early-clobber: we can always use operand 0 if operand 2
4011 ;; overlaps the address.
4012 (define_insn "*thumb1_extendhisi2_insn_v6"
4013 [(set (match_operand:SI 0 "register_operand" "=l,l")
4014 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4015 (clobber (match_scratch:SI 2 "=X,l"))]
4016 "TARGET_THUMB1 && arm_arch6"
4022 if (which_alternative == 0)
4023 return \"sxth\\t%0, %1\";
4025 mem = XEXP (operands[1], 0);
4027 /* This code used to try to use 'V', and fix the address only if it was
4028 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4029 range of QImode offsets, and offsettable_address_p does a QImode
4032 if (GET_CODE (mem) == CONST)
4033 mem = XEXP (mem, 0);
4035 if (GET_CODE (mem) == LABEL_REF)
4036 return \"ldr\\t%0, %1\";
4038 if (GET_CODE (mem) == PLUS)
4040 rtx a = XEXP (mem, 0);
4041 rtx b = XEXP (mem, 1);
4043 if (GET_CODE (a) == LABEL_REF
4044 && GET_CODE (b) == CONST_INT)
4045 return \"ldr\\t%0, %1\";
4047 if (GET_CODE (b) == REG)
4048 return \"ldrsh\\t%0, %1\";
4056 ops[2] = const0_rtx;
4059 gcc_assert (GET_CODE (ops[1]) == REG);
4061 ops[0] = operands[0];
4062 if (reg_mentioned_p (operands[2], ops[1]))
4065 ops[3] = operands[2];
4066 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4069 [(set_attr "length" "2,4")
4070 (set_attr "type" "alu_shift,load_byte")
4071 (set_attr "pool_range" "*,1020")]
4074 ;; This pattern will only be used when ldsh is not available
4075 (define_expand "extendhisi2_mem"
4076 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4078 (zero_extend:SI (match_dup 7)))
4079 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4080 (set (match_operand:SI 0 "" "")
4081 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4086 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4088 mem1 = change_address (operands[1], QImode, addr);
4089 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4090 operands[0] = gen_lowpart (SImode, operands[0]);
4092 operands[2] = gen_reg_rtx (SImode);
4093 operands[3] = gen_reg_rtx (SImode);
4094 operands[6] = gen_reg_rtx (SImode);
4097 if (BYTES_BIG_ENDIAN)
4099 operands[4] = operands[2];
4100 operands[5] = operands[3];
4104 operands[4] = operands[3];
4105 operands[5] = operands[2];
4110 (define_insn "*arm_extendhisi2"
4111 [(set (match_operand:SI 0 "s_register_operand" "=r")
4112 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4113 "TARGET_ARM && arm_arch4 && !arm_arch6"
4114 "ldr%(sh%)\\t%0, %1"
4115 [(set_attr "type" "load_byte")
4116 (set_attr "predicable" "yes")
4117 (set_attr "pool_range" "256")
4118 (set_attr "neg_pool_range" "244")]
4121 ;; ??? Check Thumb-2 pool range
4122 (define_insn "*arm_extendhisi2_v6"
4123 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4124 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4125 "TARGET_32BIT && arm_arch6"
4129 [(set_attr "type" "alu_shift,load_byte")
4130 (set_attr "predicable" "yes")
4131 (set_attr "pool_range" "*,256")
4132 (set_attr "neg_pool_range" "*,244")]
4135 (define_insn "*arm_extendhisi2addsi"
4136 [(set (match_operand:SI 0 "s_register_operand" "=r")
4137 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4138 (match_operand:SI 2 "s_register_operand" "r")))]
4140 "sxtah%?\\t%0, %2, %1"
4143 (define_expand "extendqihi2"
4145 (ashift:SI (match_operand:QI 1 "general_operand" "")
4147 (set (match_operand:HI 0 "s_register_operand" "")
4148 (ashiftrt:SI (match_dup 2)
4153 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4155 emit_insn (gen_rtx_SET (VOIDmode,
4157 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4160 if (!s_register_operand (operands[1], QImode))
4161 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4162 operands[0] = gen_lowpart (SImode, operands[0]);
4163 operands[1] = gen_lowpart (SImode, operands[1]);
4164 operands[2] = gen_reg_rtx (SImode);
4168 (define_insn "*arm_extendqihi_insn"
4169 [(set (match_operand:HI 0 "s_register_operand" "=r")
4170 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4171 "TARGET_ARM && arm_arch4"
4172 "ldr%(sb%)\\t%0, %1"
4173 [(set_attr "type" "load_byte")
4174 (set_attr "predicable" "yes")
4175 (set_attr "pool_range" "256")
4176 (set_attr "neg_pool_range" "244")]
4179 (define_expand "extendqisi2"
4181 (ashift:SI (match_operand:QI 1 "general_operand" "")
4183 (set (match_operand:SI 0 "s_register_operand" "")
4184 (ashiftrt:SI (match_dup 2)
4189 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4191 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4192 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4196 if (!s_register_operand (operands[1], QImode))
4197 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4201 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4202 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4206 operands[1] = gen_lowpart (SImode, operands[1]);
4207 operands[2] = gen_reg_rtx (SImode);
4211 (define_insn "*arm_extendqisi"
4212 [(set (match_operand:SI 0 "s_register_operand" "=r")
4213 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4214 "TARGET_ARM && arm_arch4 && !arm_arch6"
4215 "ldr%(sb%)\\t%0, %1"
4216 [(set_attr "type" "load_byte")
4217 (set_attr "predicable" "yes")
4218 (set_attr "pool_range" "256")
4219 (set_attr "neg_pool_range" "244")]
4222 (define_insn "*arm_extendqisi_v6"
4223 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4224 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4225 "TARGET_ARM && arm_arch6"
4229 [(set_attr "type" "alu_shift,load_byte")
4230 (set_attr "predicable" "yes")
4231 (set_attr "pool_range" "*,256")
4232 (set_attr "neg_pool_range" "*,244")]
4235 (define_insn "*arm_extendqisi2addsi"
4236 [(set (match_operand:SI 0 "s_register_operand" "=r")
4237 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4238 (match_operand:SI 2 "s_register_operand" "r")))]
4240 "sxtab%?\\t%0, %2, %1"
4241 [(set_attr "type" "alu_shift")
4242 (set_attr "predicable" "yes")]
4245 (define_insn "*thumb1_extendqisi2"
4246 [(set (match_operand:SI 0 "register_operand" "=l,l")
4247 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4248 "TARGET_THUMB1 && !arm_arch6"
4252 rtx mem = XEXP (operands[1], 0);
4254 if (GET_CODE (mem) == CONST)
4255 mem = XEXP (mem, 0);
4257 if (GET_CODE (mem) == LABEL_REF)
4258 return \"ldr\\t%0, %1\";
4260 if (GET_CODE (mem) == PLUS
4261 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4262 return \"ldr\\t%0, %1\";
4264 if (which_alternative == 0)
4265 return \"ldrsb\\t%0, %1\";
4267 ops[0] = operands[0];
4269 if (GET_CODE (mem) == PLUS)
4271 rtx a = XEXP (mem, 0);
4272 rtx b = XEXP (mem, 1);
4277 if (GET_CODE (a) == REG)
4279 if (GET_CODE (b) == REG)
4280 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4281 else if (REGNO (a) == REGNO (ops[0]))
4283 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4284 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4285 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4288 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4292 gcc_assert (GET_CODE (b) == REG);
4293 if (REGNO (b) == REGNO (ops[0]))
4295 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4296 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4297 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4300 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4303 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4305 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4306 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4307 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4312 ops[2] = const0_rtx;
4314 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4318 [(set_attr "length" "2,6")
4319 (set_attr "type" "load_byte,load_byte")
4320 (set_attr "pool_range" "32,32")]
4323 (define_insn "*thumb1_extendqisi2_v6"
4324 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4325 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4326 "TARGET_THUMB1 && arm_arch6"
4332 if (which_alternative == 0)
4333 return \"sxtb\\t%0, %1\";
4335 mem = XEXP (operands[1], 0);
4337 if (GET_CODE (mem) == CONST)
4338 mem = XEXP (mem, 0);
4340 if (GET_CODE (mem) == LABEL_REF)
4341 return \"ldr\\t%0, %1\";
4343 if (GET_CODE (mem) == PLUS
4344 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4345 return \"ldr\\t%0, %1\";
4347 if (which_alternative == 0)
4348 return \"ldrsb\\t%0, %1\";
4350 ops[0] = operands[0];
4352 if (GET_CODE (mem) == PLUS)
4354 rtx a = XEXP (mem, 0);
4355 rtx b = XEXP (mem, 1);
4360 if (GET_CODE (a) == REG)
4362 if (GET_CODE (b) == REG)
4363 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4364 else if (REGNO (a) == REGNO (ops[0]))
4366 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4367 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4370 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4374 gcc_assert (GET_CODE (b) == REG);
4375 if (REGNO (b) == REGNO (ops[0]))
4377 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4378 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4381 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4384 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4386 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4387 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4392 ops[2] = const0_rtx;
4394 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4398 [(set_attr "length" "2,2,4")
4399 (set_attr "type" "alu_shift,load_byte,load_byte")
4400 (set_attr "pool_range" "*,32,32")]
4403 (define_expand "extendsfdf2"
4404 [(set (match_operand:DF 0 "s_register_operand" "")
4405 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4406 "TARGET_32BIT && TARGET_HARD_FLOAT"
4410 ;; Move insns (including loads and stores)
4412 ;; XXX Just some ideas about movti.
4413 ;; I don't think these are a good idea on the arm, there just aren't enough
4415 ;;(define_expand "loadti"
4416 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4417 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4420 ;;(define_expand "storeti"
4421 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4422 ;; (match_operand:TI 1 "s_register_operand" ""))]
4425 ;;(define_expand "movti"
4426 ;; [(set (match_operand:TI 0 "general_operand" "")
4427 ;; (match_operand:TI 1 "general_operand" ""))]
4433 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4434 ;; operands[1] = copy_to_reg (operands[1]);
4435 ;; if (GET_CODE (operands[0]) == MEM)
4436 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4437 ;; else if (GET_CODE (operands[1]) == MEM)
4438 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4442 ;; emit_insn (insn);
4446 ;; Recognize garbage generated above.
4449 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4450 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4454 ;; register mem = (which_alternative < 3);
4455 ;; register const char *template;
4457 ;; operands[mem] = XEXP (operands[mem], 0);
4458 ;; switch (which_alternative)
4460 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4461 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4462 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4463 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4464 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4465 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4467 ;; output_asm_insn (template, operands);
4471 (define_expand "movdi"
4472 [(set (match_operand:DI 0 "general_operand" "")
4473 (match_operand:DI 1 "general_operand" ""))]
4476 if (can_create_pseudo_p ())
4478 if (GET_CODE (operands[0]) != REG)
4479 operands[1] = force_reg (DImode, operands[1]);
4484 (define_insn "*arm_movdi"
4485 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4486 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4488 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4490 && ( register_operand (operands[0], DImode)
4491 || register_operand (operands[1], DImode))"
4493 switch (which_alternative)
4500 return output_move_double (operands);
4503 [(set_attr "length" "8,12,16,8,8")
4504 (set_attr "type" "*,*,*,load2,store2")
4505 (set_attr "pool_range" "*,*,*,1020,*")
4506 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4510 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4511 (match_operand:ANY64 1 "const_double_operand" ""))]
4514 && (arm_const_double_inline_cost (operands[1])
4515 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4518 arm_split_constant (SET, SImode, curr_insn,
4519 INTVAL (gen_lowpart (SImode, operands[1])),
4520 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4521 arm_split_constant (SET, SImode, curr_insn,
4522 INTVAL (gen_highpart_mode (SImode,
4523 GET_MODE (operands[0]),
4525 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4530 ; If optimizing for size, or if we have load delay slots, then
4531 ; we want to split the constant into two separate operations.
4532 ; In both cases this may split a trivial part into a single data op
4533 ; leaving a single complex constant to load. We can also get longer
4534 ; offsets in a LDR which means we get better chances of sharing the pool
4535 ; entries. Finally, we can normally do a better job of scheduling
4536 ; LDR instructions than we can with LDM.
4537 ; This pattern will only match if the one above did not.
4539 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4540 (match_operand:ANY64 1 "const_double_operand" ""))]
4541 "TARGET_ARM && reload_completed
4542 && arm_const_double_by_parts (operands[1])"
4543 [(set (match_dup 0) (match_dup 1))
4544 (set (match_dup 2) (match_dup 3))]
4546 operands[2] = gen_highpart (SImode, operands[0]);
4547 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4549 operands[0] = gen_lowpart (SImode, operands[0]);
4550 operands[1] = gen_lowpart (SImode, operands[1]);
4555 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4556 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4557 "TARGET_EITHER && reload_completed"
4558 [(set (match_dup 0) (match_dup 1))
4559 (set (match_dup 2) (match_dup 3))]
4561 operands[2] = gen_highpart (SImode, operands[0]);
4562 operands[3] = gen_highpart (SImode, operands[1]);
4563 operands[0] = gen_lowpart (SImode, operands[0]);
4564 operands[1] = gen_lowpart (SImode, operands[1]);
4566 /* Handle a partial overlap. */
4567 if (rtx_equal_p (operands[0], operands[3]))
4569 rtx tmp0 = operands[0];
4570 rtx tmp1 = operands[1];
4572 operands[0] = operands[2];
4573 operands[1] = operands[3];
4580 ;; We can't actually do base+index doubleword loads if the index and
4581 ;; destination overlap. Split here so that we at least have chance to
4584 [(set (match_operand:DI 0 "s_register_operand" "")
4585 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4586 (match_operand:SI 2 "s_register_operand" ""))))]
4588 && reg_overlap_mentioned_p (operands[0], operands[1])
4589 && reg_overlap_mentioned_p (operands[0], operands[2])"
4591 (plus:SI (match_dup 1)
4594 (mem:DI (match_dup 4)))]
4596 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4600 ;;; ??? This should have alternatives for constants.
4601 ;;; ??? This was originally identical to the movdf_insn pattern.
4602 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4603 ;;; thumb_reorg with a memory reference.
4604 (define_insn "*thumb1_movdi_insn"
4605 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4606 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4608 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4609 && ( register_operand (operands[0], DImode)
4610 || register_operand (operands[1], DImode))"
4613 switch (which_alternative)
4617 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4618 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4619 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4621 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4623 operands[1] = GEN_INT (- INTVAL (operands[1]));
4624 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4626 return \"ldmia\\t%1, {%0, %H0}\";
4628 return \"stmia\\t%0, {%1, %H1}\";
4630 return thumb_load_double_from_address (operands);
4632 operands[2] = gen_rtx_MEM (SImode,
4633 plus_constant (XEXP (operands[0], 0), 4));
4634 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4637 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4638 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4639 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4642 [(set_attr "length" "4,4,6,2,2,6,4,4")
4643 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4644 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4647 (define_expand "movsi"
4648 [(set (match_operand:SI 0 "general_operand" "")
4649 (match_operand:SI 1 "general_operand" ""))]
4652 rtx base, offset, tmp;
4656 /* Everything except mem = const or mem = mem can be done easily. */
4657 if (GET_CODE (operands[0]) == MEM)
4658 operands[1] = force_reg (SImode, operands[1]);
4659 if (arm_general_register_operand (operands[0], SImode)
4660 && GET_CODE (operands[1]) == CONST_INT
4661 && !(const_ok_for_arm (INTVAL (operands[1]))
4662 || const_ok_for_arm (~INTVAL (operands[1]))))
4664 arm_split_constant (SET, SImode, NULL_RTX,
4665 INTVAL (operands[1]), operands[0], NULL_RTX,
4666 optimize && can_create_pseudo_p ());
4670 else /* TARGET_THUMB1... */
4672 if (can_create_pseudo_p ())
4674 if (GET_CODE (operands[0]) != REG)
4675 operands[1] = force_reg (SImode, operands[1]);
4679 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4681 split_const (operands[1], &base, &offset);
4682 if (GET_CODE (base) == SYMBOL_REF
4683 && !offset_within_block_p (base, INTVAL (offset)))
4685 tmp = no_new_pseudos ? operands[0] : gen_reg_rtx (SImode);
4686 emit_move_insn (tmp, base);
4687 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4692 /* Recognize the case where operand[1] is a reference to thread-local
4693 data and load its address to a register. */
4694 if (arm_tls_referenced_p (operands[1]))
4696 rtx tmp = operands[1];
4699 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4701 addend = XEXP (XEXP (tmp, 0), 1);
4702 tmp = XEXP (XEXP (tmp, 0), 0);
4705 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4706 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4708 tmp = legitimize_tls_address (tmp,
4709 !can_create_pseudo_p () ? operands[0] : 0);
4712 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4713 tmp = force_operand (tmp, operands[0]);
4718 && (CONSTANT_P (operands[1])
4719 || symbol_mentioned_p (operands[1])
4720 || label_mentioned_p (operands[1])))
4721 operands[1] = legitimize_pic_address (operands[1], SImode,
4722 (!can_create_pseudo_p ()
4728 (define_insn "*arm_movsi_insn"
4729 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,r, m")
4730 (match_operand:SI 1 "general_operand" "rI,K,N,mi,r"))]
4731 "TARGET_ARM && ! TARGET_IWMMXT
4732 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4733 && ( register_operand (operands[0], SImode)
4734 || register_operand (operands[1], SImode))"
4741 [(set_attr "type" "*,*,*,load1,store1")
4742 (set_attr "predicable" "yes")
4743 (set_attr "pool_range" "*,*,*,4096,*")
4744 (set_attr "neg_pool_range" "*,*,*,4084,*")]
4748 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4749 (match_operand:SI 1 "const_int_operand" ""))]
4751 && (!(const_ok_for_arm (INTVAL (operands[1]))
4752 || const_ok_for_arm (~INTVAL (operands[1]))))"
4753 [(clobber (const_int 0))]
4755 arm_split_constant (SET, SImode, NULL_RTX,
4756 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4761 (define_insn "*thumb1_movsi_insn"
4762 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lh")
4763 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lh"))]
4765 && ( register_operand (operands[0], SImode)
4766 || register_operand (operands[1], SImode))"
4777 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4778 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4779 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4783 [(set (match_operand:SI 0 "register_operand" "")
4784 (match_operand:SI 1 "const_int_operand" ""))]
4785 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4786 [(set (match_dup 0) (match_dup 1))
4787 (set (match_dup 0) (neg:SI (match_dup 0)))]
4788 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4792 [(set (match_operand:SI 0 "register_operand" "")
4793 (match_operand:SI 1 "const_int_operand" ""))]
4794 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4795 [(set (match_dup 0) (match_dup 1))
4796 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4799 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4800 unsigned HOST_WIDE_INT mask = 0xff;
4803 for (i = 0; i < 25; i++)
4804 if ((val & (mask << i)) == val)
4807 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4811 operands[1] = GEN_INT (val >> i);
4812 operands[2] = GEN_INT (i);
4816 ;; When generating pic, we need to load the symbol offset into a register.
4817 ;; So that the optimizer does not confuse this with a normal symbol load
4818 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4819 ;; since that is the only type of relocation we can use.
4821 ;; The rather odd constraints on the following are to force reload to leave
4822 ;; the insn alone, and to force the minipool generation pass to then move
4823 ;; the GOT symbol to memory.
4825 (define_insn "pic_load_addr_arm"
4826 [(set (match_operand:SI 0 "s_register_operand" "=r")
4827 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4828 "TARGET_ARM && flag_pic"
4830 [(set_attr "type" "load1")
4831 (set (attr "pool_range") (const_int 4096))
4832 (set (attr "neg_pool_range") (const_int 4084))]
4835 (define_insn "pic_load_addr_thumb1"
4836 [(set (match_operand:SI 0 "s_register_operand" "=l")
4837 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4838 "TARGET_THUMB1 && flag_pic"
4840 [(set_attr "type" "load1")
4841 (set (attr "pool_range") (const_int 1024))]
4844 ;; This variant is used for AOF assembly, since it needs to mention the
4845 ;; pic register in the rtl.
4846 (define_expand "pic_load_addr_based"
4847 [(set (match_operand:SI 0 "s_register_operand" "")
4848 (unspec:SI [(match_operand 1 "" "") (match_dup 2)] UNSPEC_PIC_SYM))]
4849 "TARGET_ARM && flag_pic"
4850 "operands[2] = cfun->machine->pic_reg;"
4853 (define_insn "*pic_load_addr_based_insn"
4854 [(set (match_operand:SI 0 "s_register_operand" "=r")
4855 (unspec:SI [(match_operand 1 "" "")
4856 (match_operand 2 "s_register_operand" "r")]
4858 "TARGET_EITHER && flag_pic && operands[2] == cfun->machine->pic_reg"
4860 #ifdef AOF_ASSEMBLER
4861 operands[1] = aof_pic_entry (operands[1]);
4863 output_asm_insn (\"ldr%?\\t%0, %a1\", operands);
4866 [(set_attr "type" "load1")
4867 (set (attr "pool_range")
4868 (if_then_else (eq_attr "is_thumb" "yes")
4871 (set (attr "neg_pool_range")
4872 (if_then_else (eq_attr "is_thumb" "yes")
4877 (define_insn "pic_add_dot_plus_four"
4878 [(set (match_operand:SI 0 "register_operand" "=r")
4879 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4880 (const (plus:SI (pc) (const_int 4))))
4881 (match_operand 2 "" "")]
4885 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4886 INTVAL (operands[2]));
4887 return \"add\\t%0, %|pc\";
4889 [(set_attr "length" "2")]
4892 (define_insn "pic_add_dot_plus_eight"
4893 [(set (match_operand:SI 0 "register_operand" "=r")
4894 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4895 (const (plus:SI (pc) (const_int 8))))
4896 (match_operand 2 "" "")]
4900 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4901 INTVAL (operands[2]));
4902 return \"add%?\\t%0, %|pc, %1\";
4904 [(set_attr "predicable" "yes")]
4907 (define_insn "tls_load_dot_plus_eight"
4908 [(set (match_operand:SI 0 "register_operand" "+r")
4909 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
4910 (const (plus:SI (pc) (const_int 8))))
4911 (match_operand 2 "" "")]
4915 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
4916 INTVAL (operands[2]));
4917 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
4919 [(set_attr "predicable" "yes")]
4922 ;; PIC references to local variables can generate pic_add_dot_plus_eight
4923 ;; followed by a load. These sequences can be crunched down to
4924 ;; tls_load_dot_plus_eight by a peephole.
4927 [(parallel [(set (match_operand:SI 0 "register_operand" "")
4928 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
4929 (const (plus:SI (pc) (const_int 8))))]
4931 (use (label_ref (match_operand 1 "" "")))])
4932 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
4933 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
4934 [(parallel [(set (match_dup 2)
4935 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
4936 (const (plus:SI (pc) (const_int 8))))]
4938 (use (label_ref (match_dup 1)))])]
4942 (define_insn "pic_offset_arm"
4943 [(set (match_operand:SI 0 "register_operand" "=r")
4944 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
4945 (unspec:SI [(match_operand:SI 2 "" "X")]
4946 UNSPEC_PIC_OFFSET))))]
4947 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
4948 "ldr%?\\t%0, [%1,%2]"
4949 [(set_attr "type" "load1")]
4952 (define_expand "builtin_setjmp_receiver"
4953 [(label_ref (match_operand 0 "" ""))]
4957 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
4959 if (arm_pic_register != INVALID_REGNUM)
4960 arm_load_pic_register (1UL << 3);
4964 ;; If copying one reg to another we can set the condition codes according to
4965 ;; its value. Such a move is common after a return from subroutine and the
4966 ;; result is being tested against zero.
4968 (define_insn "*movsi_compare0"
4969 [(set (reg:CC CC_REGNUM)
4970 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
4972 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4978 [(set_attr "conds" "set")]
4981 ;; Subroutine to store a half word from a register into memory.
4982 ;; Operand 0 is the source register (HImode)
4983 ;; Operand 1 is the destination address in a register (SImode)
4985 ;; In both this routine and the next, we must be careful not to spill
4986 ;; a memory address of reg+large_const into a separate PLUS insn, since this
4987 ;; can generate unrecognizable rtl.
4989 (define_expand "storehi"
4990 [;; store the low byte
4991 (set (match_operand 1 "" "") (match_dup 3))
4992 ;; extract the high byte
4994 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
4995 ;; store the high byte
4996 (set (match_dup 4) (match_dup 5))]
5000 rtx op1 = operands[1];
5001 rtx addr = XEXP (op1, 0);
5002 enum rtx_code code = GET_CODE (addr);
5004 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5006 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5008 operands[4] = adjust_address (op1, QImode, 1);
5009 operands[1] = adjust_address (operands[1], QImode, 0);
5010 operands[3] = gen_lowpart (QImode, operands[0]);
5011 operands[0] = gen_lowpart (SImode, operands[0]);
5012 operands[2] = gen_reg_rtx (SImode);
5013 operands[5] = gen_lowpart (QImode, operands[2]);
5017 (define_expand "storehi_bigend"
5018 [(set (match_dup 4) (match_dup 3))
5020 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5021 (set (match_operand 1 "" "") (match_dup 5))]
5025 rtx op1 = operands[1];
5026 rtx addr = XEXP (op1, 0);
5027 enum rtx_code code = GET_CODE (addr);
5029 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5031 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5033 operands[4] = adjust_address (op1, QImode, 1);
5034 operands[1] = adjust_address (operands[1], QImode, 0);
5035 operands[3] = gen_lowpart (QImode, operands[0]);
5036 operands[0] = gen_lowpart (SImode, operands[0]);
5037 operands[2] = gen_reg_rtx (SImode);
5038 operands[5] = gen_lowpart (QImode, operands[2]);
5042 ;; Subroutine to store a half word integer constant into memory.
5043 (define_expand "storeinthi"
5044 [(set (match_operand 0 "" "")
5045 (match_operand 1 "" ""))
5046 (set (match_dup 3) (match_dup 2))]
5050 HOST_WIDE_INT value = INTVAL (operands[1]);
5051 rtx addr = XEXP (operands[0], 0);
5052 rtx op0 = operands[0];
5053 enum rtx_code code = GET_CODE (addr);
5055 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5057 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5059 operands[1] = gen_reg_rtx (SImode);
5060 if (BYTES_BIG_ENDIAN)
5062 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5063 if ((value & 255) == ((value >> 8) & 255))
5064 operands[2] = operands[1];
5067 operands[2] = gen_reg_rtx (SImode);
5068 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5073 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5074 if ((value & 255) == ((value >> 8) & 255))
5075 operands[2] = operands[1];
5078 operands[2] = gen_reg_rtx (SImode);
5079 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5083 operands[3] = adjust_address (op0, QImode, 1);
5084 operands[0] = adjust_address (operands[0], QImode, 0);
5085 operands[2] = gen_lowpart (QImode, operands[2]);
5086 operands[1] = gen_lowpart (QImode, operands[1]);
5090 (define_expand "storehi_single_op"
5091 [(set (match_operand:HI 0 "memory_operand" "")
5092 (match_operand:HI 1 "general_operand" ""))]
5093 "TARGET_32BIT && arm_arch4"
5095 if (!s_register_operand (operands[1], HImode))
5096 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5100 (define_expand "movhi"
5101 [(set (match_operand:HI 0 "general_operand" "")
5102 (match_operand:HI 1 "general_operand" ""))]
5107 if (can_create_pseudo_p ())
5109 if (GET_CODE (operands[0]) == MEM)
5113 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5116 if (GET_CODE (operands[1]) == CONST_INT)
5117 emit_insn (gen_storeinthi (operands[0], operands[1]));
5120 if (GET_CODE (operands[1]) == MEM)
5121 operands[1] = force_reg (HImode, operands[1]);
5122 if (BYTES_BIG_ENDIAN)
5123 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5125 emit_insn (gen_storehi (operands[1], operands[0]));
5129 /* Sign extend a constant, and keep it in an SImode reg. */
5130 else if (GET_CODE (operands[1]) == CONST_INT)
5132 rtx reg = gen_reg_rtx (SImode);
5133 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5135 /* If the constant is already valid, leave it alone. */
5136 if (!const_ok_for_arm (val))
5138 /* If setting all the top bits will make the constant
5139 loadable in a single instruction, then set them.
5140 Otherwise, sign extend the number. */
5142 if (const_ok_for_arm (~(val | ~0xffff)))
5144 else if (val & 0x8000)
5148 emit_insn (gen_movsi (reg, GEN_INT (val)));
5149 operands[1] = gen_lowpart (HImode, reg);
5151 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5152 && GET_CODE (operands[1]) == MEM)
5154 rtx reg = gen_reg_rtx (SImode);
5156 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5157 operands[1] = gen_lowpart (HImode, reg);
5159 else if (!arm_arch4)
5161 if (GET_CODE (operands[1]) == MEM)
5164 rtx offset = const0_rtx;
5165 rtx reg = gen_reg_rtx (SImode);
5167 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5168 || (GET_CODE (base) == PLUS
5169 && (GET_CODE (offset = XEXP (base, 1))
5171 && ((INTVAL(offset) & 1) != 1)
5172 && GET_CODE (base = XEXP (base, 0)) == REG))
5173 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5177 new = widen_memory_access (operands[1], SImode,
5178 ((INTVAL (offset) & ~3)
5179 - INTVAL (offset)));
5180 emit_insn (gen_movsi (reg, new));
5181 if (((INTVAL (offset) & 2) != 0)
5182 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5184 rtx reg2 = gen_reg_rtx (SImode);
5186 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5191 emit_insn (gen_movhi_bytes (reg, operands[1]));
5193 operands[1] = gen_lowpart (HImode, reg);
5197 /* Handle loading a large integer during reload. */
5198 else if (GET_CODE (operands[1]) == CONST_INT
5199 && !const_ok_for_arm (INTVAL (operands[1]))
5200 && !const_ok_for_arm (~INTVAL (operands[1])))
5202 /* Writing a constant to memory needs a scratch, which should
5203 be handled with SECONDARY_RELOADs. */
5204 gcc_assert (GET_CODE (operands[0]) == REG);
5206 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5207 emit_insn (gen_movsi (operands[0], operands[1]));
5211 else if (TARGET_THUMB2)
5213 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5214 if (can_create_pseudo_p ())
5216 if (GET_CODE (operands[0]) != REG)
5217 operands[1] = force_reg (HImode, operands[1]);
5218 /* Zero extend a constant, and keep it in an SImode reg. */
5219 else if (GET_CODE (operands[1]) == CONST_INT)
5221 rtx reg = gen_reg_rtx (SImode);
5222 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5224 emit_insn (gen_movsi (reg, GEN_INT (val)));
5225 operands[1] = gen_lowpart (HImode, reg);
5229 else /* TARGET_THUMB1 */
5231 if (can_create_pseudo_p ())
5233 if (GET_CODE (operands[1]) == CONST_INT)
5235 rtx reg = gen_reg_rtx (SImode);
5237 emit_insn (gen_movsi (reg, operands[1]));
5238 operands[1] = gen_lowpart (HImode, reg);
5241 /* ??? We shouldn't really get invalid addresses here, but this can
5242 happen if we are passed a SP (never OK for HImode/QImode) or
5243 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5244 HImode/QImode) relative address. */
5245 /* ??? This should perhaps be fixed elsewhere, for instance, in
5246 fixup_stack_1, by checking for other kinds of invalid addresses,
5247 e.g. a bare reference to a virtual register. This may confuse the
5248 alpha though, which must handle this case differently. */
5249 if (GET_CODE (operands[0]) == MEM
5250 && !memory_address_p (GET_MODE (operands[0]),
5251 XEXP (operands[0], 0)))
5253 = replace_equiv_address (operands[0],
5254 copy_to_reg (XEXP (operands[0], 0)));
5256 if (GET_CODE (operands[1]) == MEM
5257 && !memory_address_p (GET_MODE (operands[1]),
5258 XEXP (operands[1], 0)))
5260 = replace_equiv_address (operands[1],
5261 copy_to_reg (XEXP (operands[1], 0)));
5263 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5265 rtx reg = gen_reg_rtx (SImode);
5267 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5268 operands[1] = gen_lowpart (HImode, reg);
5271 if (GET_CODE (operands[0]) == MEM)
5272 operands[1] = force_reg (HImode, operands[1]);
5274 else if (GET_CODE (operands[1]) == CONST_INT
5275 && !satisfies_constraint_I (operands[1]))
5277 /* Handle loading a large integer during reload. */
5279 /* Writing a constant to memory needs a scratch, which should
5280 be handled with SECONDARY_RELOADs. */
5281 gcc_assert (GET_CODE (operands[0]) == REG);
5283 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5284 emit_insn (gen_movsi (operands[0], operands[1]));
5291 (define_insn "*thumb1_movhi_insn"
5292 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5293 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5295 && ( register_operand (operands[0], HImode)
5296 || register_operand (operands[1], HImode))"
5298 switch (which_alternative)
5300 case 0: return \"add %0, %1, #0\";
5301 case 2: return \"strh %1, %0\";
5302 case 3: return \"mov %0, %1\";
5303 case 4: return \"mov %0, %1\";
5304 case 5: return \"mov %0, %1\";
5305 default: gcc_unreachable ();
5307 /* The stack pointer can end up being taken as an index register.
5308 Catch this case here and deal with it. */
5309 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5310 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5311 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5314 ops[0] = operands[0];
5315 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5317 output_asm_insn (\"mov %0, %1\", ops);
5319 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5322 return \"ldrh %0, %1\";
5324 [(set_attr "length" "2,4,2,2,2,2")
5325 (set_attr "type" "*,load1,store1,*,*,*")]
5329 (define_expand "movhi_bytes"
5330 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5332 (zero_extend:SI (match_dup 6)))
5333 (set (match_operand:SI 0 "" "")
5334 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5339 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5341 mem1 = change_address (operands[1], QImode, addr);
5342 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5343 operands[0] = gen_lowpart (SImode, operands[0]);
5345 operands[2] = gen_reg_rtx (SImode);
5346 operands[3] = gen_reg_rtx (SImode);
5349 if (BYTES_BIG_ENDIAN)
5351 operands[4] = operands[2];
5352 operands[5] = operands[3];
5356 operands[4] = operands[3];
5357 operands[5] = operands[2];
5362 (define_expand "movhi_bigend"
5364 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5367 (ashiftrt:SI (match_dup 2) (const_int 16)))
5368 (set (match_operand:HI 0 "s_register_operand" "")
5372 operands[2] = gen_reg_rtx (SImode);
5373 operands[3] = gen_reg_rtx (SImode);
5374 operands[4] = gen_lowpart (HImode, operands[3]);
5378 ;; Pattern to recognize insn generated default case above
5379 (define_insn "*movhi_insn_arch4"
5380 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5381 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5384 && (GET_CODE (operands[1]) != CONST_INT
5385 || const_ok_for_arm (INTVAL (operands[1]))
5386 || const_ok_for_arm (~INTVAL (operands[1])))"
5388 mov%?\\t%0, %1\\t%@ movhi
5389 mvn%?\\t%0, #%B1\\t%@ movhi
5390 str%(h%)\\t%1, %0\\t%@ movhi
5391 ldr%(h%)\\t%0, %1\\t%@ movhi"
5392 [(set_attr "type" "*,*,store1,load1")
5393 (set_attr "predicable" "yes")
5394 (set_attr "pool_range" "*,*,*,256")
5395 (set_attr "neg_pool_range" "*,*,*,244")]
5398 (define_insn "*movhi_bytes"
5399 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5400 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5403 mov%?\\t%0, %1\\t%@ movhi
5404 mvn%?\\t%0, #%B1\\t%@ movhi"
5405 [(set_attr "predicable" "yes")]
5408 (define_expand "thumb_movhi_clobber"
5409 [(set (match_operand:HI 0 "memory_operand" "")
5410 (match_operand:HI 1 "register_operand" ""))
5411 (clobber (match_operand:DI 2 "register_operand" ""))]
5414 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5415 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5417 emit_insn (gen_movhi (operands[0], operands[1]));
5420 /* XXX Fixme, need to handle other cases here as well. */
5425 ;; We use a DImode scratch because we may occasionally need an additional
5426 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5427 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5428 (define_expand "reload_outhi"
5429 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5430 (match_operand:HI 1 "s_register_operand" "r")
5431 (match_operand:DI 2 "s_register_operand" "=&l")])]
5434 arm_reload_out_hi (operands);
5436 thumb_reload_out_hi (operands);
5441 (define_expand "reload_inhi"
5442 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5443 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5444 (match_operand:DI 2 "s_register_operand" "=&r")])]
5448 arm_reload_in_hi (operands);
5450 thumb_reload_out_hi (operands);
5454 (define_expand "movqi"
5455 [(set (match_operand:QI 0 "general_operand" "")
5456 (match_operand:QI 1 "general_operand" ""))]
5459 /* Everything except mem = const or mem = mem can be done easily */
5461 if (can_create_pseudo_p ())
5463 if (GET_CODE (operands[1]) == CONST_INT)
5465 rtx reg = gen_reg_rtx (SImode);
5467 emit_insn (gen_movsi (reg, operands[1]));
5468 operands[1] = gen_lowpart (QImode, reg);
5473 /* ??? We shouldn't really get invalid addresses here, but this can
5474 happen if we are passed a SP (never OK for HImode/QImode) or
5475 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5476 HImode/QImode) relative address. */
5477 /* ??? This should perhaps be fixed elsewhere, for instance, in
5478 fixup_stack_1, by checking for other kinds of invalid addresses,
5479 e.g. a bare reference to a virtual register. This may confuse the
5480 alpha though, which must handle this case differently. */
5481 if (GET_CODE (operands[0]) == MEM
5482 && !memory_address_p (GET_MODE (operands[0]),
5483 XEXP (operands[0], 0)))
5485 = replace_equiv_address (operands[0],
5486 copy_to_reg (XEXP (operands[0], 0)));
5487 if (GET_CODE (operands[1]) == MEM
5488 && !memory_address_p (GET_MODE (operands[1]),
5489 XEXP (operands[1], 0)))
5491 = replace_equiv_address (operands[1],
5492 copy_to_reg (XEXP (operands[1], 0)));
5495 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5497 rtx reg = gen_reg_rtx (SImode);
5499 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5500 operands[1] = gen_lowpart (QImode, reg);
5503 if (GET_CODE (operands[0]) == MEM)
5504 operands[1] = force_reg (QImode, operands[1]);
5506 else if (TARGET_THUMB
5507 && GET_CODE (operands[1]) == CONST_INT
5508 && !satisfies_constraint_I (operands[1]))
5510 /* Handle loading a large integer during reload. */
5512 /* Writing a constant to memory needs a scratch, which should
5513 be handled with SECONDARY_RELOADs. */
5514 gcc_assert (GET_CODE (operands[0]) == REG);
5516 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5517 emit_insn (gen_movsi (operands[0], operands[1]));
5524 (define_insn "*arm_movqi_insn"
5525 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5526 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5528 && ( register_operand (operands[0], QImode)
5529 || register_operand (operands[1], QImode))"
5535 [(set_attr "type" "*,*,load1,store1")
5536 (set_attr "predicable" "yes")]
5539 (define_insn "*thumb1_movqi_insn"
5540 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5541 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5543 && ( register_operand (operands[0], QImode)
5544 || register_operand (operands[1], QImode))"
5552 [(set_attr "length" "2")
5553 (set_attr "type" "*,load1,store1,*,*,*")
5554 (set_attr "pool_range" "*,32,*,*,*,*")]
5557 (define_expand "movsf"
5558 [(set (match_operand:SF 0 "general_operand" "")
5559 (match_operand:SF 1 "general_operand" ""))]
5564 if (GET_CODE (operands[0]) == MEM)
5565 operands[1] = force_reg (SFmode, operands[1]);
5567 else /* TARGET_THUMB1 */
5569 if (can_create_pseudo_p ())
5571 if (GET_CODE (operands[0]) != REG)
5572 operands[1] = force_reg (SFmode, operands[1]);
5578 ;; Transform a floating-point move of a constant into a core register into
5579 ;; an SImode operation.
5581 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5582 (match_operand:SF 1 "immediate_operand" ""))]
5585 && GET_CODE (operands[1]) == CONST_DOUBLE"
5586 [(set (match_dup 2) (match_dup 3))]
5588 operands[2] = gen_lowpart (SImode, operands[0]);
5589 operands[3] = gen_lowpart (SImode, operands[1]);
5590 if (operands[2] == 0 || operands[3] == 0)
5595 (define_insn "*arm_movsf_soft_insn"
5596 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5597 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5599 && TARGET_SOFT_FLOAT
5600 && (GET_CODE (operands[0]) != MEM
5601 || register_operand (operands[1], SFmode))"
5604 ldr%?\\t%0, %1\\t%@ float
5605 str%?\\t%1, %0\\t%@ float"
5606 [(set_attr "length" "4,4,4")
5607 (set_attr "predicable" "yes")
5608 (set_attr "type" "*,load1,store1")
5609 (set_attr "pool_range" "*,4096,*")
5610 (set_attr "neg_pool_range" "*,4084,*")]
5613 ;;; ??? This should have alternatives for constants.
5614 (define_insn "*thumb1_movsf_insn"
5615 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5616 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5618 && ( register_operand (operands[0], SFmode)
5619 || register_operand (operands[1], SFmode))"
5628 [(set_attr "length" "2")
5629 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5630 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5633 (define_expand "movdf"
5634 [(set (match_operand:DF 0 "general_operand" "")
5635 (match_operand:DF 1 "general_operand" ""))]
5640 if (GET_CODE (operands[0]) == MEM)
5641 operands[1] = force_reg (DFmode, operands[1]);
5643 else /* TARGET_THUMB */
5645 if (can_create_pseudo_p ())
5647 if (GET_CODE (operands[0]) != REG)
5648 operands[1] = force_reg (DFmode, operands[1]);
5654 ;; Reloading a df mode value stored in integer regs to memory can require a
5656 (define_expand "reload_outdf"
5657 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5658 (match_operand:DF 1 "s_register_operand" "r")
5659 (match_operand:SI 2 "s_register_operand" "=&r")]
5663 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5666 operands[2] = XEXP (operands[0], 0);
5667 else if (code == POST_INC || code == PRE_DEC)
5669 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5670 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5671 emit_insn (gen_movdi (operands[0], operands[1]));
5674 else if (code == PRE_INC)
5676 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5678 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5681 else if (code == POST_DEC)
5682 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5684 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5685 XEXP (XEXP (operands[0], 0), 1)));
5687 emit_insn (gen_rtx_SET (VOIDmode,
5688 replace_equiv_address (operands[0], operands[2]),
5691 if (code == POST_DEC)
5692 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5698 (define_insn "*movdf_soft_insn"
5699 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5700 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5701 "TARGET_ARM && TARGET_SOFT_FLOAT
5702 && ( register_operand (operands[0], DFmode)
5703 || register_operand (operands[1], DFmode))"
5705 switch (which_alternative)
5712 return output_move_double (operands);
5715 [(set_attr "length" "8,12,16,8,8")
5716 (set_attr "type" "*,*,*,load2,store2")
5717 (set_attr "pool_range" "1020")
5718 (set_attr "neg_pool_range" "1008")]
5721 ;;; ??? This should have alternatives for constants.
5722 ;;; ??? This was originally identical to the movdi_insn pattern.
5723 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5724 ;;; thumb_reorg with a memory reference.
5725 (define_insn "*thumb_movdf_insn"
5726 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5727 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5729 && ( register_operand (operands[0], DFmode)
5730 || register_operand (operands[1], DFmode))"
5732 switch (which_alternative)
5736 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5737 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5738 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5740 return \"ldmia\\t%1, {%0, %H0}\";
5742 return \"stmia\\t%0, {%1, %H1}\";
5744 return thumb_load_double_from_address (operands);
5746 operands[2] = gen_rtx_MEM (SImode,
5747 plus_constant (XEXP (operands[0], 0), 4));
5748 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5751 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5752 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5753 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5756 [(set_attr "length" "4,2,2,6,4,4")
5757 (set_attr "type" "*,load2,store2,load2,store2,*")
5758 (set_attr "pool_range" "*,*,*,1020,*,*")]
5761 (define_expand "movxf"
5762 [(set (match_operand:XF 0 "general_operand" "")
5763 (match_operand:XF 1 "general_operand" ""))]
5764 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5766 if (GET_CODE (operands[0]) == MEM)
5767 operands[1] = force_reg (XFmode, operands[1]);
5772 (define_expand "movv2si"
5773 [(set (match_operand:V2SI 0 "nonimmediate_operand" "")
5774 (match_operand:V2SI 1 "general_operand" ""))]
5775 "TARGET_REALLY_IWMMXT"
5779 (define_expand "movv4hi"
5780 [(set (match_operand:V4HI 0 "nonimmediate_operand" "")
5781 (match_operand:V4HI 1 "general_operand" ""))]
5782 "TARGET_REALLY_IWMMXT"
5786 (define_expand "movv8qi"
5787 [(set (match_operand:V8QI 0 "nonimmediate_operand" "")
5788 (match_operand:V8QI 1 "general_operand" ""))]
5789 "TARGET_REALLY_IWMMXT"
5794 ;; load- and store-multiple insns
5795 ;; The arm can load/store any set of registers, provided that they are in
5796 ;; ascending order; but that is beyond GCC so stick with what it knows.
5798 (define_expand "load_multiple"
5799 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5800 (match_operand:SI 1 "" ""))
5801 (use (match_operand:SI 2 "" ""))])]
5804 HOST_WIDE_INT offset = 0;
5806 /* Support only fixed point registers. */
5807 if (GET_CODE (operands[2]) != CONST_INT
5808 || INTVAL (operands[2]) > 14
5809 || INTVAL (operands[2]) < 2
5810 || GET_CODE (operands[1]) != MEM
5811 || GET_CODE (operands[0]) != REG
5812 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5813 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5817 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5818 force_reg (SImode, XEXP (operands[1], 0)),
5819 TRUE, FALSE, operands[1], &offset);
5822 ;; Load multiple with write-back
5824 (define_insn "*ldmsi_postinc4"
5825 [(match_parallel 0 "load_multiple_operation"
5826 [(set (match_operand:SI 1 "s_register_operand" "=r")
5827 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5829 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5830 (mem:SI (match_dup 2)))
5831 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5832 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5833 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5834 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5835 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5836 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5837 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5838 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5839 [(set_attr "type" "load4")
5840 (set_attr "predicable" "yes")]
5843 (define_insn "*ldmsi_postinc4_thumb1"
5844 [(match_parallel 0 "load_multiple_operation"
5845 [(set (match_operand:SI 1 "s_register_operand" "=l")
5846 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5848 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5849 (mem:SI (match_dup 2)))
5850 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5851 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5852 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5853 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5854 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5855 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5856 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5857 "ldmia\\t%1!, {%3, %4, %5, %6}"
5858 [(set_attr "type" "load4")]
5861 (define_insn "*ldmsi_postinc3"
5862 [(match_parallel 0 "load_multiple_operation"
5863 [(set (match_operand:SI 1 "s_register_operand" "=r")
5864 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5866 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5867 (mem:SI (match_dup 2)))
5868 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5869 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5870 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5871 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5872 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5873 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5874 [(set_attr "type" "load3")
5875 (set_attr "predicable" "yes")]
5878 (define_insn "*ldmsi_postinc2"
5879 [(match_parallel 0 "load_multiple_operation"
5880 [(set (match_operand:SI 1 "s_register_operand" "=r")
5881 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5883 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5884 (mem:SI (match_dup 2)))
5885 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5886 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5887 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5888 "ldm%(ia%)\\t%1!, {%3, %4}"
5889 [(set_attr "type" "load2")
5890 (set_attr "predicable" "yes")]
5893 ;; Ordinary load multiple
5895 (define_insn "*ldmsi4"
5896 [(match_parallel 0 "load_multiple_operation"
5897 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5898 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5899 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5900 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5901 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5902 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5903 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5904 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5905 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5906 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
5907 [(set_attr "type" "load4")
5908 (set_attr "predicable" "yes")]
5911 (define_insn "*ldmsi3"
5912 [(match_parallel 0 "load_multiple_operation"
5913 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5914 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5915 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5916 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5917 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5918 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
5919 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5920 "ldm%(ia%)\\t%1, {%2, %3, %4}"
5921 [(set_attr "type" "load3")
5922 (set_attr "predicable" "yes")]
5925 (define_insn "*ldmsi2"
5926 [(match_parallel 0 "load_multiple_operation"
5927 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5928 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5929 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5930 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
5931 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
5932 "ldm%(ia%)\\t%1, {%2, %3}"
5933 [(set_attr "type" "load2")
5934 (set_attr "predicable" "yes")]
5937 (define_expand "store_multiple"
5938 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5939 (match_operand:SI 1 "" ""))
5940 (use (match_operand:SI 2 "" ""))])]
5943 HOST_WIDE_INT offset = 0;
5945 /* Support only fixed point registers. */
5946 if (GET_CODE (operands[2]) != CONST_INT
5947 || INTVAL (operands[2]) > 14
5948 || INTVAL (operands[2]) < 2
5949 || GET_CODE (operands[1]) != REG
5950 || GET_CODE (operands[0]) != MEM
5951 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
5952 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5956 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
5957 force_reg (SImode, XEXP (operands[0], 0)),
5958 TRUE, FALSE, operands[0], &offset);
5961 ;; Store multiple with write-back
5963 (define_insn "*stmsi_postinc4"
5964 [(match_parallel 0 "store_multiple_operation"
5965 [(set (match_operand:SI 1 "s_register_operand" "=r")
5966 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5968 (set (mem:SI (match_dup 2))
5969 (match_operand:SI 3 "arm_hard_register_operand" ""))
5970 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5971 (match_operand:SI 4 "arm_hard_register_operand" ""))
5972 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5973 (match_operand:SI 5 "arm_hard_register_operand" ""))
5974 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5975 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5976 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5977 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5978 [(set_attr "predicable" "yes")
5979 (set_attr "type" "store4")]
5982 (define_insn "*stmsi_postinc4_thumb1"
5983 [(match_parallel 0 "store_multiple_operation"
5984 [(set (match_operand:SI 1 "s_register_operand" "=l")
5985 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5987 (set (mem:SI (match_dup 2))
5988 (match_operand:SI 3 "arm_hard_register_operand" ""))
5989 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
5990 (match_operand:SI 4 "arm_hard_register_operand" ""))
5991 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
5992 (match_operand:SI 5 "arm_hard_register_operand" ""))
5993 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
5994 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
5995 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5996 "stmia\\t%1!, {%3, %4, %5, %6}"
5997 [(set_attr "type" "store4")]
6000 (define_insn "*stmsi_postinc3"
6001 [(match_parallel 0 "store_multiple_operation"
6002 [(set (match_operand:SI 1 "s_register_operand" "=r")
6003 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6005 (set (mem:SI (match_dup 2))
6006 (match_operand:SI 3 "arm_hard_register_operand" ""))
6007 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6008 (match_operand:SI 4 "arm_hard_register_operand" ""))
6009 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6010 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6011 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6012 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6013 [(set_attr "predicable" "yes")
6014 (set_attr "type" "store3")]
6017 (define_insn "*stmsi_postinc2"
6018 [(match_parallel 0 "store_multiple_operation"
6019 [(set (match_operand:SI 1 "s_register_operand" "=r")
6020 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6022 (set (mem:SI (match_dup 2))
6023 (match_operand:SI 3 "arm_hard_register_operand" ""))
6024 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6025 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6026 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6027 "stm%(ia%)\\t%1!, {%3, %4}"
6028 [(set_attr "predicable" "yes")
6029 (set_attr "type" "store2")]
6032 ;; Ordinary store multiple
6034 (define_insn "*stmsi4"
6035 [(match_parallel 0 "store_multiple_operation"
6036 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6037 (match_operand:SI 2 "arm_hard_register_operand" ""))
6038 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6039 (match_operand:SI 3 "arm_hard_register_operand" ""))
6040 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6041 (match_operand:SI 4 "arm_hard_register_operand" ""))
6042 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6043 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6044 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6045 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6046 [(set_attr "predicable" "yes")
6047 (set_attr "type" "store4")]
6050 (define_insn "*stmsi3"
6051 [(match_parallel 0 "store_multiple_operation"
6052 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6053 (match_operand:SI 2 "arm_hard_register_operand" ""))
6054 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6055 (match_operand:SI 3 "arm_hard_register_operand" ""))
6056 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6057 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6058 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6059 "stm%(ia%)\\t%1, {%2, %3, %4}"
6060 [(set_attr "predicable" "yes")
6061 (set_attr "type" "store3")]
6064 (define_insn "*stmsi2"
6065 [(match_parallel 0 "store_multiple_operation"
6066 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6067 (match_operand:SI 2 "arm_hard_register_operand" ""))
6068 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6069 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6070 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6071 "stm%(ia%)\\t%1, {%2, %3}"
6072 [(set_attr "predicable" "yes")
6073 (set_attr "type" "store2")]
6076 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6077 ;; We could let this apply for blocks of less than this, but it clobbers so
6078 ;; many registers that there is then probably a better way.
6080 (define_expand "movmemqi"
6081 [(match_operand:BLK 0 "general_operand" "")
6082 (match_operand:BLK 1 "general_operand" "")
6083 (match_operand:SI 2 "const_int_operand" "")
6084 (match_operand:SI 3 "const_int_operand" "")]
6089 if (arm_gen_movmemqi (operands))
6093 else /* TARGET_THUMB1 */
6095 if ( INTVAL (operands[3]) != 4
6096 || INTVAL (operands[2]) > 48)
6099 thumb_expand_movmemqi (operands);
6105 ;; Thumb block-move insns
6107 (define_insn "movmem12b"
6108 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6109 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6110 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6111 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6112 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6113 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6114 (set (match_operand:SI 0 "register_operand" "=l")
6115 (plus:SI (match_dup 2) (const_int 12)))
6116 (set (match_operand:SI 1 "register_operand" "=l")
6117 (plus:SI (match_dup 3) (const_int 12)))
6118 (clobber (match_scratch:SI 4 "=&l"))
6119 (clobber (match_scratch:SI 5 "=&l"))
6120 (clobber (match_scratch:SI 6 "=&l"))]
6122 "* return thumb_output_move_mem_multiple (3, operands);"
6123 [(set_attr "length" "4")
6124 ; This isn't entirely accurate... It loads as well, but in terms of
6125 ; scheduling the following insn it is better to consider it as a store
6126 (set_attr "type" "store3")]
6129 (define_insn "movmem8b"
6130 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6131 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6132 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6133 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6134 (set (match_operand:SI 0 "register_operand" "=l")
6135 (plus:SI (match_dup 2) (const_int 8)))
6136 (set (match_operand:SI 1 "register_operand" "=l")
6137 (plus:SI (match_dup 3) (const_int 8)))
6138 (clobber (match_scratch:SI 4 "=&l"))
6139 (clobber (match_scratch:SI 5 "=&l"))]
6141 "* return thumb_output_move_mem_multiple (2, operands);"
6142 [(set_attr "length" "4")
6143 ; This isn't entirely accurate... It loads as well, but in terms of
6144 ; scheduling the following insn it is better to consider it as a store
6145 (set_attr "type" "store2")]
6150 ;; Compare & branch insns
6151 ;; The range calculations are based as follows:
6152 ;; For forward branches, the address calculation returns the address of
6153 ;; the next instruction. This is 2 beyond the branch instruction.
6154 ;; For backward branches, the address calculation returns the address of
6155 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6156 ;; instruction for the shortest sequence, and 4 before the branch instruction
6157 ;; if we have to jump around an unconditional branch.
6158 ;; To the basic branch range the PC offset must be added (this is +4).
6159 ;; So for forward branches we have
6160 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6161 ;; And for backward branches we have
6162 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6164 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6165 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6167 (define_expand "cbranchsi4"
6168 [(set (pc) (if_then_else
6169 (match_operator 0 "arm_comparison_operator"
6170 [(match_operand:SI 1 "s_register_operand" "")
6171 (match_operand:SI 2 "nonmemory_operand" "")])
6172 (label_ref (match_operand 3 "" ""))
6176 if (thumb1_cmpneg_operand (operands[2], SImode))
6178 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6179 operands[3], operands[0]));
6182 if (!thumb1_cmp_operand (operands[2], SImode))
6183 operands[2] = force_reg (SImode, operands[2]);
6186 (define_insn "*cbranchsi4_insn"
6187 [(set (pc) (if_then_else
6188 (match_operator 0 "arm_comparison_operator"
6189 [(match_operand:SI 1 "s_register_operand" "l,*h")
6190 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6191 (label_ref (match_operand 3 "" ""))
6195 output_asm_insn (\"cmp\\t%1, %2\", operands);
6197 switch (get_attr_length (insn))
6199 case 4: return \"b%d0\\t%l3\";
6200 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6201 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6204 [(set (attr "far_jump")
6206 (eq_attr "length" "8")
6207 (const_string "yes")
6208 (const_string "no")))
6209 (set (attr "length")
6211 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6212 (le (minus (match_dup 3) (pc)) (const_int 256)))
6215 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6216 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6221 (define_insn "cbranchsi4_scratch"
6222 [(set (pc) (if_then_else
6223 (match_operator 4 "arm_comparison_operator"
6224 [(match_operand:SI 1 "s_register_operand" "l,0")
6225 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6226 (label_ref (match_operand 3 "" ""))
6228 (clobber (match_scratch:SI 0 "=l,l"))]
6231 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6233 switch (get_attr_length (insn))
6235 case 4: return \"b%d4\\t%l3\";
6236 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6237 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6240 [(set (attr "far_jump")
6242 (eq_attr "length" "8")
6243 (const_string "yes")
6244 (const_string "no")))
6245 (set (attr "length")
6247 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6248 (le (minus (match_dup 3) (pc)) (const_int 256)))
6251 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6252 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6256 (define_insn "*movsi_cbranchsi4"
6259 (match_operator 3 "arm_comparison_operator"
6260 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6262 (label_ref (match_operand 2 "" ""))
6264 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6268 if (which_alternative == 0)
6269 output_asm_insn (\"cmp\t%0, #0\", operands);
6270 else if (which_alternative == 1)
6271 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6274 output_asm_insn (\"cmp\t%1, #0\", operands);
6275 if (which_alternative == 2)
6276 output_asm_insn (\"mov\t%0, %1\", operands);
6278 output_asm_insn (\"str\t%1, %0\", operands);
6280 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6282 case 4: return \"b%d3\\t%l2\";
6283 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6284 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6287 [(set (attr "far_jump")
6289 (ior (and (gt (symbol_ref ("which_alternative"))
6291 (eq_attr "length" "8"))
6292 (eq_attr "length" "10"))
6293 (const_string "yes")
6294 (const_string "no")))
6295 (set (attr "length")
6297 (le (symbol_ref ("which_alternative"))
6300 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6301 (le (minus (match_dup 2) (pc)) (const_int 256)))
6304 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6305 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6309 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6310 (le (minus (match_dup 2) (pc)) (const_int 256)))
6313 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6314 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6319 (define_insn "*negated_cbranchsi4"
6322 (match_operator 0 "equality_operator"
6323 [(match_operand:SI 1 "s_register_operand" "l")
6324 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6325 (label_ref (match_operand 3 "" ""))
6329 output_asm_insn (\"cmn\\t%1, %2\", operands);
6330 switch (get_attr_length (insn))
6332 case 4: return \"b%d0\\t%l3\";
6333 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6334 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6337 [(set (attr "far_jump")
6339 (eq_attr "length" "8")
6340 (const_string "yes")
6341 (const_string "no")))
6342 (set (attr "length")
6344 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6345 (le (minus (match_dup 3) (pc)) (const_int 256)))
6348 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6349 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6354 (define_insn "*tbit_cbranch"
6357 (match_operator 0 "equality_operator"
6358 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6360 (match_operand:SI 2 "const_int_operand" "i"))
6362 (label_ref (match_operand 3 "" ""))
6364 (clobber (match_scratch:SI 4 "=l"))]
6369 op[0] = operands[4];
6370 op[1] = operands[1];
6371 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6373 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6374 switch (get_attr_length (insn))
6376 case 4: return \"b%d0\\t%l3\";
6377 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6378 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6381 [(set (attr "far_jump")
6383 (eq_attr "length" "8")
6384 (const_string "yes")
6385 (const_string "no")))
6386 (set (attr "length")
6388 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6389 (le (minus (match_dup 3) (pc)) (const_int 256)))
6392 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6393 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6398 (define_insn "*tlobits_cbranch"
6401 (match_operator 0 "equality_operator"
6402 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6403 (match_operand:SI 2 "const_int_operand" "i")
6406 (label_ref (match_operand 3 "" ""))
6408 (clobber (match_scratch:SI 4 "=l"))]
6413 op[0] = operands[4];
6414 op[1] = operands[1];
6415 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6417 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6418 switch (get_attr_length (insn))
6420 case 4: return \"b%d0\\t%l3\";
6421 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6422 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6425 [(set (attr "far_jump")
6427 (eq_attr "length" "8")
6428 (const_string "yes")
6429 (const_string "no")))
6430 (set (attr "length")
6432 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6433 (le (minus (match_dup 3) (pc)) (const_int 256)))
6436 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6437 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6442 (define_insn "*tstsi3_cbranch"
6445 (match_operator 3 "equality_operator"
6446 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6447 (match_operand:SI 1 "s_register_operand" "l"))
6449 (label_ref (match_operand 2 "" ""))
6454 output_asm_insn (\"tst\\t%0, %1\", operands);
6455 switch (get_attr_length (insn))
6457 case 4: return \"b%d3\\t%l2\";
6458 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6459 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6462 [(set (attr "far_jump")
6464 (eq_attr "length" "8")
6465 (const_string "yes")
6466 (const_string "no")))
6467 (set (attr "length")
6469 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6470 (le (minus (match_dup 2) (pc)) (const_int 256)))
6473 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6474 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6479 (define_insn "*andsi3_cbranch"
6482 (match_operator 5 "equality_operator"
6483 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6484 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6486 (label_ref (match_operand 4 "" ""))
6488 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6489 (and:SI (match_dup 2) (match_dup 3)))
6490 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6494 if (which_alternative == 0)
6495 output_asm_insn (\"and\\t%0, %3\", operands);
6496 else if (which_alternative == 1)
6498 output_asm_insn (\"and\\t%1, %3\", operands);
6499 output_asm_insn (\"mov\\t%0, %1\", operands);
6503 output_asm_insn (\"and\\t%1, %3\", operands);
6504 output_asm_insn (\"str\\t%1, %0\", operands);
6507 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6509 case 4: return \"b%d5\\t%l4\";
6510 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6511 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6514 [(set (attr "far_jump")
6516 (ior (and (eq (symbol_ref ("which_alternative"))
6518 (eq_attr "length" "8"))
6519 (eq_attr "length" "10"))
6520 (const_string "yes")
6521 (const_string "no")))
6522 (set (attr "length")
6524 (eq (symbol_ref ("which_alternative"))
6527 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6528 (le (minus (match_dup 4) (pc)) (const_int 256)))
6531 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6532 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6536 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6537 (le (minus (match_dup 4) (pc)) (const_int 256)))
6540 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6541 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6546 (define_insn "*orrsi3_cbranch_scratch"
6549 (match_operator 4 "equality_operator"
6550 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6551 (match_operand:SI 2 "s_register_operand" "l"))
6553 (label_ref (match_operand 3 "" ""))
6555 (clobber (match_scratch:SI 0 "=l"))]
6559 output_asm_insn (\"orr\\t%0, %2\", operands);
6560 switch (get_attr_length (insn))
6562 case 4: return \"b%d4\\t%l3\";
6563 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6564 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6567 [(set (attr "far_jump")
6569 (eq_attr "length" "8")
6570 (const_string "yes")
6571 (const_string "no")))
6572 (set (attr "length")
6574 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6575 (le (minus (match_dup 3) (pc)) (const_int 256)))
6578 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6579 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6584 (define_insn "*orrsi3_cbranch"
6587 (match_operator 5 "equality_operator"
6588 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6589 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6591 (label_ref (match_operand 4 "" ""))
6593 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6594 (ior:SI (match_dup 2) (match_dup 3)))
6595 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6599 if (which_alternative == 0)
6600 output_asm_insn (\"orr\\t%0, %3\", operands);
6601 else if (which_alternative == 1)
6603 output_asm_insn (\"orr\\t%1, %3\", operands);
6604 output_asm_insn (\"mov\\t%0, %1\", operands);
6608 output_asm_insn (\"orr\\t%1, %3\", operands);
6609 output_asm_insn (\"str\\t%1, %0\", operands);
6612 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6614 case 4: return \"b%d5\\t%l4\";
6615 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6616 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6619 [(set (attr "far_jump")
6621 (ior (and (eq (symbol_ref ("which_alternative"))
6623 (eq_attr "length" "8"))
6624 (eq_attr "length" "10"))
6625 (const_string "yes")
6626 (const_string "no")))
6627 (set (attr "length")
6629 (eq (symbol_ref ("which_alternative"))
6632 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6633 (le (minus (match_dup 4) (pc)) (const_int 256)))
6636 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6637 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6641 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6642 (le (minus (match_dup 4) (pc)) (const_int 256)))
6645 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6646 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6651 (define_insn "*xorsi3_cbranch_scratch"
6654 (match_operator 4 "equality_operator"
6655 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6656 (match_operand:SI 2 "s_register_operand" "l"))
6658 (label_ref (match_operand 3 "" ""))
6660 (clobber (match_scratch:SI 0 "=l"))]
6664 output_asm_insn (\"eor\\t%0, %2\", operands);
6665 switch (get_attr_length (insn))
6667 case 4: return \"b%d4\\t%l3\";
6668 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6669 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6672 [(set (attr "far_jump")
6674 (eq_attr "length" "8")
6675 (const_string "yes")
6676 (const_string "no")))
6677 (set (attr "length")
6679 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6680 (le (minus (match_dup 3) (pc)) (const_int 256)))
6683 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6684 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6689 (define_insn "*xorsi3_cbranch"
6692 (match_operator 5 "equality_operator"
6693 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6694 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6696 (label_ref (match_operand 4 "" ""))
6698 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6699 (xor:SI (match_dup 2) (match_dup 3)))
6700 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6704 if (which_alternative == 0)
6705 output_asm_insn (\"eor\\t%0, %3\", operands);
6706 else if (which_alternative == 1)
6708 output_asm_insn (\"eor\\t%1, %3\", operands);
6709 output_asm_insn (\"mov\\t%0, %1\", operands);
6713 output_asm_insn (\"eor\\t%1, %3\", operands);
6714 output_asm_insn (\"str\\t%1, %0\", operands);
6717 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6719 case 4: return \"b%d5\\t%l4\";
6720 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6721 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6724 [(set (attr "far_jump")
6726 (ior (and (eq (symbol_ref ("which_alternative"))
6728 (eq_attr "length" "8"))
6729 (eq_attr "length" "10"))
6730 (const_string "yes")
6731 (const_string "no")))
6732 (set (attr "length")
6734 (eq (symbol_ref ("which_alternative"))
6737 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6738 (le (minus (match_dup 4) (pc)) (const_int 256)))
6741 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6742 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6746 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6747 (le (minus (match_dup 4) (pc)) (const_int 256)))
6750 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6751 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6756 (define_insn "*bicsi3_cbranch_scratch"
6759 (match_operator 4 "equality_operator"
6760 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6761 (match_operand:SI 1 "s_register_operand" "0"))
6763 (label_ref (match_operand 3 "" ""))
6765 (clobber (match_scratch:SI 0 "=l"))]
6769 output_asm_insn (\"bic\\t%0, %2\", operands);
6770 switch (get_attr_length (insn))
6772 case 4: return \"b%d4\\t%l3\";
6773 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6774 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6777 [(set (attr "far_jump")
6779 (eq_attr "length" "8")
6780 (const_string "yes")
6781 (const_string "no")))
6782 (set (attr "length")
6784 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6785 (le (minus (match_dup 3) (pc)) (const_int 256)))
6788 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6789 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6794 (define_insn "*bicsi3_cbranch"
6797 (match_operator 5 "equality_operator"
6798 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6799 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6801 (label_ref (match_operand 4 "" ""))
6803 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6804 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6805 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6809 if (which_alternative == 0)
6810 output_asm_insn (\"bic\\t%0, %3\", operands);
6811 else if (which_alternative <= 2)
6813 output_asm_insn (\"bic\\t%1, %3\", operands);
6814 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6815 conditions again, since we're only testing for equality. */
6816 output_asm_insn (\"mov\\t%0, %1\", operands);
6820 output_asm_insn (\"bic\\t%1, %3\", operands);
6821 output_asm_insn (\"str\\t%1, %0\", operands);
6824 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6826 case 4: return \"b%d5\\t%l4\";
6827 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6828 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6831 [(set (attr "far_jump")
6833 (ior (and (eq (symbol_ref ("which_alternative"))
6835 (eq_attr "length" "8"))
6836 (eq_attr "length" "10"))
6837 (const_string "yes")
6838 (const_string "no")))
6839 (set (attr "length")
6841 (eq (symbol_ref ("which_alternative"))
6844 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6845 (le (minus (match_dup 4) (pc)) (const_int 256)))
6848 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6849 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6853 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6854 (le (minus (match_dup 4) (pc)) (const_int 256)))
6857 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6858 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6863 (define_insn "*cbranchne_decr1"
6865 (if_then_else (match_operator 3 "equality_operator"
6866 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6868 (label_ref (match_operand 4 "" ""))
6870 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6871 (plus:SI (match_dup 2) (const_int -1)))
6872 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6877 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6879 VOIDmode, operands[2], const1_rtx);
6880 cond[1] = operands[4];
6882 if (which_alternative == 0)
6883 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6884 else if (which_alternative == 1)
6886 /* We must provide an alternative for a hi reg because reload
6887 cannot handle output reloads on a jump instruction, but we
6888 can't subtract into that. Fortunately a mov from lo to hi
6889 does not clobber the condition codes. */
6890 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6891 output_asm_insn (\"mov\\t%0, %1\", operands);
6895 /* Similarly, but the target is memory. */
6896 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6897 output_asm_insn (\"str\\t%1, %0\", operands);
6900 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6903 output_asm_insn (\"b%d0\\t%l1\", cond);
6906 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6907 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6909 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6910 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6914 [(set (attr "far_jump")
6916 (ior (and (eq (symbol_ref ("which_alternative"))
6918 (eq_attr "length" "8"))
6919 (eq_attr "length" "10"))
6920 (const_string "yes")
6921 (const_string "no")))
6922 (set_attr_alternative "length"
6926 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6927 (le (minus (match_dup 4) (pc)) (const_int 256)))
6930 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6931 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6936 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6937 (le (minus (match_dup 4) (pc)) (const_int 256)))
6940 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6941 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6946 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6947 (le (minus (match_dup 4) (pc)) (const_int 256)))
6950 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6951 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6956 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6957 (le (minus (match_dup 4) (pc)) (const_int 256)))
6960 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6961 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6966 (define_insn "*addsi3_cbranch"
6969 (match_operator 4 "comparison_operator"
6971 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
6972 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
6974 (label_ref (match_operand 5 "" ""))
6977 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6978 (plus:SI (match_dup 2) (match_dup 3)))
6979 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
6981 && (GET_CODE (operands[4]) == EQ
6982 || GET_CODE (operands[4]) == NE
6983 || GET_CODE (operands[4]) == GE
6984 || GET_CODE (operands[4]) == LT)"
6990 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
6991 cond[1] = operands[2];
6992 cond[2] = operands[3];
6994 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6995 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6997 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6999 if (which_alternative >= 3
7000 && which_alternative < 4)
7001 output_asm_insn (\"mov\\t%0, %1\", operands);
7002 else if (which_alternative >= 4)
7003 output_asm_insn (\"str\\t%1, %0\", operands);
7005 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7008 return \"b%d4\\t%l5\";
7010 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7012 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7016 [(set (attr "far_jump")
7018 (ior (and (lt (symbol_ref ("which_alternative"))
7020 (eq_attr "length" "8"))
7021 (eq_attr "length" "10"))
7022 (const_string "yes")
7023 (const_string "no")))
7024 (set (attr "length")
7026 (lt (symbol_ref ("which_alternative"))
7029 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7030 (le (minus (match_dup 5) (pc)) (const_int 256)))
7033 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7034 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7038 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7039 (le (minus (match_dup 5) (pc)) (const_int 256)))
7042 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7043 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7048 (define_insn "*addsi3_cbranch_scratch"
7051 (match_operator 3 "comparison_operator"
7053 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7054 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7056 (label_ref (match_operand 4 "" ""))
7058 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7060 && (GET_CODE (operands[3]) == EQ
7061 || GET_CODE (operands[3]) == NE
7062 || GET_CODE (operands[3]) == GE
7063 || GET_CODE (operands[3]) == LT)"
7066 switch (which_alternative)
7069 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7072 output_asm_insn (\"cmn\t%1, %2\", operands);
7075 if (INTVAL (operands[2]) < 0)
7076 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7078 output_asm_insn (\"add\t%0, %1, %2\", operands);
7081 if (INTVAL (operands[2]) < 0)
7082 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7084 output_asm_insn (\"add\t%0, %0, %2\", operands);
7088 switch (get_attr_length (insn))
7091 return \"b%d3\\t%l4\";
7093 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7095 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7099 [(set (attr "far_jump")
7101 (eq_attr "length" "8")
7102 (const_string "yes")
7103 (const_string "no")))
7104 (set (attr "length")
7106 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7107 (le (minus (match_dup 4) (pc)) (const_int 256)))
7110 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7111 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7116 (define_insn "*subsi3_cbranch"
7119 (match_operator 4 "comparison_operator"
7121 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7122 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7124 (label_ref (match_operand 5 "" ""))
7126 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7127 (minus:SI (match_dup 2) (match_dup 3)))
7128 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7130 && (GET_CODE (operands[4]) == EQ
7131 || GET_CODE (operands[4]) == NE
7132 || GET_CODE (operands[4]) == GE
7133 || GET_CODE (operands[4]) == LT)"
7136 if (which_alternative == 0)
7137 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7138 else if (which_alternative == 1)
7140 /* We must provide an alternative for a hi reg because reload
7141 cannot handle output reloads on a jump instruction, but we
7142 can't subtract into that. Fortunately a mov from lo to hi
7143 does not clobber the condition codes. */
7144 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7145 output_asm_insn (\"mov\\t%0, %1\", operands);
7149 /* Similarly, but the target is memory. */
7150 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7151 output_asm_insn (\"str\\t%1, %0\", operands);
7154 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7157 return \"b%d4\\t%l5\";
7159 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7161 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7165 [(set (attr "far_jump")
7167 (ior (and (eq (symbol_ref ("which_alternative"))
7169 (eq_attr "length" "8"))
7170 (eq_attr "length" "10"))
7171 (const_string "yes")
7172 (const_string "no")))
7173 (set (attr "length")
7175 (eq (symbol_ref ("which_alternative"))
7178 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7179 (le (minus (match_dup 5) (pc)) (const_int 256)))
7182 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7183 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7187 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7188 (le (minus (match_dup 5) (pc)) (const_int 256)))
7191 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7192 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7197 (define_insn "*subsi3_cbranch_scratch"
7200 (match_operator 0 "arm_comparison_operator"
7201 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7202 (match_operand:SI 2 "nonmemory_operand" "l"))
7204 (label_ref (match_operand 3 "" ""))
7207 && (GET_CODE (operands[0]) == EQ
7208 || GET_CODE (operands[0]) == NE
7209 || GET_CODE (operands[0]) == GE
7210 || GET_CODE (operands[0]) == LT)"
7212 output_asm_insn (\"cmp\\t%1, %2\", operands);
7213 switch (get_attr_length (insn))
7215 case 4: return \"b%d0\\t%l3\";
7216 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7217 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7220 [(set (attr "far_jump")
7222 (eq_attr "length" "8")
7223 (const_string "yes")
7224 (const_string "no")))
7225 (set (attr "length")
7227 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7228 (le (minus (match_dup 3) (pc)) (const_int 256)))
7231 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7232 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7237 ;; Comparison and test insns
7239 (define_expand "cmpsi"
7240 [(match_operand:SI 0 "s_register_operand" "")
7241 (match_operand:SI 1 "arm_add_operand" "")]
7244 arm_compare_op0 = operands[0];
7245 arm_compare_op1 = operands[1];
7250 (define_expand "cmpsf"
7251 [(match_operand:SF 0 "s_register_operand" "")
7252 (match_operand:SF 1 "arm_float_compare_operand" "")]
7253 "TARGET_32BIT && TARGET_HARD_FLOAT"
7255 arm_compare_op0 = operands[0];
7256 arm_compare_op1 = operands[1];
7261 (define_expand "cmpdf"
7262 [(match_operand:DF 0 "s_register_operand" "")
7263 (match_operand:DF 1 "arm_float_compare_operand" "")]
7264 "TARGET_32BIT && TARGET_HARD_FLOAT"
7266 arm_compare_op0 = operands[0];
7267 arm_compare_op1 = operands[1];
7272 (define_insn "*arm_cmpsi_insn"
7273 [(set (reg:CC CC_REGNUM)
7274 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7275 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7280 [(set_attr "conds" "set")]
7283 (define_insn "*arm_cmpsi_shiftsi"
7284 [(set (reg:CC CC_REGNUM)
7285 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7286 (match_operator:SI 3 "shift_operator"
7287 [(match_operand:SI 1 "s_register_operand" "r")
7288 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7291 [(set_attr "conds" "set")
7292 (set_attr "shift" "1")
7293 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7294 (const_string "alu_shift")
7295 (const_string "alu_shift_reg")))]
7298 (define_insn "*arm_cmpsi_shiftsi_swp"
7299 [(set (reg:CC_SWP CC_REGNUM)
7300 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7301 [(match_operand:SI 1 "s_register_operand" "r")
7302 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7303 (match_operand:SI 0 "s_register_operand" "r")))]
7306 [(set_attr "conds" "set")
7307 (set_attr "shift" "1")
7308 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7309 (const_string "alu_shift")
7310 (const_string "alu_shift_reg")))]
7313 (define_insn "*arm_cmpsi_negshiftsi_si"
7314 [(set (reg:CC_Z CC_REGNUM)
7316 (neg:SI (match_operator:SI 1 "shift_operator"
7317 [(match_operand:SI 2 "s_register_operand" "r")
7318 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7319 (match_operand:SI 0 "s_register_operand" "r")))]
7322 [(set_attr "conds" "set")
7323 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7324 (const_string "alu_shift")
7325 (const_string "alu_shift_reg")))]
7328 ;; Cirrus SF compare instruction
7329 (define_insn "*cirrus_cmpsf"
7330 [(set (reg:CCFP CC_REGNUM)
7331 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7332 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7333 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7334 "cfcmps%?\\tr15, %V0, %V1"
7335 [(set_attr "type" "mav_farith")
7336 (set_attr "cirrus" "compare")]
7339 ;; Cirrus DF compare instruction
7340 (define_insn "*cirrus_cmpdf"
7341 [(set (reg:CCFP CC_REGNUM)
7342 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7343 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7344 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7345 "cfcmpd%?\\tr15, %V0, %V1"
7346 [(set_attr "type" "mav_farith")
7347 (set_attr "cirrus" "compare")]
7350 ;; Cirrus DI compare instruction
7351 (define_expand "cmpdi"
7352 [(match_operand:DI 0 "cirrus_fp_register" "")
7353 (match_operand:DI 1 "cirrus_fp_register" "")]
7354 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7356 arm_compare_op0 = operands[0];
7357 arm_compare_op1 = operands[1];
7361 (define_insn "*cirrus_cmpdi"
7362 [(set (reg:CC CC_REGNUM)
7363 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7364 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7365 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7366 "cfcmp64%?\\tr15, %V0, %V1"
7367 [(set_attr "type" "mav_farith")
7368 (set_attr "cirrus" "compare")]
7371 ; This insn allows redundant compares to be removed by cse, nothing should
7372 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7373 ; is deleted later on. The match_dup will match the mode here, so that
7374 ; mode changes of the condition codes aren't lost by this even though we don't
7375 ; specify what they are.
7377 (define_insn "*deleted_compare"
7378 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7380 "\\t%@ deleted compare"
7381 [(set_attr "conds" "set")
7382 (set_attr "length" "0")]
7386 ;; Conditional branch insns
7388 (define_expand "beq"
7390 (if_then_else (eq (match_dup 1) (const_int 0))
7391 (label_ref (match_operand 0 "" ""))
7394 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7397 (define_expand "bne"
7399 (if_then_else (ne (match_dup 1) (const_int 0))
7400 (label_ref (match_operand 0 "" ""))
7403 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7406 (define_expand "bgt"
7408 (if_then_else (gt (match_dup 1) (const_int 0))
7409 (label_ref (match_operand 0 "" ""))
7412 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7415 (define_expand "ble"
7417 (if_then_else (le (match_dup 1) (const_int 0))
7418 (label_ref (match_operand 0 "" ""))
7421 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7424 (define_expand "bge"
7426 (if_then_else (ge (match_dup 1) (const_int 0))
7427 (label_ref (match_operand 0 "" ""))
7430 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7433 (define_expand "blt"
7435 (if_then_else (lt (match_dup 1) (const_int 0))
7436 (label_ref (match_operand 0 "" ""))
7439 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7442 (define_expand "bgtu"
7444 (if_then_else (gtu (match_dup 1) (const_int 0))
7445 (label_ref (match_operand 0 "" ""))
7448 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7451 (define_expand "bleu"
7453 (if_then_else (leu (match_dup 1) (const_int 0))
7454 (label_ref (match_operand 0 "" ""))
7457 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7460 (define_expand "bgeu"
7462 (if_then_else (geu (match_dup 1) (const_int 0))
7463 (label_ref (match_operand 0 "" ""))
7466 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7469 (define_expand "bltu"
7471 (if_then_else (ltu (match_dup 1) (const_int 0))
7472 (label_ref (match_operand 0 "" ""))
7475 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7478 (define_expand "bunordered"
7480 (if_then_else (unordered (match_dup 1) (const_int 0))
7481 (label_ref (match_operand 0 "" ""))
7483 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7484 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7488 (define_expand "bordered"
7490 (if_then_else (ordered (match_dup 1) (const_int 0))
7491 (label_ref (match_operand 0 "" ""))
7493 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7494 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7498 (define_expand "bungt"
7500 (if_then_else (ungt (match_dup 1) (const_int 0))
7501 (label_ref (match_operand 0 "" ""))
7503 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7504 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7507 (define_expand "bunlt"
7509 (if_then_else (unlt (match_dup 1) (const_int 0))
7510 (label_ref (match_operand 0 "" ""))
7512 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7513 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7516 (define_expand "bunge"
7518 (if_then_else (unge (match_dup 1) (const_int 0))
7519 (label_ref (match_operand 0 "" ""))
7521 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7522 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7525 (define_expand "bunle"
7527 (if_then_else (unle (match_dup 1) (const_int 0))
7528 (label_ref (match_operand 0 "" ""))
7530 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7531 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7534 ;; The following two patterns need two branch instructions, since there is
7535 ;; no single instruction that will handle all cases.
7536 (define_expand "buneq"
7538 (if_then_else (uneq (match_dup 1) (const_int 0))
7539 (label_ref (match_operand 0 "" ""))
7541 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7542 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7545 (define_expand "bltgt"
7547 (if_then_else (ltgt (match_dup 1) (const_int 0))
7548 (label_ref (match_operand 0 "" ""))
7550 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7551 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7555 ;; Patterns to match conditional branch insns.
7558 ; Special pattern to match UNEQ.
7559 (define_insn "*arm_buneq"
7561 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7562 (label_ref (match_operand 0 "" ""))
7564 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7566 gcc_assert (!arm_ccfsm_state);
7568 return \"bvs\\t%l0\;beq\\t%l0\";
7570 [(set_attr "conds" "jump_clob")
7571 (set_attr "length" "8")]
7574 ; Special pattern to match LTGT.
7575 (define_insn "*arm_bltgt"
7577 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7578 (label_ref (match_operand 0 "" ""))
7580 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7582 gcc_assert (!arm_ccfsm_state);
7584 return \"bmi\\t%l0\;bgt\\t%l0\";
7586 [(set_attr "conds" "jump_clob")
7587 (set_attr "length" "8")]
7590 (define_insn "*arm_cond_branch"
7592 (if_then_else (match_operator 1 "arm_comparison_operator"
7593 [(match_operand 2 "cc_register" "") (const_int 0)])
7594 (label_ref (match_operand 0 "" ""))
7598 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7600 arm_ccfsm_state += 2;
7603 return \"b%d1\\t%l0\";
7605 [(set_attr "conds" "use")
7606 (set_attr "type" "branch")]
7609 ; Special pattern to match reversed UNEQ.
7610 (define_insn "*arm_buneq_reversed"
7612 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7614 (label_ref (match_operand 0 "" ""))))]
7615 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7617 gcc_assert (!arm_ccfsm_state);
7619 return \"bmi\\t%l0\;bgt\\t%l0\";
7621 [(set_attr "conds" "jump_clob")
7622 (set_attr "length" "8")]
7625 ; Special pattern to match reversed LTGT.
7626 (define_insn "*arm_bltgt_reversed"
7628 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7630 (label_ref (match_operand 0 "" ""))))]
7631 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7633 gcc_assert (!arm_ccfsm_state);
7635 return \"bvs\\t%l0\;beq\\t%l0\";
7637 [(set_attr "conds" "jump_clob")
7638 (set_attr "length" "8")]
7641 (define_insn "*arm_cond_branch_reversed"
7643 (if_then_else (match_operator 1 "arm_comparison_operator"
7644 [(match_operand 2 "cc_register" "") (const_int 0)])
7646 (label_ref (match_operand 0 "" ""))))]
7649 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7651 arm_ccfsm_state += 2;
7654 return \"b%D1\\t%l0\";
7656 [(set_attr "conds" "use")
7657 (set_attr "type" "branch")]
7664 (define_expand "seq"
7665 [(set (match_operand:SI 0 "s_register_operand" "")
7666 (eq:SI (match_dup 1) (const_int 0)))]
7668 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7671 (define_expand "sne"
7672 [(set (match_operand:SI 0 "s_register_operand" "")
7673 (ne:SI (match_dup 1) (const_int 0)))]
7675 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7678 (define_expand "sgt"
7679 [(set (match_operand:SI 0 "s_register_operand" "")
7680 (gt:SI (match_dup 1) (const_int 0)))]
7682 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7685 (define_expand "sle"
7686 [(set (match_operand:SI 0 "s_register_operand" "")
7687 (le:SI (match_dup 1) (const_int 0)))]
7689 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7692 (define_expand "sge"
7693 [(set (match_operand:SI 0 "s_register_operand" "")
7694 (ge:SI (match_dup 1) (const_int 0)))]
7696 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7699 (define_expand "slt"
7700 [(set (match_operand:SI 0 "s_register_operand" "")
7701 (lt:SI (match_dup 1) (const_int 0)))]
7703 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7706 (define_expand "sgtu"
7707 [(set (match_operand:SI 0 "s_register_operand" "")
7708 (gtu:SI (match_dup 1) (const_int 0)))]
7710 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7713 (define_expand "sleu"
7714 [(set (match_operand:SI 0 "s_register_operand" "")
7715 (leu:SI (match_dup 1) (const_int 0)))]
7717 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7720 (define_expand "sgeu"
7721 [(set (match_operand:SI 0 "s_register_operand" "")
7722 (geu:SI (match_dup 1) (const_int 0)))]
7724 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7727 (define_expand "sltu"
7728 [(set (match_operand:SI 0 "s_register_operand" "")
7729 (ltu:SI (match_dup 1) (const_int 0)))]
7731 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7734 (define_expand "sunordered"
7735 [(set (match_operand:SI 0 "s_register_operand" "")
7736 (unordered:SI (match_dup 1) (const_int 0)))]
7737 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7738 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7742 (define_expand "sordered"
7743 [(set (match_operand:SI 0 "s_register_operand" "")
7744 (ordered:SI (match_dup 1) (const_int 0)))]
7745 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7746 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7750 (define_expand "sungt"
7751 [(set (match_operand:SI 0 "s_register_operand" "")
7752 (ungt:SI (match_dup 1) (const_int 0)))]
7753 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7754 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7758 (define_expand "sunge"
7759 [(set (match_operand:SI 0 "s_register_operand" "")
7760 (unge:SI (match_dup 1) (const_int 0)))]
7761 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7762 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7766 (define_expand "sunlt"
7767 [(set (match_operand:SI 0 "s_register_operand" "")
7768 (unlt:SI (match_dup 1) (const_int 0)))]
7769 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7770 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7774 (define_expand "sunle"
7775 [(set (match_operand:SI 0 "s_register_operand" "")
7776 (unle:SI (match_dup 1) (const_int 0)))]
7777 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7778 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7782 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7783 ;;; simple ARM instructions.
7785 ; (define_expand "suneq"
7786 ; [(set (match_operand:SI 0 "s_register_operand" "")
7787 ; (uneq:SI (match_dup 1) (const_int 0)))]
7788 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7789 ; "gcc_unreachable ();"
7792 ; (define_expand "sltgt"
7793 ; [(set (match_operand:SI 0 "s_register_operand" "")
7794 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7795 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7796 ; "gcc_unreachable ();"
7799 (define_insn "*mov_scc"
7800 [(set (match_operand:SI 0 "s_register_operand" "=r")
7801 (match_operator:SI 1 "arm_comparison_operator"
7802 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7804 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7805 [(set_attr "conds" "use")
7806 (set_attr "length" "8")]
7809 (define_insn "*mov_negscc"
7810 [(set (match_operand:SI 0 "s_register_operand" "=r")
7811 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7812 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7814 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7815 [(set_attr "conds" "use")
7816 (set_attr "length" "8")]
7819 (define_insn "*mov_notscc"
7820 [(set (match_operand:SI 0 "s_register_operand" "=r")
7821 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7822 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7824 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7825 [(set_attr "conds" "use")
7826 (set_attr "length" "8")]
7829 (define_expand "cstoresi4"
7830 [(set (match_operand:SI 0 "s_register_operand" "")
7831 (match_operator:SI 1 "arm_comparison_operator"
7832 [(match_operand:SI 2 "s_register_operand" "")
7833 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7836 rtx op3, scratch, scratch2;
7838 if (operands[3] == const0_rtx)
7840 switch (GET_CODE (operands[1]))
7843 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7847 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7851 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7852 NULL_RTX, 0, OPTAB_WIDEN);
7853 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7854 NULL_RTX, 0, OPTAB_WIDEN);
7855 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7856 operands[0], 1, OPTAB_WIDEN);
7860 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7862 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7863 NULL_RTX, 1, OPTAB_WIDEN);
7867 scratch = expand_binop (SImode, ashr_optab, operands[2],
7868 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7869 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7870 NULL_RTX, 0, OPTAB_WIDEN);
7871 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7875 /* LT is handled by generic code. No need for unsigned with 0. */
7882 switch (GET_CODE (operands[1]))
7885 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7886 NULL_RTX, 0, OPTAB_WIDEN);
7887 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7891 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7892 NULL_RTX, 0, OPTAB_WIDEN);
7893 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7897 op3 = force_reg (SImode, operands[3]);
7899 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7900 NULL_RTX, 1, OPTAB_WIDEN);
7901 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7902 NULL_RTX, 0, OPTAB_WIDEN);
7903 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7909 if (!thumb1_cmp_operand (op3, SImode))
7910 op3 = force_reg (SImode, op3);
7911 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7912 NULL_RTX, 0, OPTAB_WIDEN);
7913 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7914 NULL_RTX, 1, OPTAB_WIDEN);
7915 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7920 op3 = force_reg (SImode, operands[3]);
7921 scratch = force_reg (SImode, const0_rtx);
7922 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7928 if (!thumb1_cmp_operand (op3, SImode))
7929 op3 = force_reg (SImode, op3);
7930 scratch = force_reg (SImode, const0_rtx);
7931 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7937 if (!thumb1_cmp_operand (op3, SImode))
7938 op3 = force_reg (SImode, op3);
7939 scratch = gen_reg_rtx (SImode);
7940 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
7941 emit_insn (gen_negsi2 (operands[0], scratch));
7945 op3 = force_reg (SImode, operands[3]);
7946 scratch = gen_reg_rtx (SImode);
7947 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
7948 emit_insn (gen_negsi2 (operands[0], scratch));
7951 /* No good sequences for GT, LT. */
7958 (define_expand "cstoresi_eq0_thumb1"
7960 [(set (match_operand:SI 0 "s_register_operand" "")
7961 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7963 (clobber (match_dup:SI 2))])]
7965 "operands[2] = gen_reg_rtx (SImode);"
7968 (define_expand "cstoresi_ne0_thumb1"
7970 [(set (match_operand:SI 0 "s_register_operand" "")
7971 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7973 (clobber (match_dup:SI 2))])]
7975 "operands[2] = gen_reg_rtx (SImode);"
7978 (define_insn "*cstoresi_eq0_thumb1_insn"
7979 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7980 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7982 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7985 neg\\t%0, %1\;adc\\t%0, %0, %1
7986 neg\\t%2, %1\;adc\\t%0, %1, %2"
7987 [(set_attr "length" "4")]
7990 (define_insn "*cstoresi_ne0_thumb1_insn"
7991 [(set (match_operand:SI 0 "s_register_operand" "=l")
7992 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7994 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7996 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7997 [(set_attr "length" "4")]
8000 (define_insn "cstoresi_nltu_thumb1"
8001 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8002 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8003 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8005 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8006 [(set_attr "length" "4")]
8009 ;; Used as part of the expansion of thumb les sequence.
8010 (define_insn "thumb1_addsi3_addgeu"
8011 [(set (match_operand:SI 0 "s_register_operand" "=l")
8012 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8013 (match_operand:SI 2 "s_register_operand" "l"))
8014 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8015 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8017 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8018 [(set_attr "length" "4")]
8022 ;; Conditional move insns
8024 (define_expand "movsicc"
8025 [(set (match_operand:SI 0 "s_register_operand" "")
8026 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8027 (match_operand:SI 2 "arm_not_operand" "")
8028 (match_operand:SI 3 "arm_not_operand" "")))]
8032 enum rtx_code code = GET_CODE (operands[1]);
8035 if (code == UNEQ || code == LTGT)
8038 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8039 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8043 (define_expand "movsfcc"
8044 [(set (match_operand:SF 0 "s_register_operand" "")
8045 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8046 (match_operand:SF 2 "s_register_operand" "")
8047 (match_operand:SF 3 "nonmemory_operand" "")))]
8051 enum rtx_code code = GET_CODE (operands[1]);
8054 if (code == UNEQ || code == LTGT)
8057 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8058 Otherwise, ensure it is a valid FP add operand */
8059 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8060 || (!arm_float_add_operand (operands[3], SFmode)))
8061 operands[3] = force_reg (SFmode, operands[3]);
8063 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8064 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8068 (define_expand "movdfcc"
8069 [(set (match_operand:DF 0 "s_register_operand" "")
8070 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8071 (match_operand:DF 2 "s_register_operand" "")
8072 (match_operand:DF 3 "arm_float_add_operand" "")))]
8073 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8076 enum rtx_code code = GET_CODE (operands[1]);
8079 if (code == UNEQ || code == LTGT)
8082 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8083 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8087 (define_insn "*movsicc_insn"
8088 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8090 (match_operator 3 "arm_comparison_operator"
8091 [(match_operand 4 "cc_register" "") (const_int 0)])
8092 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8093 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8100 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8101 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8102 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8103 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8104 [(set_attr "length" "4,4,4,4,8,8,8,8")
8105 (set_attr "conds" "use")]
8108 (define_insn "*movsfcc_soft_insn"
8109 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8110 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8111 [(match_operand 4 "cc_register" "") (const_int 0)])
8112 (match_operand:SF 1 "s_register_operand" "0,r")
8113 (match_operand:SF 2 "s_register_operand" "r,0")))]
8114 "TARGET_ARM && TARGET_SOFT_FLOAT"
8118 [(set_attr "conds" "use")]
8122 ;; Jump and linkage insns
8124 (define_expand "jump"
8126 (label_ref (match_operand 0 "" "")))]
8131 (define_insn "*arm_jump"
8133 (label_ref (match_operand 0 "" "")))]
8137 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8139 arm_ccfsm_state += 2;
8142 return \"b%?\\t%l0\";
8145 [(set_attr "predicable" "yes")]
8148 (define_insn "*thumb_jump"
8150 (label_ref (match_operand 0 "" "")))]
8153 if (get_attr_length (insn) == 2)
8155 return \"bl\\t%l0\\t%@ far jump\";
8157 [(set (attr "far_jump")
8159 (eq_attr "length" "4")
8160 (const_string "yes")
8161 (const_string "no")))
8162 (set (attr "length")
8164 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8165 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8170 (define_expand "call"
8171 [(parallel [(call (match_operand 0 "memory_operand" "")
8172 (match_operand 1 "general_operand" ""))
8173 (use (match_operand 2 "" ""))
8174 (clobber (reg:SI LR_REGNUM))])]
8180 /* In an untyped call, we can get NULL for operand 2. */
8181 if (operands[2] == NULL_RTX)
8182 operands[2] = const0_rtx;
8184 /* Decide if we should generate indirect calls by loading the
8185 32-bit address of the callee into a register before performing the
8187 callee = XEXP (operands[0], 0);
8188 if (GET_CODE (callee) == SYMBOL_REF
8189 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8191 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8193 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8194 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8199 (define_expand "call_internal"
8200 [(parallel [(call (match_operand 0 "memory_operand" "")
8201 (match_operand 1 "general_operand" ""))
8202 (use (match_operand 2 "" ""))
8203 (clobber (reg:SI LR_REGNUM))])])
8205 (define_insn "*call_reg_armv5"
8206 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8207 (match_operand 1 "" ""))
8208 (use (match_operand 2 "" ""))
8209 (clobber (reg:SI LR_REGNUM))]
8210 "TARGET_ARM && arm_arch5"
8212 [(set_attr "type" "call")]
8215 (define_insn "*call_reg_arm"
8216 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8217 (match_operand 1 "" ""))
8218 (use (match_operand 2 "" ""))
8219 (clobber (reg:SI LR_REGNUM))]
8220 "TARGET_ARM && !arm_arch5"
8222 return output_call (operands);
8224 ;; length is worst case, normally it is only two
8225 [(set_attr "length" "12")
8226 (set_attr "type" "call")]
8229 (define_insn "*call_mem"
8230 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8231 (match_operand 1 "" ""))
8232 (use (match_operand 2 "" ""))
8233 (clobber (reg:SI LR_REGNUM))]
8236 return output_call_mem (operands);
8238 [(set_attr "length" "12")
8239 (set_attr "type" "call")]
8242 (define_insn "*call_reg_thumb1_v5"
8243 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8244 (match_operand 1 "" ""))
8245 (use (match_operand 2 "" ""))
8246 (clobber (reg:SI LR_REGNUM))]
8247 "TARGET_THUMB1 && arm_arch5"
8249 [(set_attr "length" "2")
8250 (set_attr "type" "call")]
8253 (define_insn "*call_reg_thumb1"
8254 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8255 (match_operand 1 "" ""))
8256 (use (match_operand 2 "" ""))
8257 (clobber (reg:SI LR_REGNUM))]
8258 "TARGET_THUMB1 && !arm_arch5"
8261 if (!TARGET_CALLER_INTERWORKING)
8262 return thumb_call_via_reg (operands[0]);
8263 else if (operands[1] == const0_rtx)
8264 return \"bl\\t%__interwork_call_via_%0\";
8265 else if (frame_pointer_needed)
8266 return \"bl\\t%__interwork_r7_call_via_%0\";
8268 return \"bl\\t%__interwork_r11_call_via_%0\";
8270 [(set_attr "type" "call")]
8273 (define_expand "call_value"
8274 [(parallel [(set (match_operand 0 "" "")
8275 (call (match_operand 1 "memory_operand" "")
8276 (match_operand 2 "general_operand" "")))
8277 (use (match_operand 3 "" ""))
8278 (clobber (reg:SI LR_REGNUM))])]
8284 /* In an untyped call, we can get NULL for operand 2. */
8285 if (operands[3] == 0)
8286 operands[3] = const0_rtx;
8288 /* Decide if we should generate indirect calls by loading the
8289 32-bit address of the callee into a register before performing the
8291 callee = XEXP (operands[1], 0);
8292 if (GET_CODE (callee) == SYMBOL_REF
8293 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8295 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8297 pat = gen_call_value_internal (operands[0], operands[1],
8298 operands[2], operands[3]);
8299 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8304 (define_expand "call_value_internal"
8305 [(parallel [(set (match_operand 0 "" "")
8306 (call (match_operand 1 "memory_operand" "")
8307 (match_operand 2 "general_operand" "")))
8308 (use (match_operand 3 "" ""))
8309 (clobber (reg:SI LR_REGNUM))])])
8311 (define_insn "*call_value_reg_armv5"
8312 [(set (match_operand 0 "" "")
8313 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8314 (match_operand 2 "" "")))
8315 (use (match_operand 3 "" ""))
8316 (clobber (reg:SI LR_REGNUM))]
8317 "TARGET_ARM && arm_arch5"
8319 [(set_attr "type" "call")]
8322 (define_insn "*call_value_reg_arm"
8323 [(set (match_operand 0 "" "")
8324 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8325 (match_operand 2 "" "")))
8326 (use (match_operand 3 "" ""))
8327 (clobber (reg:SI LR_REGNUM))]
8328 "TARGET_ARM && !arm_arch5"
8330 return output_call (&operands[1]);
8332 [(set_attr "length" "12")
8333 (set_attr "type" "call")]
8336 (define_insn "*call_value_mem"
8337 [(set (match_operand 0 "" "")
8338 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8339 (match_operand 2 "" "")))
8340 (use (match_operand 3 "" ""))
8341 (clobber (reg:SI LR_REGNUM))]
8342 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8344 return output_call_mem (&operands[1]);
8346 [(set_attr "length" "12")
8347 (set_attr "type" "call")]
8350 (define_insn "*call_value_reg_thumb1_v5"
8351 [(set (match_operand 0 "" "")
8352 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8353 (match_operand 2 "" "")))
8354 (use (match_operand 3 "" ""))
8355 (clobber (reg:SI LR_REGNUM))]
8356 "TARGET_THUMB1 && arm_arch5"
8358 [(set_attr "length" "2")
8359 (set_attr "type" "call")]
8362 (define_insn "*call_value_reg_thumb1"
8363 [(set (match_operand 0 "" "")
8364 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8365 (match_operand 2 "" "")))
8366 (use (match_operand 3 "" ""))
8367 (clobber (reg:SI LR_REGNUM))]
8368 "TARGET_THUMB1 && !arm_arch5"
8371 if (!TARGET_CALLER_INTERWORKING)
8372 return thumb_call_via_reg (operands[1]);
8373 else if (operands[2] == const0_rtx)
8374 return \"bl\\t%__interwork_call_via_%1\";
8375 else if (frame_pointer_needed)
8376 return \"bl\\t%__interwork_r7_call_via_%1\";
8378 return \"bl\\t%__interwork_r11_call_via_%1\";
8380 [(set_attr "type" "call")]
8383 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8384 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8386 (define_insn "*call_symbol"
8387 [(call (mem:SI (match_operand:SI 0 "" ""))
8388 (match_operand 1 "" ""))
8389 (use (match_operand 2 "" ""))
8390 (clobber (reg:SI LR_REGNUM))]
8392 && (GET_CODE (operands[0]) == SYMBOL_REF)
8393 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8396 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8398 [(set_attr "type" "call")]
8401 (define_insn "*call_value_symbol"
8402 [(set (match_operand 0 "" "")
8403 (call (mem:SI (match_operand:SI 1 "" ""))
8404 (match_operand:SI 2 "" "")))
8405 (use (match_operand 3 "" ""))
8406 (clobber (reg:SI LR_REGNUM))]
8408 && (GET_CODE (operands[1]) == SYMBOL_REF)
8409 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8412 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8414 [(set_attr "type" "call")]
8417 (define_insn "*call_insn"
8418 [(call (mem:SI (match_operand:SI 0 "" ""))
8419 (match_operand:SI 1 "" ""))
8420 (use (match_operand 2 "" ""))
8421 (clobber (reg:SI LR_REGNUM))]
8423 && GET_CODE (operands[0]) == SYMBOL_REF
8424 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8426 [(set_attr "length" "4")
8427 (set_attr "type" "call")]
8430 (define_insn "*call_value_insn"
8431 [(set (match_operand 0 "" "")
8432 (call (mem:SI (match_operand 1 "" ""))
8433 (match_operand 2 "" "")))
8434 (use (match_operand 3 "" ""))
8435 (clobber (reg:SI LR_REGNUM))]
8437 && GET_CODE (operands[1]) == SYMBOL_REF
8438 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8440 [(set_attr "length" "4")
8441 (set_attr "type" "call")]
8444 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8445 (define_expand "sibcall"
8446 [(parallel [(call (match_operand 0 "memory_operand" "")
8447 (match_operand 1 "general_operand" ""))
8449 (use (match_operand 2 "" ""))])]
8453 if (operands[2] == NULL_RTX)
8454 operands[2] = const0_rtx;
8458 (define_expand "sibcall_value"
8459 [(parallel [(set (match_operand 0 "" "")
8460 (call (match_operand 1 "memory_operand" "")
8461 (match_operand 2 "general_operand" "")))
8463 (use (match_operand 3 "" ""))])]
8467 if (operands[3] == NULL_RTX)
8468 operands[3] = const0_rtx;
8472 (define_insn "*sibcall_insn"
8473 [(call (mem:SI (match_operand:SI 0 "" "X"))
8474 (match_operand 1 "" ""))
8476 (use (match_operand 2 "" ""))]
8477 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8479 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8481 [(set_attr "type" "call")]
8484 (define_insn "*sibcall_value_insn"
8485 [(set (match_operand 0 "" "")
8486 (call (mem:SI (match_operand:SI 1 "" "X"))
8487 (match_operand 2 "" "")))
8489 (use (match_operand 3 "" ""))]
8490 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8492 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8494 [(set_attr "type" "call")]
8497 ;; Often the return insn will be the same as loading from memory, so set attr
8498 (define_insn "return"
8500 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8503 if (arm_ccfsm_state == 2)
8505 arm_ccfsm_state += 2;
8508 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8510 [(set_attr "type" "load1")
8511 (set_attr "length" "12")
8512 (set_attr "predicable" "yes")]
8515 (define_insn "*cond_return"
8517 (if_then_else (match_operator 0 "arm_comparison_operator"
8518 [(match_operand 1 "cc_register" "") (const_int 0)])
8521 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8524 if (arm_ccfsm_state == 2)
8526 arm_ccfsm_state += 2;
8529 return output_return_instruction (operands[0], TRUE, FALSE);
8531 [(set_attr "conds" "use")
8532 (set_attr "length" "12")
8533 (set_attr "type" "load1")]
8536 (define_insn "*cond_return_inverted"
8538 (if_then_else (match_operator 0 "arm_comparison_operator"
8539 [(match_operand 1 "cc_register" "") (const_int 0)])
8542 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8545 if (arm_ccfsm_state == 2)
8547 arm_ccfsm_state += 2;
8550 return output_return_instruction (operands[0], TRUE, TRUE);
8552 [(set_attr "conds" "use")
8553 (set_attr "length" "12")
8554 (set_attr "type" "load1")]
8557 ;; Generate a sequence of instructions to determine if the processor is
8558 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8561 (define_expand "return_addr_mask"
8563 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8565 (set (match_operand:SI 0 "s_register_operand" "")
8566 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8568 (const_int 67108860)))] ; 0x03fffffc
8571 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8574 (define_insn "*check_arch2"
8575 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8576 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8579 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8580 [(set_attr "length" "8")
8581 (set_attr "conds" "set")]
8584 ;; Call subroutine returning any type.
8586 (define_expand "untyped_call"
8587 [(parallel [(call (match_operand 0 "" "")
8589 (match_operand 1 "" "")
8590 (match_operand 2 "" "")])]
8595 rtx par = gen_rtx_PARALLEL (VOIDmode,
8596 rtvec_alloc (XVECLEN (operands[2], 0)));
8597 rtx addr = gen_reg_rtx (Pmode);
8601 emit_move_insn (addr, XEXP (operands[1], 0));
8602 mem = change_address (operands[1], BLKmode, addr);
8604 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8606 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8608 /* Default code only uses r0 as a return value, but we could
8609 be using anything up to 4 registers. */
8610 if (REGNO (src) == R0_REGNUM)
8611 src = gen_rtx_REG (TImode, R0_REGNUM);
8613 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8615 size += GET_MODE_SIZE (GET_MODE (src));
8618 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8623 for (i = 0; i < XVECLEN (par, 0); i++)
8625 HOST_WIDE_INT offset = 0;
8626 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8629 emit_move_insn (addr, plus_constant (addr, size));
8631 mem = change_address (mem, GET_MODE (reg), NULL);
8632 if (REGNO (reg) == R0_REGNUM)
8634 /* On thumb we have to use a write-back instruction. */
8635 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8636 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8637 size = TARGET_ARM ? 16 : 0;
8641 emit_move_insn (mem, reg);
8642 size = GET_MODE_SIZE (GET_MODE (reg));
8646 /* The optimizer does not know that the call sets the function value
8647 registers we stored in the result block. We avoid problems by
8648 claiming that all hard registers are used and clobbered at this
8650 emit_insn (gen_blockage ());
8656 (define_expand "untyped_return"
8657 [(match_operand:BLK 0 "memory_operand" "")
8658 (match_operand 1 "" "")]
8663 rtx addr = gen_reg_rtx (Pmode);
8667 emit_move_insn (addr, XEXP (operands[0], 0));
8668 mem = change_address (operands[0], BLKmode, addr);
8670 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8672 HOST_WIDE_INT offset = 0;
8673 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8676 emit_move_insn (addr, plus_constant (addr, size));
8678 mem = change_address (mem, GET_MODE (reg), NULL);
8679 if (REGNO (reg) == R0_REGNUM)
8681 /* On thumb we have to use a write-back instruction. */
8682 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8683 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8684 size = TARGET_ARM ? 16 : 0;
8688 emit_move_insn (reg, mem);
8689 size = GET_MODE_SIZE (GET_MODE (reg));
8693 /* Emit USE insns before the return. */
8694 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8695 emit_insn (gen_rtx_USE (VOIDmode,
8696 SET_DEST (XVECEXP (operands[1], 0, i))));
8698 /* Construct the return. */
8699 expand_naked_return ();
8705 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8706 ;; all of memory. This blocks insns from being moved across this point.
8708 (define_insn "blockage"
8709 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8712 [(set_attr "length" "0")
8713 (set_attr "type" "block")]
8716 (define_expand "casesi"
8717 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8718 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8719 (match_operand:SI 2 "const_int_operand" "") ; total range
8720 (match_operand:SI 3 "" "") ; table label
8721 (match_operand:SI 4 "" "")] ; Out of range label
8726 if (operands[1] != const0_rtx)
8728 reg = gen_reg_rtx (SImode);
8730 emit_insn (gen_addsi3 (reg, operands[0],
8731 GEN_INT (-INTVAL (operands[1]))));
8735 if (!const_ok_for_arm (INTVAL (operands[2])))
8736 operands[2] = force_reg (SImode, operands[2]);
8740 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8741 operands[3], operands[4]));
8745 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8746 operands[2], operands[3], operands[4]));
8750 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8751 operands[3], operands[4]));
8757 ;; The USE in this pattern is needed to tell flow analysis that this is
8758 ;; a CASESI insn. It has no other purpose.
8759 (define_insn "arm_casesi_internal"
8760 [(parallel [(set (pc)
8762 (leu (match_operand:SI 0 "s_register_operand" "r")
8763 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8764 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8765 (label_ref (match_operand 2 "" ""))))
8766 (label_ref (match_operand 3 "" ""))))
8767 (clobber (reg:CC CC_REGNUM))
8768 (use (label_ref (match_dup 2)))])]
8772 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8773 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8775 [(set_attr "conds" "clob")
8776 (set_attr "length" "12")]
8779 (define_expand "indirect_jump"
8781 (match_operand:SI 0 "s_register_operand" ""))]
8784 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8785 address and use bx. */
8789 tmp = gen_reg_rtx (SImode);
8790 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8796 ;; NB Never uses BX.
8797 (define_insn "*arm_indirect_jump"
8799 (match_operand:SI 0 "s_register_operand" "r"))]
8801 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8802 [(set_attr "predicable" "yes")]
8805 (define_insn "*load_indirect_jump"
8807 (match_operand:SI 0 "memory_operand" "m"))]
8809 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8810 [(set_attr "type" "load1")
8811 (set_attr "pool_range" "4096")
8812 (set_attr "neg_pool_range" "4084")
8813 (set_attr "predicable" "yes")]
8816 ;; NB Never uses BX.
8817 (define_insn "*thumb1_indirect_jump"
8819 (match_operand:SI 0 "register_operand" "l*r"))]
8822 [(set_attr "conds" "clob")
8823 (set_attr "length" "2")]
8833 if (TARGET_UNIFIED_ASM)
8836 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8837 return \"mov\\tr8, r8\";
8839 [(set (attr "length")
8840 (if_then_else (eq_attr "is_thumb" "yes")
8846 ;; Patterns to allow combination of arithmetic, cond code and shifts
8848 (define_insn "*arith_shiftsi"
8849 [(set (match_operand:SI 0 "s_register_operand" "=r")
8850 (match_operator:SI 1 "shiftable_operator"
8851 [(match_operator:SI 3 "shift_operator"
8852 [(match_operand:SI 4 "s_register_operand" "r")
8853 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8854 (match_operand:SI 2 "s_register_operand" "r")]))]
8856 "%i1%?\\t%0, %2, %4%S3"
8857 [(set_attr "predicable" "yes")
8858 (set_attr "shift" "4")
8859 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8860 (const_string "alu_shift")
8861 (const_string "alu_shift_reg")))]
8865 [(set (match_operand:SI 0 "s_register_operand" "")
8866 (match_operator:SI 1 "shiftable_operator"
8867 [(match_operator:SI 2 "shiftable_operator"
8868 [(match_operator:SI 3 "shift_operator"
8869 [(match_operand:SI 4 "s_register_operand" "")
8870 (match_operand:SI 5 "reg_or_int_operand" "")])
8871 (match_operand:SI 6 "s_register_operand" "")])
8872 (match_operand:SI 7 "arm_rhs_operand" "")]))
8873 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8876 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8879 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8882 (define_insn "*arith_shiftsi_compare0"
8883 [(set (reg:CC_NOOV CC_REGNUM)
8884 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8885 [(match_operator:SI 3 "shift_operator"
8886 [(match_operand:SI 4 "s_register_operand" "r")
8887 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8888 (match_operand:SI 2 "s_register_operand" "r")])
8890 (set (match_operand:SI 0 "s_register_operand" "=r")
8891 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8894 "%i1%.\\t%0, %2, %4%S3"
8895 [(set_attr "conds" "set")
8896 (set_attr "shift" "4")
8897 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8898 (const_string "alu_shift")
8899 (const_string "alu_shift_reg")))]
8902 (define_insn "*arith_shiftsi_compare0_scratch"
8903 [(set (reg:CC_NOOV CC_REGNUM)
8904 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8905 [(match_operator:SI 3 "shift_operator"
8906 [(match_operand:SI 4 "s_register_operand" "r")
8907 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8908 (match_operand:SI 2 "s_register_operand" "r")])
8910 (clobber (match_scratch:SI 0 "=r"))]
8912 "%i1%.\\t%0, %2, %4%S3"
8913 [(set_attr "conds" "set")
8914 (set_attr "shift" "4")
8915 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8916 (const_string "alu_shift")
8917 (const_string "alu_shift_reg")))]
8920 (define_insn "*sub_shiftsi"
8921 [(set (match_operand:SI 0 "s_register_operand" "=r")
8922 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8923 (match_operator:SI 2 "shift_operator"
8924 [(match_operand:SI 3 "s_register_operand" "r")
8925 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
8927 "sub%?\\t%0, %1, %3%S2"
8928 [(set_attr "predicable" "yes")
8929 (set_attr "shift" "3")
8930 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8931 (const_string "alu_shift")
8932 (const_string "alu_shift_reg")))]
8935 (define_insn "*sub_shiftsi_compare0"
8936 [(set (reg:CC_NOOV CC_REGNUM)
8938 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8939 (match_operator:SI 2 "shift_operator"
8940 [(match_operand:SI 3 "s_register_operand" "r")
8941 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8943 (set (match_operand:SI 0 "s_register_operand" "=r")
8944 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
8947 "sub%.\\t%0, %1, %3%S2"
8948 [(set_attr "conds" "set")
8949 (set_attr "shift" "3")
8950 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8951 (const_string "alu_shift")
8952 (const_string "alu_shift_reg")))]
8955 (define_insn "*sub_shiftsi_compare0_scratch"
8956 [(set (reg:CC_NOOV CC_REGNUM)
8958 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
8959 (match_operator:SI 2 "shift_operator"
8960 [(match_operand:SI 3 "s_register_operand" "r")
8961 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
8963 (clobber (match_scratch:SI 0 "=r"))]
8965 "sub%.\\t%0, %1, %3%S2"
8966 [(set_attr "conds" "set")
8967 (set_attr "shift" "3")
8968 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
8969 (const_string "alu_shift")
8970 (const_string "alu_shift_reg")))]
8975 (define_insn "*and_scc"
8976 [(set (match_operand:SI 0 "s_register_operand" "=r")
8977 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8978 [(match_operand 3 "cc_register" "") (const_int 0)])
8979 (match_operand:SI 2 "s_register_operand" "r")))]
8981 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8982 [(set_attr "conds" "use")
8983 (set_attr "length" "8")]
8986 (define_insn "*ior_scc"
8987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8988 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8989 [(match_operand 3 "cc_register" "") (const_int 0)])
8990 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8994 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8995 [(set_attr "conds" "use")
8996 (set_attr "length" "4,8")]
8999 (define_insn "*compare_scc"
9000 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9001 (match_operator:SI 1 "arm_comparison_operator"
9002 [(match_operand:SI 2 "s_register_operand" "r,r")
9003 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9004 (clobber (reg:CC CC_REGNUM))]
9007 if (operands[3] == const0_rtx)
9009 if (GET_CODE (operands[1]) == LT)
9010 return \"mov\\t%0, %2, lsr #31\";
9012 if (GET_CODE (operands[1]) == GE)
9013 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9015 if (GET_CODE (operands[1]) == EQ)
9016 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9019 if (GET_CODE (operands[1]) == NE)
9021 if (which_alternative == 1)
9022 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9023 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9025 if (which_alternative == 1)
9026 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9028 output_asm_insn (\"cmp\\t%2, %3\", operands);
9029 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9031 [(set_attr "conds" "clob")
9032 (set_attr "length" "12")]
9035 (define_insn "*cond_move"
9036 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9037 (if_then_else:SI (match_operator 3 "equality_operator"
9038 [(match_operator 4 "arm_comparison_operator"
9039 [(match_operand 5 "cc_register" "") (const_int 0)])
9041 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9042 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9045 if (GET_CODE (operands[3]) == NE)
9047 if (which_alternative != 1)
9048 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9049 if (which_alternative != 0)
9050 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9053 if (which_alternative != 0)
9054 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9055 if (which_alternative != 1)
9056 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9059 [(set_attr "conds" "use")
9060 (set_attr "length" "4,4,8")]
9063 (define_insn "*cond_arith"
9064 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9065 (match_operator:SI 5 "shiftable_operator"
9066 [(match_operator:SI 4 "arm_comparison_operator"
9067 [(match_operand:SI 2 "s_register_operand" "r,r")
9068 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9069 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9070 (clobber (reg:CC CC_REGNUM))]
9073 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9074 return \"%i5\\t%0, %1, %2, lsr #31\";
9076 output_asm_insn (\"cmp\\t%2, %3\", operands);
9077 if (GET_CODE (operands[5]) == AND)
9078 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9079 else if (GET_CODE (operands[5]) == MINUS)
9080 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9081 else if (which_alternative != 0)
9082 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9083 return \"%i5%d4\\t%0, %1, #1\";
9085 [(set_attr "conds" "clob")
9086 (set_attr "length" "12")]
9089 (define_insn "*cond_sub"
9090 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9091 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9092 (match_operator:SI 4 "arm_comparison_operator"
9093 [(match_operand:SI 2 "s_register_operand" "r,r")
9094 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9095 (clobber (reg:CC CC_REGNUM))]
9098 output_asm_insn (\"cmp\\t%2, %3\", operands);
9099 if (which_alternative != 0)
9100 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9101 return \"sub%d4\\t%0, %1, #1\";
9103 [(set_attr "conds" "clob")
9104 (set_attr "length" "8,12")]
9107 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9108 (define_insn "*cmp_ite0"
9109 [(set (match_operand 6 "dominant_cc_register" "")
9112 (match_operator 4 "arm_comparison_operator"
9113 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9114 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9115 (match_operator:SI 5 "arm_comparison_operator"
9116 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9117 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9123 static const char * const opcodes[4][2] =
9125 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9126 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9127 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9128 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9129 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9130 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9131 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9132 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9135 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9137 return opcodes[which_alternative][swap];
9139 [(set_attr "conds" "set")
9140 (set_attr "length" "8")]
9143 (define_insn "*cmp_ite1"
9144 [(set (match_operand 6 "dominant_cc_register" "")
9147 (match_operator 4 "arm_comparison_operator"
9148 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9149 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9150 (match_operator:SI 5 "arm_comparison_operator"
9151 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9152 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9158 static const char * const opcodes[4][2] =
9160 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9161 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9162 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9163 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9164 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9165 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9166 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9167 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9170 comparison_dominates_p (GET_CODE (operands[5]),
9171 reverse_condition (GET_CODE (operands[4])));
9173 return opcodes[which_alternative][swap];
9175 [(set_attr "conds" "set")
9176 (set_attr "length" "8")]
9179 (define_insn "*cmp_and"
9180 [(set (match_operand 6 "dominant_cc_register" "")
9183 (match_operator 4 "arm_comparison_operator"
9184 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9185 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9186 (match_operator:SI 5 "arm_comparison_operator"
9187 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9188 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9193 static const char *const opcodes[4][2] =
9195 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9196 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9197 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9198 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9199 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9200 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9201 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9202 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9205 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9207 return opcodes[which_alternative][swap];
9209 [(set_attr "conds" "set")
9210 (set_attr "predicable" "no")
9211 (set_attr "length" "8")]
9214 (define_insn "*cmp_ior"
9215 [(set (match_operand 6 "dominant_cc_register" "")
9218 (match_operator 4 "arm_comparison_operator"
9219 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9220 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9221 (match_operator:SI 5 "arm_comparison_operator"
9222 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9223 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9228 static const char *const opcodes[4][2] =
9230 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9231 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9232 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9233 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9234 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9235 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9236 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9237 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9240 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9242 return opcodes[which_alternative][swap];
9245 [(set_attr "conds" "set")
9246 (set_attr "length" "8")]
9249 (define_insn_and_split "*ior_scc_scc"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r")
9251 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9252 [(match_operand:SI 1 "s_register_operand" "r")
9253 (match_operand:SI 2 "arm_add_operand" "rIL")])
9254 (match_operator:SI 6 "arm_comparison_operator"
9255 [(match_operand:SI 4 "s_register_operand" "r")
9256 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9257 (clobber (reg:CC CC_REGNUM))]
9259 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9262 "TARGET_ARM && reload_completed"
9266 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9267 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9269 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9271 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9274 [(set_attr "conds" "clob")
9275 (set_attr "length" "16")])
9277 ; If the above pattern is followed by a CMP insn, then the compare is
9278 ; redundant, since we can rework the conditional instruction that follows.
9279 (define_insn_and_split "*ior_scc_scc_cmp"
9280 [(set (match_operand 0 "dominant_cc_register" "")
9281 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9282 [(match_operand:SI 1 "s_register_operand" "r")
9283 (match_operand:SI 2 "arm_add_operand" "rIL")])
9284 (match_operator:SI 6 "arm_comparison_operator"
9285 [(match_operand:SI 4 "s_register_operand" "r")
9286 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9288 (set (match_operand:SI 7 "s_register_operand" "=r")
9289 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9290 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9293 "TARGET_ARM && reload_completed"
9297 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9298 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9300 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9302 [(set_attr "conds" "set")
9303 (set_attr "length" "16")])
9305 (define_insn_and_split "*and_scc_scc"
9306 [(set (match_operand:SI 0 "s_register_operand" "=r")
9307 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9308 [(match_operand:SI 1 "s_register_operand" "r")
9309 (match_operand:SI 2 "arm_add_operand" "rIL")])
9310 (match_operator:SI 6 "arm_comparison_operator"
9311 [(match_operand:SI 4 "s_register_operand" "r")
9312 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9313 (clobber (reg:CC CC_REGNUM))]
9315 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9318 "TARGET_ARM && reload_completed
9319 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9324 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9325 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9327 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9329 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9332 [(set_attr "conds" "clob")
9333 (set_attr "length" "16")])
9335 ; If the above pattern is followed by a CMP insn, then the compare is
9336 ; redundant, since we can rework the conditional instruction that follows.
9337 (define_insn_and_split "*and_scc_scc_cmp"
9338 [(set (match_operand 0 "dominant_cc_register" "")
9339 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9340 [(match_operand:SI 1 "s_register_operand" "r")
9341 (match_operand:SI 2 "arm_add_operand" "rIL")])
9342 (match_operator:SI 6 "arm_comparison_operator"
9343 [(match_operand:SI 4 "s_register_operand" "r")
9344 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9346 (set (match_operand:SI 7 "s_register_operand" "=r")
9347 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9348 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9351 "TARGET_ARM && reload_completed"
9355 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9356 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9358 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9360 [(set_attr "conds" "set")
9361 (set_attr "length" "16")])
9363 ;; If there is no dominance in the comparison, then we can still save an
9364 ;; instruction in the AND case, since we can know that the second compare
9365 ;; need only zero the value if false (if true, then the value is already
9367 (define_insn_and_split "*and_scc_scc_nodom"
9368 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9369 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9370 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9371 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9372 (match_operator:SI 6 "arm_comparison_operator"
9373 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9374 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9375 (clobber (reg:CC CC_REGNUM))]
9377 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9380 "TARGET_ARM && reload_completed"
9381 [(parallel [(set (match_dup 0)
9382 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9383 (clobber (reg:CC CC_REGNUM))])
9384 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9386 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9389 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9390 operands[4], operands[5]),
9392 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9394 [(set_attr "conds" "clob")
9395 (set_attr "length" "20")])
9398 [(set (reg:CC_NOOV CC_REGNUM)
9399 (compare:CC_NOOV (ior:SI
9400 (and:SI (match_operand:SI 0 "s_register_operand" "")
9402 (match_operator:SI 1 "comparison_operator"
9403 [(match_operand:SI 2 "s_register_operand" "")
9404 (match_operand:SI 3 "arm_add_operand" "")]))
9406 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9409 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9411 (set (reg:CC_NOOV CC_REGNUM)
9412 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9417 [(set (reg:CC_NOOV CC_REGNUM)
9418 (compare:CC_NOOV (ior:SI
9419 (match_operator:SI 1 "comparison_operator"
9420 [(match_operand:SI 2 "s_register_operand" "")
9421 (match_operand:SI 3 "arm_add_operand" "")])
9422 (and:SI (match_operand:SI 0 "s_register_operand" "")
9425 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9428 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9430 (set (reg:CC_NOOV CC_REGNUM)
9431 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9434 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9436 (define_insn "*negscc"
9437 [(set (match_operand:SI 0 "s_register_operand" "=r")
9438 (neg:SI (match_operator 3 "arm_comparison_operator"
9439 [(match_operand:SI 1 "s_register_operand" "r")
9440 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9441 (clobber (reg:CC CC_REGNUM))]
9444 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9445 return \"mov\\t%0, %1, asr #31\";
9447 if (GET_CODE (operands[3]) == NE)
9448 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9450 output_asm_insn (\"cmp\\t%1, %2\", operands);
9451 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9452 return \"mvn%d3\\t%0, #0\";
9454 [(set_attr "conds" "clob")
9455 (set_attr "length" "12")]
9458 (define_insn "movcond"
9459 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9461 (match_operator 5 "arm_comparison_operator"
9462 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9463 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9464 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9465 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9466 (clobber (reg:CC CC_REGNUM))]
9469 if (GET_CODE (operands[5]) == LT
9470 && (operands[4] == const0_rtx))
9472 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9474 if (operands[2] == const0_rtx)
9475 return \"and\\t%0, %1, %3, asr #31\";
9476 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9478 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9480 if (operands[1] == const0_rtx)
9481 return \"bic\\t%0, %2, %3, asr #31\";
9482 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9484 /* The only case that falls through to here is when both ops 1 & 2
9488 if (GET_CODE (operands[5]) == GE
9489 && (operands[4] == const0_rtx))
9491 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9493 if (operands[2] == const0_rtx)
9494 return \"bic\\t%0, %1, %3, asr #31\";
9495 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9497 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9499 if (operands[1] == const0_rtx)
9500 return \"and\\t%0, %2, %3, asr #31\";
9501 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9503 /* The only case that falls through to here is when both ops 1 & 2
9506 if (GET_CODE (operands[4]) == CONST_INT
9507 && !const_ok_for_arm (INTVAL (operands[4])))
9508 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9510 output_asm_insn (\"cmp\\t%3, %4\", operands);
9511 if (which_alternative != 0)
9512 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9513 if (which_alternative != 1)
9514 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9517 [(set_attr "conds" "clob")
9518 (set_attr "length" "8,8,12")]
9521 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9523 (define_insn "*ifcompare_plus_move"
9524 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9525 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9526 [(match_operand:SI 4 "s_register_operand" "r,r")
9527 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9529 (match_operand:SI 2 "s_register_operand" "r,r")
9530 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9531 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9532 (clobber (reg:CC CC_REGNUM))]
9535 [(set_attr "conds" "clob")
9536 (set_attr "length" "8,12")]
9539 (define_insn "*if_plus_move"
9540 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9542 (match_operator 4 "arm_comparison_operator"
9543 [(match_operand 5 "cc_register" "") (const_int 0)])
9545 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9546 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9547 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9551 sub%d4\\t%0, %2, #%n3
9552 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9553 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9554 [(set_attr "conds" "use")
9555 (set_attr "length" "4,4,8,8")
9556 (set_attr "type" "*,*,*,*")]
9559 (define_insn "*ifcompare_move_plus"
9560 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9561 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9562 [(match_operand:SI 4 "s_register_operand" "r,r")
9563 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9564 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9566 (match_operand:SI 2 "s_register_operand" "r,r")
9567 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9568 (clobber (reg:CC CC_REGNUM))]
9571 [(set_attr "conds" "clob")
9572 (set_attr "length" "8,12")]
9575 (define_insn "*if_move_plus"
9576 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9578 (match_operator 4 "arm_comparison_operator"
9579 [(match_operand 5 "cc_register" "") (const_int 0)])
9580 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9582 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9583 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9587 sub%D4\\t%0, %2, #%n3
9588 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9589 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9590 [(set_attr "conds" "use")
9591 (set_attr "length" "4,4,8,8")
9592 (set_attr "type" "*,*,*,*")]
9595 (define_insn "*ifcompare_arith_arith"
9596 [(set (match_operand:SI 0 "s_register_operand" "=r")
9597 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9598 [(match_operand:SI 5 "s_register_operand" "r")
9599 (match_operand:SI 6 "arm_add_operand" "rIL")])
9600 (match_operator:SI 8 "shiftable_operator"
9601 [(match_operand:SI 1 "s_register_operand" "r")
9602 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9603 (match_operator:SI 7 "shiftable_operator"
9604 [(match_operand:SI 3 "s_register_operand" "r")
9605 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9606 (clobber (reg:CC CC_REGNUM))]
9609 [(set_attr "conds" "clob")
9610 (set_attr "length" "12")]
9613 (define_insn "*if_arith_arith"
9614 [(set (match_operand:SI 0 "s_register_operand" "=r")
9615 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9616 [(match_operand 8 "cc_register" "") (const_int 0)])
9617 (match_operator:SI 6 "shiftable_operator"
9618 [(match_operand:SI 1 "s_register_operand" "r")
9619 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9620 (match_operator:SI 7 "shiftable_operator"
9621 [(match_operand:SI 3 "s_register_operand" "r")
9622 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9624 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9625 [(set_attr "conds" "use")
9626 (set_attr "length" "8")]
9629 (define_insn "*ifcompare_arith_move"
9630 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9631 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9632 [(match_operand:SI 2 "s_register_operand" "r,r")
9633 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9634 (match_operator:SI 7 "shiftable_operator"
9635 [(match_operand:SI 4 "s_register_operand" "r,r")
9636 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9637 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9638 (clobber (reg:CC CC_REGNUM))]
9641 /* If we have an operation where (op x 0) is the identity operation and
9642 the conditional operator is LT or GE and we are comparing against zero and
9643 everything is in registers then we can do this in two instructions. */
9644 if (operands[3] == const0_rtx
9645 && GET_CODE (operands[7]) != AND
9646 && GET_CODE (operands[5]) == REG
9647 && GET_CODE (operands[1]) == REG
9648 && REGNO (operands[1]) == REGNO (operands[4])
9649 && REGNO (operands[4]) != REGNO (operands[0]))
9651 if (GET_CODE (operands[6]) == LT)
9652 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9653 else if (GET_CODE (operands[6]) == GE)
9654 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9656 if (GET_CODE (operands[3]) == CONST_INT
9657 && !const_ok_for_arm (INTVAL (operands[3])))
9658 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9660 output_asm_insn (\"cmp\\t%2, %3\", operands);
9661 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9662 if (which_alternative != 0)
9663 return \"mov%D6\\t%0, %1\";
9666 [(set_attr "conds" "clob")
9667 (set_attr "length" "8,12")]
9670 (define_insn "*if_arith_move"
9671 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9672 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9673 [(match_operand 6 "cc_register" "") (const_int 0)])
9674 (match_operator:SI 5 "shiftable_operator"
9675 [(match_operand:SI 2 "s_register_operand" "r,r")
9676 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9677 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9681 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9682 [(set_attr "conds" "use")
9683 (set_attr "length" "4,8")
9684 (set_attr "type" "*,*")]
9687 (define_insn "*ifcompare_move_arith"
9688 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9689 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9690 [(match_operand:SI 4 "s_register_operand" "r,r")
9691 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9692 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9693 (match_operator:SI 7 "shiftable_operator"
9694 [(match_operand:SI 2 "s_register_operand" "r,r")
9695 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9696 (clobber (reg:CC CC_REGNUM))]
9699 /* If we have an operation where (op x 0) is the identity operation and
9700 the conditional operator is LT or GE and we are comparing against zero and
9701 everything is in registers then we can do this in two instructions */
9702 if (operands[5] == const0_rtx
9703 && GET_CODE (operands[7]) != AND
9704 && GET_CODE (operands[3]) == REG
9705 && GET_CODE (operands[1]) == REG
9706 && REGNO (operands[1]) == REGNO (operands[2])
9707 && REGNO (operands[2]) != REGNO (operands[0]))
9709 if (GET_CODE (operands[6]) == GE)
9710 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9711 else if (GET_CODE (operands[6]) == LT)
9712 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9715 if (GET_CODE (operands[5]) == CONST_INT
9716 && !const_ok_for_arm (INTVAL (operands[5])))
9717 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9719 output_asm_insn (\"cmp\\t%4, %5\", operands);
9721 if (which_alternative != 0)
9722 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9723 return \"%I7%D6\\t%0, %2, %3\";
9725 [(set_attr "conds" "clob")
9726 (set_attr "length" "8,12")]
9729 (define_insn "*if_move_arith"
9730 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9732 (match_operator 4 "arm_comparison_operator"
9733 [(match_operand 6 "cc_register" "") (const_int 0)])
9734 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9735 (match_operator:SI 5 "shiftable_operator"
9736 [(match_operand:SI 2 "s_register_operand" "r,r")
9737 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9741 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9742 [(set_attr "conds" "use")
9743 (set_attr "length" "4,8")
9744 (set_attr "type" "*,*")]
9747 (define_insn "*ifcompare_move_not"
9748 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9750 (match_operator 5 "arm_comparison_operator"
9751 [(match_operand:SI 3 "s_register_operand" "r,r")
9752 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9753 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9755 (match_operand:SI 2 "s_register_operand" "r,r"))))
9756 (clobber (reg:CC CC_REGNUM))]
9759 [(set_attr "conds" "clob")
9760 (set_attr "length" "8,12")]
9763 (define_insn "*if_move_not"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9766 (match_operator 4 "arm_comparison_operator"
9767 [(match_operand 3 "cc_register" "") (const_int 0)])
9768 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9769 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9773 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9774 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9775 [(set_attr "conds" "use")
9776 (set_attr "length" "4,8,8")]
9779 (define_insn "*ifcompare_not_move"
9780 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9782 (match_operator 5 "arm_comparison_operator"
9783 [(match_operand:SI 3 "s_register_operand" "r,r")
9784 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9786 (match_operand:SI 2 "s_register_operand" "r,r"))
9787 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9788 (clobber (reg:CC CC_REGNUM))]
9791 [(set_attr "conds" "clob")
9792 (set_attr "length" "8,12")]
9795 (define_insn "*if_not_move"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9798 (match_operator 4 "arm_comparison_operator"
9799 [(match_operand 3 "cc_register" "") (const_int 0)])
9800 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9801 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9805 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9806 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9807 [(set_attr "conds" "use")
9808 (set_attr "length" "4,8,8")]
9811 (define_insn "*ifcompare_shift_move"
9812 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9814 (match_operator 6 "arm_comparison_operator"
9815 [(match_operand:SI 4 "s_register_operand" "r,r")
9816 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9817 (match_operator:SI 7 "shift_operator"
9818 [(match_operand:SI 2 "s_register_operand" "r,r")
9819 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9820 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9821 (clobber (reg:CC CC_REGNUM))]
9824 [(set_attr "conds" "clob")
9825 (set_attr "length" "8,12")]
9828 (define_insn "*if_shift_move"
9829 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9831 (match_operator 5 "arm_comparison_operator"
9832 [(match_operand 6 "cc_register" "") (const_int 0)])
9833 (match_operator:SI 4 "shift_operator"
9834 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9835 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9836 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9840 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9841 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9842 [(set_attr "conds" "use")
9843 (set_attr "shift" "2")
9844 (set_attr "length" "4,8,8")
9845 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9846 (const_string "alu_shift")
9847 (const_string "alu_shift_reg")))]
9850 (define_insn "*ifcompare_move_shift"
9851 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9853 (match_operator 6 "arm_comparison_operator"
9854 [(match_operand:SI 4 "s_register_operand" "r,r")
9855 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9856 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9857 (match_operator:SI 7 "shift_operator"
9858 [(match_operand:SI 2 "s_register_operand" "r,r")
9859 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9860 (clobber (reg:CC CC_REGNUM))]
9863 [(set_attr "conds" "clob")
9864 (set_attr "length" "8,12")]
9867 (define_insn "*if_move_shift"
9868 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9870 (match_operator 5 "arm_comparison_operator"
9871 [(match_operand 6 "cc_register" "") (const_int 0)])
9872 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9873 (match_operator:SI 4 "shift_operator"
9874 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9875 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9879 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9880 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9881 [(set_attr "conds" "use")
9882 (set_attr "shift" "2")
9883 (set_attr "length" "4,8,8")
9884 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9885 (const_string "alu_shift")
9886 (const_string "alu_shift_reg")))]
9889 (define_insn "*ifcompare_shift_shift"
9890 [(set (match_operand:SI 0 "s_register_operand" "=r")
9892 (match_operator 7 "arm_comparison_operator"
9893 [(match_operand:SI 5 "s_register_operand" "r")
9894 (match_operand:SI 6 "arm_add_operand" "rIL")])
9895 (match_operator:SI 8 "shift_operator"
9896 [(match_operand:SI 1 "s_register_operand" "r")
9897 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9898 (match_operator:SI 9 "shift_operator"
9899 [(match_operand:SI 3 "s_register_operand" "r")
9900 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9901 (clobber (reg:CC CC_REGNUM))]
9904 [(set_attr "conds" "clob")
9905 (set_attr "length" "12")]
9908 (define_insn "*if_shift_shift"
9909 [(set (match_operand:SI 0 "s_register_operand" "=r")
9911 (match_operator 5 "arm_comparison_operator"
9912 [(match_operand 8 "cc_register" "") (const_int 0)])
9913 (match_operator:SI 6 "shift_operator"
9914 [(match_operand:SI 1 "s_register_operand" "r")
9915 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9916 (match_operator:SI 7 "shift_operator"
9917 [(match_operand:SI 3 "s_register_operand" "r")
9918 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9920 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9921 [(set_attr "conds" "use")
9922 (set_attr "shift" "1")
9923 (set_attr "length" "8")
9924 (set (attr "type") (if_then_else
9925 (and (match_operand 2 "const_int_operand" "")
9926 (match_operand 4 "const_int_operand" ""))
9927 (const_string "alu_shift")
9928 (const_string "alu_shift_reg")))]
9931 (define_insn "*ifcompare_not_arith"
9932 [(set (match_operand:SI 0 "s_register_operand" "=r")
9934 (match_operator 6 "arm_comparison_operator"
9935 [(match_operand:SI 4 "s_register_operand" "r")
9936 (match_operand:SI 5 "arm_add_operand" "rIL")])
9937 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9938 (match_operator:SI 7 "shiftable_operator"
9939 [(match_operand:SI 2 "s_register_operand" "r")
9940 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9941 (clobber (reg:CC CC_REGNUM))]
9944 [(set_attr "conds" "clob")
9945 (set_attr "length" "12")]
9948 (define_insn "*if_not_arith"
9949 [(set (match_operand:SI 0 "s_register_operand" "=r")
9951 (match_operator 5 "arm_comparison_operator"
9952 [(match_operand 4 "cc_register" "") (const_int 0)])
9953 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9954 (match_operator:SI 6 "shiftable_operator"
9955 [(match_operand:SI 2 "s_register_operand" "r")
9956 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9958 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9959 [(set_attr "conds" "use")
9960 (set_attr "length" "8")]
9963 (define_insn "*ifcompare_arith_not"
9964 [(set (match_operand:SI 0 "s_register_operand" "=r")
9966 (match_operator 6 "arm_comparison_operator"
9967 [(match_operand:SI 4 "s_register_operand" "r")
9968 (match_operand:SI 5 "arm_add_operand" "rIL")])
9969 (match_operator:SI 7 "shiftable_operator"
9970 [(match_operand:SI 2 "s_register_operand" "r")
9971 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9972 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9973 (clobber (reg:CC CC_REGNUM))]
9976 [(set_attr "conds" "clob")
9977 (set_attr "length" "12")]
9980 (define_insn "*if_arith_not"
9981 [(set (match_operand:SI 0 "s_register_operand" "=r")
9983 (match_operator 5 "arm_comparison_operator"
9984 [(match_operand 4 "cc_register" "") (const_int 0)])
9985 (match_operator:SI 6 "shiftable_operator"
9986 [(match_operand:SI 2 "s_register_operand" "r")
9987 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9988 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9990 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9991 [(set_attr "conds" "use")
9992 (set_attr "length" "8")]
9995 (define_insn "*ifcompare_neg_move"
9996 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9998 (match_operator 5 "arm_comparison_operator"
9999 [(match_operand:SI 3 "s_register_operand" "r,r")
10000 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10001 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10002 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10003 (clobber (reg:CC CC_REGNUM))]
10006 [(set_attr "conds" "clob")
10007 (set_attr "length" "8,12")]
10010 (define_insn "*if_neg_move"
10011 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10013 (match_operator 4 "arm_comparison_operator"
10014 [(match_operand 3 "cc_register" "") (const_int 0)])
10015 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10016 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10019 rsb%d4\\t%0, %2, #0
10020 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10021 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10022 [(set_attr "conds" "use")
10023 (set_attr "length" "4,8,8")]
10026 (define_insn "*ifcompare_move_neg"
10027 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10029 (match_operator 5 "arm_comparison_operator"
10030 [(match_operand:SI 3 "s_register_operand" "r,r")
10031 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10032 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10033 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10034 (clobber (reg:CC CC_REGNUM))]
10037 [(set_attr "conds" "clob")
10038 (set_attr "length" "8,12")]
10041 (define_insn "*if_move_neg"
10042 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10044 (match_operator 4 "arm_comparison_operator"
10045 [(match_operand 3 "cc_register" "") (const_int 0)])
10046 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10047 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10050 rsb%D4\\t%0, %2, #0
10051 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10052 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10053 [(set_attr "conds" "use")
10054 (set_attr "length" "4,8,8")]
10057 (define_insn "*arith_adjacentmem"
10058 [(set (match_operand:SI 0 "s_register_operand" "=r")
10059 (match_operator:SI 1 "shiftable_operator"
10060 [(match_operand:SI 2 "memory_operand" "m")
10061 (match_operand:SI 3 "memory_operand" "m")]))
10062 (clobber (match_scratch:SI 4 "=r"))]
10063 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10069 HOST_WIDE_INT val1 = 0, val2 = 0;
10071 if (REGNO (operands[0]) > REGNO (operands[4]))
10073 ldm[1] = operands[4];
10074 ldm[2] = operands[0];
10078 ldm[1] = operands[0];
10079 ldm[2] = operands[4];
10082 base_reg = XEXP (operands[2], 0);
10084 if (!REG_P (base_reg))
10086 val1 = INTVAL (XEXP (base_reg, 1));
10087 base_reg = XEXP (base_reg, 0);
10090 if (!REG_P (XEXP (operands[3], 0)))
10091 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10093 arith[0] = operands[0];
10094 arith[3] = operands[1];
10108 if (val1 !=0 && val2 != 0)
10112 if (val1 == 4 || val2 == 4)
10113 /* Other val must be 8, since we know they are adjacent and neither
10115 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10116 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10118 ldm[0] = ops[0] = operands[4];
10120 ops[2] = GEN_INT (val1);
10121 output_add_immediate (ops);
10123 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10125 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10129 /* Offset is out of range for a single add, so use two ldr. */
10132 ops[2] = GEN_INT (val1);
10133 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10135 ops[2] = GEN_INT (val2);
10136 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10139 else if (val1 != 0)
10142 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10144 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10149 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10151 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10153 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10156 [(set_attr "length" "12")
10157 (set_attr "predicable" "yes")
10158 (set_attr "type" "load1")]
10161 ; This pattern is never tried by combine, so do it as a peephole
10164 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10165 (match_operand:SI 1 "arm_general_register_operand" ""))
10166 (set (reg:CC CC_REGNUM)
10167 (compare:CC (match_dup 1) (const_int 0)))]
10169 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10170 (set (match_dup 0) (match_dup 1))])]
10174 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10175 ; reversed, check that the memory references aren't volatile.
10178 [(set (match_operand:SI 0 "s_register_operand" "=r")
10179 (match_operand:SI 4 "memory_operand" "m"))
10180 (set (match_operand:SI 1 "s_register_operand" "=r")
10181 (match_operand:SI 5 "memory_operand" "m"))
10182 (set (match_operand:SI 2 "s_register_operand" "=r")
10183 (match_operand:SI 6 "memory_operand" "m"))
10184 (set (match_operand:SI 3 "s_register_operand" "=r")
10185 (match_operand:SI 7 "memory_operand" "m"))]
10186 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10188 return emit_ldm_seq (operands, 4);
10193 [(set (match_operand:SI 0 "s_register_operand" "=r")
10194 (match_operand:SI 3 "memory_operand" "m"))
10195 (set (match_operand:SI 1 "s_register_operand" "=r")
10196 (match_operand:SI 4 "memory_operand" "m"))
10197 (set (match_operand:SI 2 "s_register_operand" "=r")
10198 (match_operand:SI 5 "memory_operand" "m"))]
10199 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10201 return emit_ldm_seq (operands, 3);
10206 [(set (match_operand:SI 0 "s_register_operand" "=r")
10207 (match_operand:SI 2 "memory_operand" "m"))
10208 (set (match_operand:SI 1 "s_register_operand" "=r")
10209 (match_operand:SI 3 "memory_operand" "m"))]
10210 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10212 return emit_ldm_seq (operands, 2);
10217 [(set (match_operand:SI 4 "memory_operand" "=m")
10218 (match_operand:SI 0 "s_register_operand" "r"))
10219 (set (match_operand:SI 5 "memory_operand" "=m")
10220 (match_operand:SI 1 "s_register_operand" "r"))
10221 (set (match_operand:SI 6 "memory_operand" "=m")
10222 (match_operand:SI 2 "s_register_operand" "r"))
10223 (set (match_operand:SI 7 "memory_operand" "=m")
10224 (match_operand:SI 3 "s_register_operand" "r"))]
10225 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10227 return emit_stm_seq (operands, 4);
10232 [(set (match_operand:SI 3 "memory_operand" "=m")
10233 (match_operand:SI 0 "s_register_operand" "r"))
10234 (set (match_operand:SI 4 "memory_operand" "=m")
10235 (match_operand:SI 1 "s_register_operand" "r"))
10236 (set (match_operand:SI 5 "memory_operand" "=m")
10237 (match_operand:SI 2 "s_register_operand" "r"))]
10238 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10240 return emit_stm_seq (operands, 3);
10245 [(set (match_operand:SI 2 "memory_operand" "=m")
10246 (match_operand:SI 0 "s_register_operand" "r"))
10247 (set (match_operand:SI 3 "memory_operand" "=m")
10248 (match_operand:SI 1 "s_register_operand" "r"))]
10249 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10251 return emit_stm_seq (operands, 2);
10256 [(set (match_operand:SI 0 "s_register_operand" "")
10257 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10259 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10260 [(match_operand:SI 3 "s_register_operand" "")
10261 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10262 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10264 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10265 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10270 ;; This split can be used because CC_Z mode implies that the following
10271 ;; branch will be an equality, or an unsigned inequality, so the sign
10272 ;; extension is not needed.
10275 [(set (reg:CC_Z CC_REGNUM)
10277 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10279 (match_operand 1 "const_int_operand" "")))
10280 (clobber (match_scratch:SI 2 ""))]
10282 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10283 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10284 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10285 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10287 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10290 ;; ??? Check the patterns above for Thumb-2 usefulness
10292 (define_expand "prologue"
10293 [(clobber (const_int 0))]
10296 arm_expand_prologue ();
10298 thumb1_expand_prologue ();
10303 (define_expand "epilogue"
10304 [(clobber (const_int 0))]
10307 if (current_function_calls_eh_return)
10308 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10310 thumb1_expand_epilogue ();
10311 else if (USE_RETURN_INSN (FALSE))
10313 emit_jump_insn (gen_return ());
10316 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10318 gen_rtx_RETURN (VOIDmode)),
10319 VUNSPEC_EPILOGUE));
10324 ;; Note - although unspec_volatile's USE all hard registers,
10325 ;; USEs are ignored after relaod has completed. Thus we need
10326 ;; to add an unspec of the link register to ensure that flow
10327 ;; does not think that it is unused by the sibcall branch that
10328 ;; will replace the standard function epilogue.
10329 (define_insn "sibcall_epilogue"
10330 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10331 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10334 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10335 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10336 return arm_output_epilogue (next_nonnote_insn (insn));
10338 ;; Length is absolute worst case
10339 [(set_attr "length" "44")
10340 (set_attr "type" "block")
10341 ;; We don't clobber the conditions, but the potential length of this
10342 ;; operation is sufficient to make conditionalizing the sequence
10343 ;; unlikely to be profitable.
10344 (set_attr "conds" "clob")]
10347 (define_insn "*epilogue_insns"
10348 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10352 return arm_output_epilogue (NULL);
10353 else /* TARGET_THUMB1 */
10354 return thumb_unexpanded_epilogue ();
10356 ; Length is absolute worst case
10357 [(set_attr "length" "44")
10358 (set_attr "type" "block")
10359 ;; We don't clobber the conditions, but the potential length of this
10360 ;; operation is sufficient to make conditionalizing the sequence
10361 ;; unlikely to be profitable.
10362 (set_attr "conds" "clob")]
10365 (define_expand "eh_epilogue"
10366 [(use (match_operand:SI 0 "register_operand" ""))
10367 (use (match_operand:SI 1 "register_operand" ""))
10368 (use (match_operand:SI 2 "register_operand" ""))]
10372 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10373 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10375 rtx ra = gen_rtx_REG (Pmode, 2);
10377 emit_move_insn (ra, operands[2]);
10380 /* This is a hack -- we may have crystalized the function type too
10382 cfun->machine->func_type = 0;
10386 ;; This split is only used during output to reduce the number of patterns
10387 ;; that need assembler instructions adding to them. We allowed the setting
10388 ;; of the conditions to be implicit during rtl generation so that
10389 ;; the conditional compare patterns would work. However this conflicts to
10390 ;; some extent with the conditional data operations, so we have to split them
10393 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10394 ;; conditional execution sufficient?
10397 [(set (match_operand:SI 0 "s_register_operand" "")
10398 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10399 [(match_operand 2 "" "") (match_operand 3 "" "")])
10401 (match_operand 4 "" "")))
10402 (clobber (reg:CC CC_REGNUM))]
10403 "TARGET_ARM && reload_completed"
10404 [(set (match_dup 5) (match_dup 6))
10405 (cond_exec (match_dup 7)
10406 (set (match_dup 0) (match_dup 4)))]
10409 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10410 operands[2], operands[3]);
10411 enum rtx_code rc = GET_CODE (operands[1]);
10413 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10414 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10415 if (mode == CCFPmode || mode == CCFPEmode)
10416 rc = reverse_condition_maybe_unordered (rc);
10418 rc = reverse_condition (rc);
10420 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10425 [(set (match_operand:SI 0 "s_register_operand" "")
10426 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10427 [(match_operand 2 "" "") (match_operand 3 "" "")])
10428 (match_operand 4 "" "")
10430 (clobber (reg:CC CC_REGNUM))]
10431 "TARGET_ARM && reload_completed"
10432 [(set (match_dup 5) (match_dup 6))
10433 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10434 (set (match_dup 0) (match_dup 4)))]
10437 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10438 operands[2], operands[3]);
10440 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10441 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10446 [(set (match_operand:SI 0 "s_register_operand" "")
10447 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10448 [(match_operand 2 "" "") (match_operand 3 "" "")])
10449 (match_operand 4 "" "")
10450 (match_operand 5 "" "")))
10451 (clobber (reg:CC CC_REGNUM))]
10452 "TARGET_ARM && reload_completed"
10453 [(set (match_dup 6) (match_dup 7))
10454 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10455 (set (match_dup 0) (match_dup 4)))
10456 (cond_exec (match_dup 8)
10457 (set (match_dup 0) (match_dup 5)))]
10460 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10461 operands[2], operands[3]);
10462 enum rtx_code rc = GET_CODE (operands[1]);
10464 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10465 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10466 if (mode == CCFPmode || mode == CCFPEmode)
10467 rc = reverse_condition_maybe_unordered (rc);
10469 rc = reverse_condition (rc);
10471 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10476 [(set (match_operand:SI 0 "s_register_operand" "")
10477 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10478 [(match_operand:SI 2 "s_register_operand" "")
10479 (match_operand:SI 3 "arm_add_operand" "")])
10480 (match_operand:SI 4 "arm_rhs_operand" "")
10482 (match_operand:SI 5 "s_register_operand" ""))))
10483 (clobber (reg:CC CC_REGNUM))]
10484 "TARGET_ARM && reload_completed"
10485 [(set (match_dup 6) (match_dup 7))
10486 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10487 (set (match_dup 0) (match_dup 4)))
10488 (cond_exec (match_dup 8)
10489 (set (match_dup 0) (not:SI (match_dup 5))))]
10492 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10493 operands[2], operands[3]);
10494 enum rtx_code rc = GET_CODE (operands[1]);
10496 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10497 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10498 if (mode == CCFPmode || mode == CCFPEmode)
10499 rc = reverse_condition_maybe_unordered (rc);
10501 rc = reverse_condition (rc);
10503 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10507 (define_insn "*cond_move_not"
10508 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10509 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10510 [(match_operand 3 "cc_register" "") (const_int 0)])
10511 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10513 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10517 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10518 [(set_attr "conds" "use")
10519 (set_attr "length" "4,8")]
10522 ;; The next two patterns occur when an AND operation is followed by a
10523 ;; scc insn sequence
10525 (define_insn "*sign_extract_onebit"
10526 [(set (match_operand:SI 0 "s_register_operand" "=r")
10527 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10529 (match_operand:SI 2 "const_int_operand" "n")))
10530 (clobber (reg:CC CC_REGNUM))]
10533 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10534 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10535 return \"mvnne\\t%0, #0\";
10537 [(set_attr "conds" "clob")
10538 (set_attr "length" "8")]
10541 (define_insn "*not_signextract_onebit"
10542 [(set (match_operand:SI 0 "s_register_operand" "=r")
10544 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10546 (match_operand:SI 2 "const_int_operand" "n"))))
10547 (clobber (reg:CC CC_REGNUM))]
10550 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10551 output_asm_insn (\"tst\\t%1, %2\", operands);
10552 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10553 return \"movne\\t%0, #0\";
10555 [(set_attr "conds" "clob")
10556 (set_attr "length" "12")]
10558 ;; ??? The above patterns need auditing for Thumb-2
10560 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10561 ;; expressions. For simplicity, the first register is also in the unspec
10563 (define_insn "*push_multi"
10564 [(match_parallel 2 "multi_register_push"
10565 [(set (match_operand:BLK 0 "memory_operand" "=m")
10566 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10567 UNSPEC_PUSH_MULT))])]
10571 int num_saves = XVECLEN (operands[2], 0);
10573 /* For the StrongARM at least it is faster to
10574 use STR to store only a single register.
10575 In Thumb mode always use push, and the assembler will pick
10576 something appropriate. */
10577 if (num_saves == 1 && TARGET_ARM)
10578 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10585 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10587 strcpy (pattern, \"push\\t{%1\");
10589 for (i = 1; i < num_saves; i++)
10591 strcat (pattern, \", %|\");
10593 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10596 strcat (pattern, \"}\");
10597 output_asm_insn (pattern, operands);
10602 [(set_attr "type" "store4")]
10605 (define_insn "stack_tie"
10606 [(set (mem:BLK (scratch))
10607 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "r")
10608 (match_operand:SI 1 "s_register_operand" "r")]
10612 [(set_attr "length" "0")]
10615 ;; Similarly for the floating point registers
10616 (define_insn "*push_fp_multi"
10617 [(match_parallel 2 "multi_register_push"
10618 [(set (match_operand:BLK 0 "memory_operand" "=m")
10619 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10620 UNSPEC_PUSH_MULT))])]
10621 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10626 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10627 output_asm_insn (pattern, operands);
10630 [(set_attr "type" "f_store")]
10633 ;; Special patterns for dealing with the constant pool
10635 (define_insn "align_4"
10636 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10639 assemble_align (32);
10644 (define_insn "align_8"
10645 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10648 assemble_align (64);
10653 (define_insn "consttable_end"
10654 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10657 making_const_table = FALSE;
10662 (define_insn "consttable_1"
10663 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10666 making_const_table = TRUE;
10667 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10668 assemble_zeros (3);
10671 [(set_attr "length" "4")]
10674 (define_insn "consttable_2"
10675 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10678 making_const_table = TRUE;
10679 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10680 assemble_zeros (2);
10683 [(set_attr "length" "4")]
10686 (define_insn "consttable_4"
10687 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10691 making_const_table = TRUE;
10692 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10697 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10698 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10702 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10707 [(set_attr "length" "4")]
10710 (define_insn "consttable_8"
10711 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10715 making_const_table = TRUE;
10716 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10721 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10722 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10726 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10731 [(set_attr "length" "8")]
10734 ;; Miscellaneous Thumb patterns
10736 (define_expand "tablejump"
10737 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10738 (use (label_ref (match_operand 1 "" "")))])]
10743 /* Hopefully, CSE will eliminate this copy. */
10744 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10745 rtx reg2 = gen_reg_rtx (SImode);
10747 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10748 operands[0] = reg2;
10753 ;; NB never uses BX.
10754 (define_insn "*thumb1_tablejump"
10755 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10756 (use (label_ref (match_operand 1 "" "")))]
10759 [(set_attr "length" "2")]
10762 ;; V5 Instructions,
10764 (define_insn "clzsi2"
10765 [(set (match_operand:SI 0 "s_register_operand" "=r")
10766 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10767 "TARGET_32BIT && arm_arch5"
10769 [(set_attr "predicable" "yes")])
10771 (define_expand "ffssi2"
10772 [(set (match_operand:SI 0 "s_register_operand" "")
10773 (ffs:SI (match_operand:SI 1 "s_register_operand" "")))]
10774 "TARGET_32BIT && arm_arch5"
10779 t1 = gen_reg_rtx (SImode);
10780 t2 = gen_reg_rtx (SImode);
10781 t3 = gen_reg_rtx (SImode);
10783 emit_insn (gen_negsi2 (t1, operands[1]));
10784 emit_insn (gen_andsi3 (t2, operands[1], t1));
10785 emit_insn (gen_clzsi2 (t3, t2));
10786 emit_insn (gen_subsi3 (operands[0], GEN_INT (32), t3));
10791 (define_expand "ctzsi2"
10792 [(set (match_operand:SI 0 "s_register_operand" "")
10793 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10794 "TARGET_32BIT && arm_arch5"
10799 t1 = gen_reg_rtx (SImode);
10800 t2 = gen_reg_rtx (SImode);
10801 t3 = gen_reg_rtx (SImode);
10803 emit_insn (gen_negsi2 (t1, operands[1]));
10804 emit_insn (gen_andsi3 (t2, operands[1], t1));
10805 emit_insn (gen_clzsi2 (t3, t2));
10806 emit_insn (gen_subsi3 (operands[0], GEN_INT (31), t3));
10811 ;; V5E instructions.
10813 (define_insn "prefetch"
10814 [(prefetch (match_operand:SI 0 "address_operand" "p")
10815 (match_operand:SI 1 "" "")
10816 (match_operand:SI 2 "" ""))]
10817 "TARGET_32BIT && arm_arch5e"
10820 ;; General predication pattern
10823 [(match_operator 0 "arm_comparison_operator"
10824 [(match_operand 1 "cc_register" "")
10830 (define_insn "prologue_use"
10831 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10833 "%@ %0 needed for prologue"
10837 ;; Patterns for exception handling
10839 (define_expand "eh_return"
10840 [(use (match_operand 0 "general_operand" ""))]
10845 emit_insn (gen_arm_eh_return (operands[0]));
10847 emit_insn (gen_thumb_eh_return (operands[0]));
10852 ;; We can't expand this before we know where the link register is stored.
10853 (define_insn_and_split "arm_eh_return"
10854 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10856 (clobber (match_scratch:SI 1 "=&r"))]
10859 "&& reload_completed"
10863 arm_set_return_address (operands[0], operands[1]);
10868 (define_insn_and_split "thumb_eh_return"
10869 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10871 (clobber (match_scratch:SI 1 "=&l"))]
10874 "&& reload_completed"
10878 thumb_set_return_address (operands[0], operands[1]);
10886 (define_insn "load_tp_hard"
10887 [(set (match_operand:SI 0 "register_operand" "=r")
10888 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10890 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10891 [(set_attr "predicable" "yes")]
10894 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10895 (define_insn "load_tp_soft"
10896 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10897 (clobber (reg:SI LR_REGNUM))
10898 (clobber (reg:SI IP_REGNUM))
10899 (clobber (reg:CC CC_REGNUM))]
10901 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10902 [(set_attr "conds" "clob")]
10905 ;; Load the FPA co-processor patterns
10907 ;; Load the Maverick co-processor patterns
10908 (include "cirrus.md")
10909 ;; Load the Intel Wireless Multimedia Extension patterns
10910 (include "iwmmxt.md")
10911 ;; Load the VFP co-processor patterns
10913 ;; Thumb-2 patterns
10914 (include "thumb2.md")