1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
  60    (UNSPEC_COS 1)       ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
71 ; GLOBAL_OFFSET_TABLE. The operation is fully
72 ; described by the RTL but must be wrapped to
73 ; prevent combine from trying to rip it apart.
74 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
75 ; being scheduled before the stack adjustment insn.
76 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
77 ; this unspec is used to prevent the deletion of
78 ; instructions setting registers for EH handling
79 ; and stack frame generation. Operand 0 is the
80 ; register to "use".
81 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
82 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
83 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
84 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
85 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
86 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
87 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
88 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
89 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
90 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
91 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
92 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
93 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
94 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
95 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 ; instruction stream.
97 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
98 ; generate correct unwind information.
99 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
100 ; correctly for PIC usage.
101 ]
102 )
103
104 ;; UNSPEC_VOLATILE Usage:
105
106 (define_constants
107 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
108 ; insn in the code.
109 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
110 ; instruction epilogue sequence that isn't expanded
111 ; into normal RTL. Used for both normal and sibcall
112 ; epilogues.
113 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
114 ; for inlined constants.
115 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
116 ; table.
117 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
118 ; an 8-bit object.
119 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
120 ; a 16-bit object.
121 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
122 ; a 32-bit object.
123 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
124 ; a 64-bit object.
125 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
126 ; a 128-bit object.
127 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
128 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
129 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
130 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
131 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
 132    (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
133 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
134 ; handling.
135 ]
136 )
137 \f
138 ;;---------------------------------------------------------------------------
139 ;; Attributes
140
141 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
142 ; generating ARM code. This is used to control the length of some insn
143 ; patterns that share the same RTL in both ARM and Thumb code.
144 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
145
 146 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM; it affects
147 ; scheduling decisions for the load unit and the multiplier.
148 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
149
150 ; IS_XSCALE is set to 'yes' when compiling for XScale.
151 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
152
153 ;; Operand number of an input operand that is shifted. Zero if the
154 ;; given instruction does not shift one of its input operands.
155 (define_attr "shift" "" (const_int 0))
156
157 ; Floating Point Unit. If we only have floating point emulation, then there
158 ; is no point in scheduling the floating point insns. (Well, for best
 159 ; performance we should try to group them together).
160 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
161 (const (symbol_ref "arm_fpu_attr")))
162
163 ; LENGTH of an instruction (in bytes)
164 (define_attr "length" "" (const_int 4))
165
 166 ; POOL_RANGE is how far away from a constant pool entry this insn
167 ; can be placed. If the distance is zero, then this insn will never
168 ; reference the pool.
169 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
170 ; before its address.
171 (define_attr "pool_range" "" (const_int 0))
172 (define_attr "neg_pool_range" "" (const_int 0))
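
; For illustration (the exact values here are assumptions, not taken from
; this file), an ARM-state word-load pattern would set something like
;   (set_attr "pool_range" "4096")
;   (set_attr "neg_pool_range" "4084")
; to say that it can reach a literal up to 4096 bytes after its own address,
; or about 4 Kbytes before it.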
173
174 ; An assembler sequence may clobber the condition codes without us knowing.
175 ; If such an insn references the pool, then we have no way of knowing how,
176 ; so use the most conservative value for pool_range.
177 (define_asm_attributes
178 [(set_attr "conds" "clob")
179 (set_attr "length" "4")
180 (set_attr "pool_range" "250")])
181
182 ;; The instruction used to implement a particular pattern. This
183 ;; information is used by pipeline descriptions to provide accurate
184 ;; scheduling information.
185
186 (define_attr "insn"
187 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
188 (const_string "other"))
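
;; For example, the multiply patterns later in this file tag themselves with
;;   (set_attr "insn" "mul")
;; or
;;   (set_attr "insn" "mla")
;; so that the per-core pipeline descriptions can model their latency; any
;; pattern that does not set this attribute defaults to "other".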
189
 190 ; The TYPE attribute is used to detect floating point instructions which, if
 191 ; running on a co-processor, can run in parallel with other, basic instructions.
192 ; If write-buffer scheduling is enabled then it can also be used in the
193 ; scheduling of writes.
194
195 ; Classification of each insn
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
223 ; branch a branch
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
 230 ; store1              store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
237 ;
238
239 (define_attr "type"
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
241 (if_then_else
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
245
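; A pattern can override this default classification with set_attr; as a
; minimal illustration (not a pattern from this file), a single-word load
; would carry
;   (set_attr "type" "load1")
; so that the pipeline descriptions can schedule it on the load unit.
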
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
249
250 ; condition codes: this one is used by final_prescan_insn to speed up
251 ; conditionalizing instructions. It saves having to scan the rtl to see if
252 ; it uses or alters the condition codes.
253 ;
254 ; USE means that the condition codes are used by the insn in the process of
 255 ; outputting code; this means (at present) that we can't use the insn in
 256 ; inlined branches.
257 ;
258 ; SET means that the purpose of the insn is to set the condition codes in a
259 ; well defined manner.
260 ;
261 ; CLOB means that the condition codes are altered in an undefined manner, if
262 ; they are altered at all
263 ;
264 ; JUMP_CLOB is used when the condition cannot be represented by a single
265 ; instruction (UNEQ and LTGT). These cannot be predicated.
266 ;
 267 ; NOCOND means that the condition codes are neither altered by nor affect the
 268 ; output of this insn.
269
270 (define_attr "conds" "use,set,clob,jump_clob,nocond"
271 (if_then_else (eq_attr "type" "call")
272 (const_string "clob")
273 (const_string "nocond")))
274
275 ; Predicable means that the insn can be conditionally executed based on
276 ; an automatically added predicate (additional patterns are generated by
277 ; gen...). We default to 'no' because no Thumb patterns match this rule
278 ; and not all ARM patterns do.
279 (define_attr "predicable" "no,yes" (const_string "no"))
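
; For example, *arm_addsi3 later in this file pairs
;   (set_attr "predicable" "yes")
; with the %? escape in its output template ("add%?\t%0, %1, %2"), allowing
; the conditional execution passes to attach a condition to the emitted
; instruction.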
280
281 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
282 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
283 ; suffer blockages enough to warrant modelling this (and it can adversely
284 ; affect the schedule).
285 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
286
287 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
288 ; to stall the processor. Used with model_wbuf above.
289 (define_attr "write_conflict" "no,yes"
290 (if_then_else (eq_attr "type"
291 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
292 (const_string "yes")
293 (const_string "no")))
294
295 ; Classify the insns into those that take one cycle and those that take more
296 ; than one on the main cpu execution unit.
297 (define_attr "core_cycles" "single,multi"
298 (if_then_else (eq_attr "type"
299 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
300 (const_string "single")
301 (const_string "multi")))
302
303 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
304 ;; distant label. Only applicable to Thumb code.
305 (define_attr "far_jump" "yes,no" (const_string "no"))
306
307
308 ;; The number of machine instructions this pattern expands to.
309 ;; Used for Thumb-2 conditional execution.
310 (define_attr "ce_count" "" (const_int 1))
311
312 ;;---------------------------------------------------------------------------
313 ;; Mode iterators
314
315 ; A list of modes that are exactly 64 bits in size. We use this to expand
316 ; some splits that are the same for all modes when operating on ARM
317 ; registers.
318 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
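
; As a hedged sketch of how a mode iterator is used (the operands shown are
; illustrative, not a pattern from this file):
;   (define_split
;     [(set (match_operand:ANY64 0 "s_register_operand" "")
;           (match_operand:ANY64 1 "s_register_operand" ""))]
;     ...)
; expands into one split for each listed mode (DI, DF, V8QI, and so on), so
; the same RTL only has to be written once.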
319
320 ;;---------------------------------------------------------------------------
321 ;; Predicates
322
323 (include "predicates.md")
324 (include "constraints.md")
325
326 ;;---------------------------------------------------------------------------
327 ;; Pipeline descriptions
328
329 ;; Processor type. This is created automatically from arm-cores.def.
330 (include "arm-tune.md")
331
332 ;; True if the generic scheduling description should be used.
333
334 (define_attr "generic_sched" "yes,no"
335 (const (if_then_else
336 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexr4")
337 (const_string "no")
338 (const_string "yes"))))
339
340 (define_attr "generic_vfp" "yes,no"
341 (const (if_then_else
342 (and (eq_attr "fpu" "vfp")
343 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
344 (const_string "yes")
345 (const_string "no"))))
346
347 (include "arm-generic.md")
348 (include "arm926ejs.md")
349 (include "arm1020e.md")
350 (include "arm1026ejs.md")
351 (include "arm1136jfs.md")
352 (include "cortex-a8.md")
353 (include "cortex-r4.md")
354 (include "vfp11.md")
355
356 \f
357 ;;---------------------------------------------------------------------------
358 ;; Insn patterns
359 ;;
360 ;; Addition insns.
361
 362 ;; Note: For DImode insns, there is normally no reason why operands should
 363 ;; not be in the same register; what we don't want is for something being
 364 ;; written to partially overlap something that is an input.
 365 ;; Cirrus 64bit additions should not be split because we have native
 366 ;; 64bit addition instructions.
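;; As a rough sketch (register names are illustrative), the *arm_adddi3 split
;; below turns a 64-bit addition into a carry-propagating pair such as
;;   adds  rd_lo, r1_lo, r2_lo
;;   adc   rd_hi, r1_hi, r2_hi
;; which the *addsi3_compare_op1 and *addsi3_carryin patterns below then match.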
367
368 (define_expand "adddi3"
369 [(parallel
370 [(set (match_operand:DI 0 "s_register_operand" "")
371 (plus:DI (match_operand:DI 1 "s_register_operand" "")
372 (match_operand:DI 2 "s_register_operand" "")))
373 (clobber (reg:CC CC_REGNUM))])]
374 "TARGET_EITHER"
375 "
376 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
377 {
378 if (!cirrus_fp_register (operands[0], DImode))
379 operands[0] = force_reg (DImode, operands[0]);
380 if (!cirrus_fp_register (operands[1], DImode))
381 operands[1] = force_reg (DImode, operands[1]);
382 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
383 DONE;
384 }
385
386 if (TARGET_THUMB1)
387 {
388 if (GET_CODE (operands[1]) != REG)
389 operands[1] = force_reg (SImode, operands[1]);
390 if (GET_CODE (operands[2]) != REG)
391 operands[2] = force_reg (SImode, operands[2]);
392 }
393 "
394 )
395
396 (define_insn "*thumb1_adddi3"
397 [(set (match_operand:DI 0 "register_operand" "=l")
398 (plus:DI (match_operand:DI 1 "register_operand" "%0")
399 (match_operand:DI 2 "register_operand" "l")))
400 (clobber (reg:CC CC_REGNUM))
401 ]
402 "TARGET_THUMB1"
403 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
404 [(set_attr "length" "4")]
405 )
406
407 (define_insn_and_split "*arm_adddi3"
408 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
409 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
410 (match_operand:DI 2 "s_register_operand" "r, 0")))
411 (clobber (reg:CC CC_REGNUM))]
412 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
413 "#"
414 "TARGET_32BIT && reload_completed"
415 [(parallel [(set (reg:CC_C CC_REGNUM)
416 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
417 (match_dup 1)))
418 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
419 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
420 (plus:SI (match_dup 4) (match_dup 5))))]
421 "
422 {
423 operands[3] = gen_highpart (SImode, operands[0]);
424 operands[0] = gen_lowpart (SImode, operands[0]);
425 operands[4] = gen_highpart (SImode, operands[1]);
426 operands[1] = gen_lowpart (SImode, operands[1]);
427 operands[5] = gen_highpart (SImode, operands[2]);
428 operands[2] = gen_lowpart (SImode, operands[2]);
429 }"
430 [(set_attr "conds" "clob")
431 (set_attr "length" "8")]
432 )
433
434 (define_insn_and_split "*adddi_sesidi_di"
435 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
436 (plus:DI (sign_extend:DI
437 (match_operand:SI 2 "s_register_operand" "r,r"))
438 (match_operand:DI 1 "s_register_operand" "r,0")))
439 (clobber (reg:CC CC_REGNUM))]
440 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
441 "#"
442 "TARGET_32BIT && reload_completed"
443 [(parallel [(set (reg:CC_C CC_REGNUM)
444 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
445 (match_dup 1)))
446 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
447 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
448 (plus:SI (ashiftrt:SI (match_dup 2)
449 (const_int 31))
450 (match_dup 4))))]
451 "
452 {
453 operands[3] = gen_highpart (SImode, operands[0]);
454 operands[0] = gen_lowpart (SImode, operands[0]);
455 operands[4] = gen_highpart (SImode, operands[1]);
456 operands[1] = gen_lowpart (SImode, operands[1]);
457 operands[2] = gen_lowpart (SImode, operands[2]);
458 }"
459 [(set_attr "conds" "clob")
460 (set_attr "length" "8")]
461 )
462
463 (define_insn_and_split "*adddi_zesidi_di"
464 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
465 (plus:DI (zero_extend:DI
466 (match_operand:SI 2 "s_register_operand" "r,r"))
467 (match_operand:DI 1 "s_register_operand" "r,0")))
468 (clobber (reg:CC CC_REGNUM))]
469 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
470 "#"
471 "TARGET_32BIT && reload_completed"
472 [(parallel [(set (reg:CC_C CC_REGNUM)
473 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
474 (match_dup 1)))
475 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
476 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
477 (plus:SI (match_dup 4) (const_int 0))))]
478 "
479 {
480 operands[3] = gen_highpart (SImode, operands[0]);
481 operands[0] = gen_lowpart (SImode, operands[0]);
482 operands[4] = gen_highpart (SImode, operands[1]);
483 operands[1] = gen_lowpart (SImode, operands[1]);
484 operands[2] = gen_lowpart (SImode, operands[2]);
485 }"
486 [(set_attr "conds" "clob")
487 (set_attr "length" "8")]
488 )
489
490 (define_expand "addsi3"
491 [(set (match_operand:SI 0 "s_register_operand" "")
492 (plus:SI (match_operand:SI 1 "s_register_operand" "")
493 (match_operand:SI 2 "reg_or_int_operand" "")))]
494 "TARGET_EITHER"
495 "
496 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
497 {
498 arm_split_constant (PLUS, SImode, NULL_RTX,
499 INTVAL (operands[2]), operands[0], operands[1],
500 optimize && can_create_pseudo_p ());
501 DONE;
502 }
503 "
504 )
505
506 ; If there is a scratch available, this will be faster than synthesizing the
507 ; addition.
508 (define_peephole2
509 [(match_scratch:SI 3 "r")
510 (set (match_operand:SI 0 "arm_general_register_operand" "")
511 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
512 (match_operand:SI 2 "const_int_operand" "")))]
513 "TARGET_32BIT &&
514 !(const_ok_for_arm (INTVAL (operands[2]))
515 || const_ok_for_arm (-INTVAL (operands[2])))
516 && const_ok_for_arm (~INTVAL (operands[2]))"
517 [(set (match_dup 3) (match_dup 2))
518 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
519 ""
520 )
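
; A worked example under the condition above (the constant is chosen only for
; illustration): for rd = rn + 0xff00ffff, neither 0xff00ffff nor its negation
; 0x00ff0001 is a valid ARM immediate, but its bitwise complement 0x00ff0000
; is, so with a scratch register free the peephole allows something like
;   mvn   scratch, #0x00ff0000
;   add   rd, rn, scratch
; instead of a longer synthesized sequence.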
521
522 (define_insn_and_split "*arm_addsi3"
523 [(set (match_operand:SI 0 "s_register_operand" "=r, !k,r, !k,r")
524 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k,rk,!k,rk")
525 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,L, L,?n")))]
526 "TARGET_32BIT"
527 "@
528 add%?\\t%0, %1, %2
529 add%?\\t%0, %1, %2
530 sub%?\\t%0, %1, #%n2
531 sub%?\\t%0, %1, #%n2
532 #"
533 "TARGET_32BIT &&
534 GET_CODE (operands[2]) == CONST_INT
535 && !(const_ok_for_arm (INTVAL (operands[2]))
536 || const_ok_for_arm (-INTVAL (operands[2])))"
537 [(clobber (const_int 0))]
538 "
539 arm_split_constant (PLUS, SImode, curr_insn,
540 INTVAL (operands[2]), operands[0],
541 operands[1], 0);
542 DONE;
543 "
544 [(set_attr "length" "4,4,4,4,16")
545 (set_attr "predicable" "yes")]
546 )
547
 548 ;; Register group 'k' contains only the stack register.  Trying to reload it
 549 ;; will always fail catastrophically, so never allow those alternatives to
 550 ;; match if reloading is needed.
551
552 (define_insn "*thumb1_addsi3"
553 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
554 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
555 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
556 "TARGET_THUMB1"
557 "*
558 static const char * const asms[] =
559 {
560 \"add\\t%0, %0, %2\",
561 \"sub\\t%0, %0, #%n2\",
562 \"add\\t%0, %1, %2\",
563 \"add\\t%0, %0, %2\",
564 \"add\\t%0, %0, %2\",
565 \"add\\t%0, %1, %2\",
566 \"add\\t%0, %1, %2\"
567 };
568 if ((which_alternative == 2 || which_alternative == 6)
569 && GET_CODE (operands[2]) == CONST_INT
570 && INTVAL (operands[2]) < 0)
571 return \"sub\\t%0, %1, #%n2\";
572 return asms[which_alternative];
573 "
574 [(set_attr "length" "2")]
575 )
576
577 ;; Reloading and elimination of the frame pointer can
578 ;; sometimes cause this optimization to be missed.
579 (define_peephole2
580 [(set (match_operand:SI 0 "arm_general_register_operand" "")
581 (match_operand:SI 1 "const_int_operand" ""))
582 (set (match_dup 0)
583 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
584 "TARGET_THUMB1
585 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
586 && (INTVAL (operands[1]) & 3) == 0"
587 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
588 ""
589 )
590
591 ;; ??? Make Thumb-2 variants which prefer low regs
592 (define_insn "*addsi3_compare0"
593 [(set (reg:CC_NOOV CC_REGNUM)
594 (compare:CC_NOOV
595 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
596 (match_operand:SI 2 "arm_add_operand" "rI,L"))
597 (const_int 0)))
598 (set (match_operand:SI 0 "s_register_operand" "=r,r")
599 (plus:SI (match_dup 1) (match_dup 2)))]
600 "TARGET_32BIT"
601 "@
602 add%.\\t%0, %1, %2
603 sub%.\\t%0, %1, #%n2"
604 [(set_attr "conds" "set")]
605 )
606
607 (define_insn "*addsi3_compare0_scratch"
608 [(set (reg:CC_NOOV CC_REGNUM)
609 (compare:CC_NOOV
610 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
611 (match_operand:SI 1 "arm_add_operand" "rI,L"))
612 (const_int 0)))]
613 "TARGET_32BIT"
614 "@
615 cmn%?\\t%0, %1
616 cmp%?\\t%0, #%n1"
617 [(set_attr "conds" "set")]
618 )
619
620 (define_insn "*compare_negsi_si"
621 [(set (reg:CC_Z CC_REGNUM)
622 (compare:CC_Z
623 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
624 (match_operand:SI 1 "s_register_operand" "r")))]
625 "TARGET_32BIT"
626 "cmn%?\\t%1, %0"
627 [(set_attr "conds" "set")]
628 )
629
630 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
631 ;; addend is a constant.
632 (define_insn "*cmpsi2_addneg"
633 [(set (reg:CC CC_REGNUM)
634 (compare:CC
635 (match_operand:SI 1 "s_register_operand" "r,r")
636 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
637 (set (match_operand:SI 0 "s_register_operand" "=r,r")
638 (plus:SI (match_dup 1)
639 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
640 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
641 "@
642 sub%.\\t%0, %1, %2
643 add%.\\t%0, %1, #%n2"
644 [(set_attr "conds" "set")]
645 )
646
647 ;; Convert the sequence
648 ;; sub rd, rn, #1
649 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
650 ;; bne dest
651 ;; into
652 ;; subs rd, rn, #1
653 ;; bcs dest ((unsigned)rn >= 1)
654 ;; similarly for the beq variant using bcc.
655 ;; This is a common looping idiom (while (n--))
656 (define_peephole2
657 [(set (match_operand:SI 0 "arm_general_register_operand" "")
658 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
659 (const_int -1)))
660 (set (match_operand 2 "cc_register" "")
661 (compare (match_dup 0) (const_int -1)))
662 (set (pc)
663 (if_then_else (match_operator 3 "equality_operator"
664 [(match_dup 2) (const_int 0)])
665 (match_operand 4 "" "")
666 (match_operand 5 "" "")))]
667 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
668 [(parallel[
669 (set (match_dup 2)
670 (compare:CC
671 (match_dup 1) (const_int 1)))
672 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
673 (set (pc)
674 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
675 (match_dup 4)
676 (match_dup 5)))]
677 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
678 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
679 ? GEU : LTU),
680 VOIDmode,
681 operands[2], const0_rtx);"
682 )
683
684 ;; The next four insns work because they compare the result with one of
685 ;; the operands, and we know that the use of the condition code is
686 ;; either GEU or LTU, so we can use the carry flag from the addition
687 ;; instead of doing the compare a second time.
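;; For example (register names illustrative), the first pattern below emits
;;   adds  r0, r1, r2
;; and the carry flag left by the addition is then tested directly with a
;; bcs/bcc (GEU/LTU) branch, with no separate cmn or cmp needed.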
688 (define_insn "*addsi3_compare_op1"
689 [(set (reg:CC_C CC_REGNUM)
690 (compare:CC_C
691 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
692 (match_operand:SI 2 "arm_add_operand" "rI,L"))
693 (match_dup 1)))
694 (set (match_operand:SI 0 "s_register_operand" "=r,r")
695 (plus:SI (match_dup 1) (match_dup 2)))]
696 "TARGET_32BIT"
697 "@
698 add%.\\t%0, %1, %2
699 sub%.\\t%0, %1, #%n2"
700 [(set_attr "conds" "set")]
701 )
702
703 (define_insn "*addsi3_compare_op2"
704 [(set (reg:CC_C CC_REGNUM)
705 (compare:CC_C
706 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_add_operand" "rI,L"))
708 (match_dup 2)))
709 (set (match_operand:SI 0 "s_register_operand" "=r,r")
710 (plus:SI (match_dup 1) (match_dup 2)))]
711 "TARGET_32BIT"
712 "@
713 add%.\\t%0, %1, %2
714 sub%.\\t%0, %1, #%n2"
715 [(set_attr "conds" "set")]
716 )
717
718 (define_insn "*compare_addsi2_op0"
719 [(set (reg:CC_C CC_REGNUM)
720 (compare:CC_C
721 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
722 (match_operand:SI 1 "arm_add_operand" "rI,L"))
723 (match_dup 0)))]
724 "TARGET_32BIT"
725 "@
726 cmn%?\\t%0, %1
727 cmp%?\\t%0, #%n1"
728 [(set_attr "conds" "set")]
729 )
730
731 (define_insn "*compare_addsi2_op1"
732 [(set (reg:CC_C CC_REGNUM)
733 (compare:CC_C
734 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
735 (match_operand:SI 1 "arm_add_operand" "rI,L"))
736 (match_dup 1)))]
737 "TARGET_32BIT"
738 "@
739 cmn%?\\t%0, %1
740 cmp%?\\t%0, #%n1"
741 [(set_attr "conds" "set")]
742 )
743
744 (define_insn "*addsi3_carryin"
745 [(set (match_operand:SI 0 "s_register_operand" "=r")
746 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
747 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
748 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
749 "TARGET_32BIT"
750 "adc%?\\t%0, %1, %2"
751 [(set_attr "conds" "use")]
752 )
753
754 (define_insn "*addsi3_carryin_shift"
755 [(set (match_operand:SI 0 "s_register_operand" "=r")
756 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
757 (plus:SI
758 (match_operator:SI 2 "shift_operator"
759 [(match_operand:SI 3 "s_register_operand" "r")
760 (match_operand:SI 4 "reg_or_int_operand" "rM")])
761 (match_operand:SI 1 "s_register_operand" "r"))))]
762 "TARGET_32BIT"
763 "adc%?\\t%0, %1, %3%S2"
764 [(set_attr "conds" "use")
765 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
766 (const_string "alu_shift")
767 (const_string "alu_shift_reg")))]
768 )
769
770 (define_insn "*addsi3_carryin_alt1"
771 [(set (match_operand:SI 0 "s_register_operand" "=r")
772 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
773 (match_operand:SI 2 "arm_rhs_operand" "rI"))
774 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
775 "TARGET_32BIT"
776 "adc%?\\t%0, %1, %2"
777 [(set_attr "conds" "use")]
778 )
779
780 (define_insn "*addsi3_carryin_alt2"
781 [(set (match_operand:SI 0 "s_register_operand" "=r")
782 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
783 (match_operand:SI 1 "s_register_operand" "r"))
784 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
785 "TARGET_32BIT"
786 "adc%?\\t%0, %1, %2"
787 [(set_attr "conds" "use")]
788 )
789
790 (define_insn "*addsi3_carryin_alt3"
791 [(set (match_operand:SI 0 "s_register_operand" "=r")
792 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
793 (match_operand:SI 2 "arm_rhs_operand" "rI"))
794 (match_operand:SI 1 "s_register_operand" "r")))]
795 "TARGET_32BIT"
796 "adc%?\\t%0, %1, %2"
797 [(set_attr "conds" "use")]
798 )
799
800 (define_expand "incscc"
801 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
803 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
804 (match_operand:SI 1 "s_register_operand" "0,?r")))]
805 "TARGET_32BIT"
806 ""
807 )
808
809 (define_insn "*arm_incscc"
810 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
811 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
812 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
813 (match_operand:SI 1 "s_register_operand" "0,?r")))]
814 "TARGET_ARM"
815 "@
816 add%d2\\t%0, %1, #1
817 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
818 [(set_attr "conds" "use")
819 (set_attr "length" "4,8")]
820 )
821
 822 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
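; A quick arithmetic check of the identity with x = 4, y = 2 (values chosen
; only for illustration, in 32-bit arithmetic): (4 << 2) - 1 = 15, and
; ~(~(4 - 1) << 2) = ~(0xfffffffc << 2) = ~0xfffffff0 = 0xf = 15.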
823 (define_split
824 [(set (match_operand:SI 0 "s_register_operand" "")
825 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
826 (match_operand:SI 2 "s_register_operand" ""))
827 (const_int -1)))
828 (clobber (match_operand:SI 3 "s_register_operand" ""))]
829 "TARGET_32BIT"
830 [(set (match_dup 3) (match_dup 1))
831 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
832 "
833 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
834 ")
835
836 (define_expand "addsf3"
837 [(set (match_operand:SF 0 "s_register_operand" "")
838 (plus:SF (match_operand:SF 1 "s_register_operand" "")
839 (match_operand:SF 2 "arm_float_add_operand" "")))]
840 "TARGET_32BIT && TARGET_HARD_FLOAT"
841 "
842 if (TARGET_MAVERICK
843 && !cirrus_fp_register (operands[2], SFmode))
844 operands[2] = force_reg (SFmode, operands[2]);
845 ")
846
847 (define_expand "adddf3"
848 [(set (match_operand:DF 0 "s_register_operand" "")
849 (plus:DF (match_operand:DF 1 "s_register_operand" "")
850 (match_operand:DF 2 "arm_float_add_operand" "")))]
851 "TARGET_32BIT && TARGET_HARD_FLOAT"
852 "
853 if (TARGET_MAVERICK
854 && !cirrus_fp_register (operands[2], DFmode))
855 operands[2] = force_reg (DFmode, operands[2]);
856 ")
857
858 (define_expand "subdi3"
859 [(parallel
860 [(set (match_operand:DI 0 "s_register_operand" "")
861 (minus:DI (match_operand:DI 1 "s_register_operand" "")
862 (match_operand:DI 2 "s_register_operand" "")))
863 (clobber (reg:CC CC_REGNUM))])]
864 "TARGET_EITHER"
865 "
866 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
867 && TARGET_32BIT
868 && cirrus_fp_register (operands[0], DImode)
869 && cirrus_fp_register (operands[1], DImode))
870 {
871 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
872 DONE;
873 }
874
875 if (TARGET_THUMB1)
876 {
877 if (GET_CODE (operands[1]) != REG)
878 operands[1] = force_reg (SImode, operands[1]);
879 if (GET_CODE (operands[2]) != REG)
880 operands[2] = force_reg (SImode, operands[2]);
881 }
882 "
883 )
884
885 (define_insn "*arm_subdi3"
886 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
887 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
888 (match_operand:DI 2 "s_register_operand" "r,0,0")))
889 (clobber (reg:CC CC_REGNUM))]
890 "TARGET_32BIT"
891 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
892 [(set_attr "conds" "clob")
893 (set_attr "length" "8")]
894 )
895
896 (define_insn "*thumb_subdi3"
897 [(set (match_operand:DI 0 "register_operand" "=l")
898 (minus:DI (match_operand:DI 1 "register_operand" "0")
899 (match_operand:DI 2 "register_operand" "l")))
900 (clobber (reg:CC CC_REGNUM))]
901 "TARGET_THUMB1"
902 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
903 [(set_attr "length" "4")]
904 )
905
906 (define_insn "*subdi_di_zesidi"
907 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
908 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
909 (zero_extend:DI
910 (match_operand:SI 2 "s_register_operand" "r,r"))))
911 (clobber (reg:CC CC_REGNUM))]
912 "TARGET_32BIT"
913 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
914 [(set_attr "conds" "clob")
915 (set_attr "length" "8")]
916 )
917
918 (define_insn "*subdi_di_sesidi"
919 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
920 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
921 (sign_extend:DI
922 (match_operand:SI 2 "s_register_operand" "r,r"))))
923 (clobber (reg:CC CC_REGNUM))]
924 "TARGET_32BIT"
925 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
926 [(set_attr "conds" "clob")
927 (set_attr "length" "8")]
928 )
929
930 (define_insn "*subdi_zesidi_di"
931 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
932 (minus:DI (zero_extend:DI
933 (match_operand:SI 2 "s_register_operand" "r,r"))
934 (match_operand:DI 1 "s_register_operand" "?r,0")))
935 (clobber (reg:CC CC_REGNUM))]
936 "TARGET_ARM"
937 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
938 [(set_attr "conds" "clob")
939 (set_attr "length" "8")]
940 )
941
942 (define_insn "*subdi_sesidi_di"
943 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
944 (minus:DI (sign_extend:DI
945 (match_operand:SI 2 "s_register_operand" "r,r"))
946 (match_operand:DI 1 "s_register_operand" "?r,0")))
947 (clobber (reg:CC CC_REGNUM))]
948 "TARGET_ARM"
949 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
950 [(set_attr "conds" "clob")
951 (set_attr "length" "8")]
952 )
953
954 (define_insn "*subdi_zesidi_zesidi"
955 [(set (match_operand:DI 0 "s_register_operand" "=r")
956 (minus:DI (zero_extend:DI
957 (match_operand:SI 1 "s_register_operand" "r"))
958 (zero_extend:DI
959 (match_operand:SI 2 "s_register_operand" "r"))))
960 (clobber (reg:CC CC_REGNUM))]
961 "TARGET_32BIT"
962 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
963 [(set_attr "conds" "clob")
964 (set_attr "length" "8")]
965 )
966
967 (define_expand "subsi3"
968 [(set (match_operand:SI 0 "s_register_operand" "")
969 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
970 (match_operand:SI 2 "s_register_operand" "")))]
971 "TARGET_EITHER"
972 "
973 if (GET_CODE (operands[1]) == CONST_INT)
974 {
975 if (TARGET_32BIT)
976 {
977 arm_split_constant (MINUS, SImode, NULL_RTX,
978 INTVAL (operands[1]), operands[0],
979 operands[2], optimize && can_create_pseudo_p ());
980 DONE;
981 }
982 else /* TARGET_THUMB1 */
983 operands[1] = force_reg (SImode, operands[1]);
984 }
985 "
986 )
987
988 (define_insn "*thumb1_subsi3_insn"
989 [(set (match_operand:SI 0 "register_operand" "=l")
990 (minus:SI (match_operand:SI 1 "register_operand" "l")
991 (match_operand:SI 2 "register_operand" "l")))]
992 "TARGET_THUMB1"
993 "sub\\t%0, %1, %2"
994 [(set_attr "length" "2")]
995 )
996
997 ; ??? Check Thumb-2 split length
998 (define_insn_and_split "*arm_subsi3_insn"
999 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1000 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1001 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1002 "TARGET_32BIT"
1003 "@
1004 rsb%?\\t%0, %2, %1
1005 sub%?\\t%0, %1, %2
1006 #"
1007 "TARGET_32BIT
1008 && GET_CODE (operands[1]) == CONST_INT
1009 && !const_ok_for_arm (INTVAL (operands[1]))"
1010 [(clobber (const_int 0))]
1011 "
1012 arm_split_constant (MINUS, SImode, curr_insn,
1013 INTVAL (operands[1]), operands[0], operands[2], 0);
1014 DONE;
1015 "
1016 [(set_attr "length" "4,4,16")
1017 (set_attr "predicable" "yes")]
1018 )
1019
1020 (define_peephole2
1021 [(match_scratch:SI 3 "r")
1022 (set (match_operand:SI 0 "arm_general_register_operand" "")
1023 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1024 (match_operand:SI 2 "arm_general_register_operand" "")))]
1025 "TARGET_32BIT
1026 && !const_ok_for_arm (INTVAL (operands[1]))
1027 && const_ok_for_arm (~INTVAL (operands[1]))"
1028 [(set (match_dup 3) (match_dup 1))
1029 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1030 ""
1031 )
1032
1033 (define_insn "*subsi3_compare0"
1034 [(set (reg:CC_NOOV CC_REGNUM)
1035 (compare:CC_NOOV
1036 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1037 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1038 (const_int 0)))
1039 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1040 (minus:SI (match_dup 1) (match_dup 2)))]
1041 "TARGET_32BIT"
1042 "@
1043 sub%.\\t%0, %1, %2
1044 rsb%.\\t%0, %2, %1"
1045 [(set_attr "conds" "set")]
1046 )
1047
1048 (define_expand "decscc"
1049 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1050 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1051 (match_operator:SI 2 "arm_comparison_operator"
1052 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1053 "TARGET_32BIT"
1054 ""
1055 )
1056
1057 (define_insn "*arm_decscc"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1059 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1060 (match_operator:SI 2 "arm_comparison_operator"
1061 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1062 "TARGET_ARM"
1063 "@
1064 sub%d2\\t%0, %1, #1
1065 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1066 [(set_attr "conds" "use")
1067 (set_attr "length" "*,8")]
1068 )
1069
1070 (define_expand "subsf3"
1071 [(set (match_operand:SF 0 "s_register_operand" "")
1072 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1073 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1074 "TARGET_32BIT && TARGET_HARD_FLOAT"
1075 "
1076 if (TARGET_MAVERICK)
1077 {
1078 if (!cirrus_fp_register (operands[1], SFmode))
1079 operands[1] = force_reg (SFmode, operands[1]);
1080 if (!cirrus_fp_register (operands[2], SFmode))
1081 operands[2] = force_reg (SFmode, operands[2]);
1082 }
1083 ")
1084
1085 (define_expand "subdf3"
1086 [(set (match_operand:DF 0 "s_register_operand" "")
1087 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1088 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1089 "TARGET_32BIT && TARGET_HARD_FLOAT"
1090 "
1091 if (TARGET_MAVERICK)
1092 {
1093 if (!cirrus_fp_register (operands[1], DFmode))
1094 operands[1] = force_reg (DFmode, operands[1]);
1095 if (!cirrus_fp_register (operands[2], DFmode))
1096 operands[2] = force_reg (DFmode, operands[2]);
1097 }
1098 ")
1099
1100 \f
1101 ;; Multiplication insns
1102
1103 (define_expand "mulsi3"
1104 [(set (match_operand:SI 0 "s_register_operand" "")
1105 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1106 (match_operand:SI 1 "s_register_operand" "")))]
1107 "TARGET_EITHER"
1108 ""
1109 )
1110
1111 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same.
1112 (define_insn "*arm_mulsi3"
1113 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1114 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1115 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1116 "TARGET_32BIT && !arm_arch6"
1117 "mul%?\\t%0, %2, %1"
1118 [(set_attr "insn" "mul")
1119 (set_attr "predicable" "yes")]
1120 )
1121
1122 (define_insn "*arm_mulsi3_v6"
1123 [(set (match_operand:SI 0 "s_register_operand" "=r")
1124 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1125 (match_operand:SI 2 "s_register_operand" "r")))]
1126 "TARGET_32BIT && arm_arch6"
1127 "mul%?\\t%0, %1, %2"
1128 [(set_attr "insn" "mul")
1129 (set_attr "predicable" "yes")]
1130 )
1131
1132 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1133 ; 1 and 2 are the same, because reload will make operand 0 match
1134 ; operand 1 without realizing that this conflicts with operand 2. We fix
1135 ; this by adding another alternative to match this case, and then `reload'
1136 ; it ourselves. This alternative must come first.
1137 (define_insn "*thumb_mulsi3"
1138 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1139 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1140 (match_operand:SI 2 "register_operand" "l,l,l")))]
1141 "TARGET_THUMB1 && !arm_arch6"
1142 "*
1143 if (which_alternative < 2)
1144 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1145 else
1146 return \"mul\\t%0, %2\";
1147 "
1148 [(set_attr "length" "4,4,2")
1149 (set_attr "insn" "mul")]
1150 )
1151
1152 (define_insn "*thumb_mulsi3_v6"
1153 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1154 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1155 (match_operand:SI 2 "register_operand" "l,0,0")))]
1156 "TARGET_THUMB1 && arm_arch6"
1157 "@
1158 mul\\t%0, %2
1159 mul\\t%0, %1
1160 mul\\t%0, %1"
1161 [(set_attr "length" "2")
1162 (set_attr "insn" "mul")]
1163 )
1164
1165 (define_insn "*mulsi3_compare0"
1166 [(set (reg:CC_NOOV CC_REGNUM)
1167 (compare:CC_NOOV (mult:SI
1168 (match_operand:SI 2 "s_register_operand" "r,r")
1169 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1170 (const_int 0)))
1171 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1172 (mult:SI (match_dup 2) (match_dup 1)))]
1173 "TARGET_ARM && !arm_arch6"
1174 "mul%.\\t%0, %2, %1"
1175 [(set_attr "conds" "set")
1176 (set_attr "insn" "muls")]
1177 )
1178
1179 (define_insn "*mulsi3_compare0_v6"
1180 [(set (reg:CC_NOOV CC_REGNUM)
1181 (compare:CC_NOOV (mult:SI
1182 (match_operand:SI 2 "s_register_operand" "r")
1183 (match_operand:SI 1 "s_register_operand" "r"))
1184 (const_int 0)))
1185 (set (match_operand:SI 0 "s_register_operand" "=r")
1186 (mult:SI (match_dup 2) (match_dup 1)))]
1187 "TARGET_ARM && arm_arch6 && optimize_size"
1188 "mul%.\\t%0, %2, %1"
1189 [(set_attr "conds" "set")
1190 (set_attr "insn" "muls")]
1191 )
1192
1193 (define_insn "*mulsi_compare0_scratch"
1194 [(set (reg:CC_NOOV CC_REGNUM)
1195 (compare:CC_NOOV (mult:SI
1196 (match_operand:SI 2 "s_register_operand" "r,r")
1197 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1198 (const_int 0)))
1199 (clobber (match_scratch:SI 0 "=&r,&r"))]
1200 "TARGET_ARM && !arm_arch6"
1201 "mul%.\\t%0, %2, %1"
1202 [(set_attr "conds" "set")
1203 (set_attr "insn" "muls")]
1204 )
1205
1206 (define_insn "*mulsi_compare0_scratch_v6"
1207 [(set (reg:CC_NOOV CC_REGNUM)
1208 (compare:CC_NOOV (mult:SI
1209 (match_operand:SI 2 "s_register_operand" "r")
1210 (match_operand:SI 1 "s_register_operand" "r"))
1211 (const_int 0)))
1212 (clobber (match_scratch:SI 0 "=r"))]
1213 "TARGET_ARM && arm_arch6 && optimize_size"
1214 "mul%.\\t%0, %2, %1"
1215 [(set_attr "conds" "set")
1216 (set_attr "insn" "muls")]
1217 )
1218
1219 ;; Unnamed templates to match the MLA instruction.
1220
1221 (define_insn "*mulsi3addsi"
1222 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1223 (plus:SI
1224 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1225 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1226 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1227 "TARGET_32BIT && !arm_arch6"
1228 "mla%?\\t%0, %2, %1, %3"
1229 [(set_attr "insn" "mla")
1230 (set_attr "predicable" "yes")]
1231 )
1232
1233 (define_insn "*mulsi3addsi_v6"
1234 [(set (match_operand:SI 0 "s_register_operand" "=r")
1235 (plus:SI
1236 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1237 (match_operand:SI 1 "s_register_operand" "r"))
1238 (match_operand:SI 3 "s_register_operand" "r")))]
1239 "TARGET_32BIT && arm_arch6"
1240 "mla%?\\t%0, %2, %1, %3"
1241 [(set_attr "insn" "mla")
1242 (set_attr "predicable" "yes")]
1243 )
1244
1245 (define_insn "*mulsi3addsi_compare0"
1246 [(set (reg:CC_NOOV CC_REGNUM)
1247 (compare:CC_NOOV
1248 (plus:SI (mult:SI
1249 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1250 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1251 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1252 (const_int 0)))
1253 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1254 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1255 (match_dup 3)))]
1256 "TARGET_ARM && arm_arch6"
1257 "mla%.\\t%0, %2, %1, %3"
1258 [(set_attr "conds" "set")
1259 (set_attr "insn" "mlas")]
1260 )
1261
1262 (define_insn "*mulsi3addsi_compare0_v6"
1263 [(set (reg:CC_NOOV CC_REGNUM)
1264 (compare:CC_NOOV
1265 (plus:SI (mult:SI
1266 (match_operand:SI 2 "s_register_operand" "r")
1267 (match_operand:SI 1 "s_register_operand" "r"))
1268 (match_operand:SI 3 "s_register_operand" "r"))
1269 (const_int 0)))
1270 (set (match_operand:SI 0 "s_register_operand" "=r")
1271 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1272 (match_dup 3)))]
1273 "TARGET_ARM && arm_arch6 && optimize_size"
1274 "mla%.\\t%0, %2, %1, %3"
1275 [(set_attr "conds" "set")
1276 (set_attr "insn" "mlas")]
1277 )
1278
1279 (define_insn "*mulsi3addsi_compare0_scratch"
1280 [(set (reg:CC_NOOV CC_REGNUM)
1281 (compare:CC_NOOV
1282 (plus:SI (mult:SI
1283 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1284 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1285 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1286 (const_int 0)))
1287 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1288 "TARGET_ARM && !arm_arch6"
1289 "mla%.\\t%0, %2, %1, %3"
1290 [(set_attr "conds" "set")
1291 (set_attr "insn" "mlas")]
1292 )
1293
1294 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1295 [(set (reg:CC_NOOV CC_REGNUM)
1296 (compare:CC_NOOV
1297 (plus:SI (mult:SI
1298 (match_operand:SI 2 "s_register_operand" "r")
1299 (match_operand:SI 1 "s_register_operand" "r"))
1300 (match_operand:SI 3 "s_register_operand" "r"))
1301 (const_int 0)))
1302 (clobber (match_scratch:SI 0 "=r"))]
1303 "TARGET_ARM && arm_arch6 && optimize_size"
1304 "mla%.\\t%0, %2, %1, %3"
1305 [(set_attr "conds" "set")
1306 (set_attr "insn" "mlas")]
1307 )
1308
1309 (define_insn "*mulsi3subsi"
1310 [(set (match_operand:SI 0 "s_register_operand" "=r")
1311 (minus:SI
1312 (match_operand:SI 3 "s_register_operand" "r")
1313 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1314 (match_operand:SI 1 "s_register_operand" "r"))))]
1315 "TARGET_32BIT && arm_arch_thumb2"
1316 "mls%?\\t%0, %2, %1, %3"
1317 [(set_attr "insn" "mla")
1318 (set_attr "predicable" "yes")]
1319 )
1320
1321 ;; Unnamed template to match long long multiply-accumulate (smlal)
1322
1323 (define_insn "*mulsidi3adddi"
1324 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1325 (plus:DI
1326 (mult:DI
1327 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1328 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1329 (match_operand:DI 1 "s_register_operand" "0")))]
1330 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1331 "smlal%?\\t%Q0, %R0, %3, %2"
1332 [(set_attr "insn" "smlal")
1333 (set_attr "predicable" "yes")]
1334 )
1335
1336 (define_insn "*mulsidi3adddi_v6"
1337 [(set (match_operand:DI 0 "s_register_operand" "=r")
1338 (plus:DI
1339 (mult:DI
1340 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1341 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1342 (match_operand:DI 1 "s_register_operand" "0")))]
1343 "TARGET_32BIT && arm_arch6"
1344 "smlal%?\\t%Q0, %R0, %3, %2"
1345 [(set_attr "insn" "smlal")
1346 (set_attr "predicable" "yes")]
1347 )
1348
1349 ;; 32x32->64 widening multiply.
1350 ;; As with mulsi3, the only difference between the v3-5 and v6+
1351 ;; versions of these patterns is the requirement that the output not
1352 ;; overlap the inputs, but that still means we have to have a named
1353 ;; expander and two different starred insns.
1354
1355 (define_expand "mulsidi3"
1356 [(set (match_operand:DI 0 "s_register_operand" "")
1357 (mult:DI
1358 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1359 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1360 "TARGET_32BIT && arm_arch3m"
1361 ""
1362 )
1363
1364 (define_insn "*mulsidi3_nov6"
1365 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1366 (mult:DI
1367 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1368 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1369 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1370 "smull%?\\t%Q0, %R0, %1, %2"
1371 [(set_attr "insn" "smull")
1372 (set_attr "predicable" "yes")]
1373 )
1374
1375 (define_insn "*mulsidi3_v6"
1376 [(set (match_operand:DI 0 "s_register_operand" "=r")
1377 (mult:DI
1378 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1379 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1380 "TARGET_32BIT && arm_arch6"
1381 "smull%?\\t%Q0, %R0, %1, %2"
1382 [(set_attr "insn" "smull")
1383 (set_attr "predicable" "yes")]
1384 )
1385
1386 (define_expand "umulsidi3"
1387 [(set (match_operand:DI 0 "s_register_operand" "")
1388 (mult:DI
1389 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1390 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1391 "TARGET_32BIT && arm_arch3m"
1392 ""
1393 )
1394
1395 (define_insn "*umulsidi3_nov6"
1396 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1397 (mult:DI
1398 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1399 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1400 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1401 "umull%?\\t%Q0, %R0, %1, %2"
1402 [(set_attr "insn" "umull")
1403 (set_attr "predicable" "yes")]
1404 )
1405
1406 (define_insn "*umulsidi3_v6"
1407 [(set (match_operand:DI 0 "s_register_operand" "=r")
1408 (mult:DI
1409 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1410 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1411 "TARGET_32BIT && arm_arch6"
1412 "umull%?\\t%Q0, %R0, %1, %2"
1413 [(set_attr "insn" "umull")
1414 (set_attr "predicable" "yes")]
1415 )
1416
1417 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1418
1419 (define_insn "*umulsidi3adddi"
1420 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1421 (plus:DI
1422 (mult:DI
1423 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1424 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1425 (match_operand:DI 1 "s_register_operand" "0")))]
1426 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1427 "umlal%?\\t%Q0, %R0, %3, %2"
1428 [(set_attr "insn" "umlal")
1429 (set_attr "predicable" "yes")]
1430 )
1431
1432 (define_insn "*umulsidi3adddi_v6"
1433 [(set (match_operand:DI 0 "s_register_operand" "=r")
1434 (plus:DI
1435 (mult:DI
1436 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1437 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1438 (match_operand:DI 1 "s_register_operand" "0")))]
1439 "TARGET_32BIT && arm_arch6"
1440 "umlal%?\\t%Q0, %R0, %3, %2"
1441 [(set_attr "insn" "umlal")
1442 (set_attr "predicable" "yes")]
1443 )
1444
1445 (define_expand "smulsi3_highpart"
1446 [(parallel
1447 [(set (match_operand:SI 0 "s_register_operand" "")
1448 (truncate:SI
1449 (lshiftrt:DI
1450 (mult:DI
1451 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1452 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1453 (const_int 32))))
1454 (clobber (match_scratch:SI 3 ""))])]
1455 "TARGET_32BIT && arm_arch3m"
1456 ""
1457 )
1458
1459 (define_insn "*smulsi3_highpart_nov6"
1460 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1461 (truncate:SI
1462 (lshiftrt:DI
1463 (mult:DI
1464 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1465 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1466 (const_int 32))))
1467 (clobber (match_scratch:SI 3 "=&r,&r"))]
1468 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1469 "smull%?\\t%3, %0, %2, %1"
1470 [(set_attr "insn" "smull")
1471 (set_attr "predicable" "yes")]
1472 )
1473
1474 (define_insn "*smulsi3_highpart_v6"
1475 [(set (match_operand:SI 0 "s_register_operand" "=r")
1476 (truncate:SI
1477 (lshiftrt:DI
1478 (mult:DI
1479 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1480 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1481 (const_int 32))))
1482 (clobber (match_scratch:SI 3 "=r"))]
1483 "TARGET_32BIT && arm_arch6"
1484 "smull%?\\t%3, %0, %2, %1"
1485 [(set_attr "insn" "smull")
1486 (set_attr "predicable" "yes")]
1487 )
1488
1489 (define_expand "umulsi3_highpart"
1490 [(parallel
1491 [(set (match_operand:SI 0 "s_register_operand" "")
1492 (truncate:SI
1493 (lshiftrt:DI
1494 (mult:DI
1495 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1496 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1497 (const_int 32))))
1498 (clobber (match_scratch:SI 3 ""))])]
1499 "TARGET_32BIT && arm_arch3m"
1500 ""
1501 )
1502
1503 (define_insn "*umulsi3_highpart_nov6"
1504 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1505 (truncate:SI
1506 (lshiftrt:DI
1507 (mult:DI
1508 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1509 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1510 (const_int 32))))
1511 (clobber (match_scratch:SI 3 "=&r,&r"))]
1512 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1513 "umull%?\\t%3, %0, %2, %1"
1514 [(set_attr "insn" "umull")
1515 (set_attr "predicable" "yes")]
1516 )
1517
1518 (define_insn "*umulsi3_highpart_v6"
1519 [(set (match_operand:SI 0 "s_register_operand" "=r")
1520 (truncate:SI
1521 (lshiftrt:DI
1522 (mult:DI
1523 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1524 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1525 (const_int 32))))
1526 (clobber (match_scratch:SI 3 "=r"))]
1527 "TARGET_32BIT && arm_arch6"
1528 "umull%?\\t%3, %0, %2, %1"
1529 [(set_attr "insn" "umull")
1530 (set_attr "predicable" "yes")]
1531 )
1532
1533 (define_insn "mulhisi3"
1534 [(set (match_operand:SI 0 "s_register_operand" "=r")
1535 (mult:SI (sign_extend:SI
1536 (match_operand:HI 1 "s_register_operand" "%r"))
1537 (sign_extend:SI
1538 (match_operand:HI 2 "s_register_operand" "r"))))]
1539 "TARGET_DSP_MULTIPLY"
1540 "smulbb%?\\t%0, %1, %2"
1541 [(set_attr "insn" "smulxy")
1542 (set_attr "predicable" "yes")]
1543 )
1544
1545 (define_insn "*mulhisi3tb"
1546 [(set (match_operand:SI 0 "s_register_operand" "=r")
1547 (mult:SI (ashiftrt:SI
1548 (match_operand:SI 1 "s_register_operand" "r")
1549 (const_int 16))
1550 (sign_extend:SI
1551 (match_operand:HI 2 "s_register_operand" "r"))))]
1552 "TARGET_DSP_MULTIPLY"
1553 "smultb%?\\t%0, %1, %2"
1554 [(set_attr "insn" "smulxy")
1555 (set_attr "predicable" "yes")]
1556 )
1557
1558 (define_insn "*mulhisi3bt"
1559 [(set (match_operand:SI 0 "s_register_operand" "=r")
1560 (mult:SI (sign_extend:SI
1561 (match_operand:HI 1 "s_register_operand" "r"))
1562 (ashiftrt:SI
1563 (match_operand:SI 2 "s_register_operand" "r")
1564 (const_int 16))))]
1565 "TARGET_DSP_MULTIPLY"
1566 "smulbt%?\\t%0, %1, %2"
1567 [(set_attr "insn" "smulxy")
1568 (set_attr "predicable" "yes")]
1569 )
1570
1571 (define_insn "*mulhisi3tt"
1572 [(set (match_operand:SI 0 "s_register_operand" "=r")
1573 (mult:SI (ashiftrt:SI
1574 (match_operand:SI 1 "s_register_operand" "r")
1575 (const_int 16))
1576 (ashiftrt:SI
1577 (match_operand:SI 2 "s_register_operand" "r")
1578 (const_int 16))))]
1579 "TARGET_DSP_MULTIPLY"
1580 "smultt%?\\t%0, %1, %2"
1581 [(set_attr "insn" "smulxy")
1582 (set_attr "predicable" "yes")]
1583 )
1584
1585 (define_insn "*mulhisi3addsi"
1586 [(set (match_operand:SI 0 "s_register_operand" "=r")
1587 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1588 (mult:SI (sign_extend:SI
1589 (match_operand:HI 2 "s_register_operand" "%r"))
1590 (sign_extend:SI
1591 (match_operand:HI 3 "s_register_operand" "r")))))]
1592 "TARGET_DSP_MULTIPLY"
1593 "smlabb%?\\t%0, %2, %3, %1"
1594 [(set_attr "insn" "smlaxy")
1595 (set_attr "predicable" "yes")]
1596 )
1597
1598 (define_insn "*mulhidi3adddi"
1599 [(set (match_operand:DI 0 "s_register_operand" "=r")
1600 (plus:DI
1601 (match_operand:DI 1 "s_register_operand" "0")
1602 (mult:DI (sign_extend:DI
1603 (match_operand:HI 2 "s_register_operand" "%r"))
1604 (sign_extend:DI
1605 (match_operand:HI 3 "s_register_operand" "r")))))]
1606 "TARGET_DSP_MULTIPLY"
1607 "smlalbb%?\\t%Q0, %R0, %2, %3"
1608 [(set_attr "insn" "smlalxy")
1609 (set_attr "predicable" "yes")])
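
;; In the smul<x><y>/smla<x><y> mnemonics used above, <x> and <y> select the
;; bottom (b) or top (t) 16-bit halfword of the first and second source
;; register respectively.  For example, smulbb computes (illustrative C):
;;   (int32_t) (int16_t) rn * (int32_t) (int16_t) rm
;; and smlabb/smlalbb additionally accumulate the product into a 32-bit or
;; 64-bit destination.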
1610
1611 (define_expand "mulsf3"
1612 [(set (match_operand:SF 0 "s_register_operand" "")
1613 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1614 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1615 "TARGET_32BIT && TARGET_HARD_FLOAT"
1616 "
1617 if (TARGET_MAVERICK
1618 && !cirrus_fp_register (operands[2], SFmode))
1619 operands[2] = force_reg (SFmode, operands[2]);
1620 ")
1621
1622 (define_expand "muldf3"
1623 [(set (match_operand:DF 0 "s_register_operand" "")
1624 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1625 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1626 "TARGET_32BIT && TARGET_HARD_FLOAT"
1627 "
1628 if (TARGET_MAVERICK
1629 && !cirrus_fp_register (operands[2], DFmode))
1630 operands[2] = force_reg (DFmode, operands[2]);
1631 ")
1632 \f
1633 ;; Division insns
1634
1635 (define_expand "divsf3"
1636 [(set (match_operand:SF 0 "s_register_operand" "")
1637 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1638 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1639 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1640 "")
1641
1642 (define_expand "divdf3"
1643 [(set (match_operand:DF 0 "s_register_operand" "")
1644 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1645 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1646 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1647 "")
1648 \f
1649 ;; Modulo insns
1650
1651 (define_expand "modsf3"
1652 [(set (match_operand:SF 0 "s_register_operand" "")
1653 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1654 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1655 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1656 "")
1657
1658 (define_expand "moddf3"
1659 [(set (match_operand:DF 0 "s_register_operand" "")
1660 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1661 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1662 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1663 "")
1664 \f
1665 ;; Boolean and, ior, xor insns
1666
1667 ;; Split up double word logical operations
1668
1669 ;; Split up simple DImode logical operations. Simply perform the logical
1670 ;; operation on the upper and lower halves of the registers.
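;; For example, a 64-bit AND held in two 32-bit halves becomes (a sketch of
;; the effect, not code that is emitted literally):
;;   lo (dst) = lo (src1) & lo (src2);
;;   hi (dst) = hi (src1) & hi (src2);
;; and likewise for IOR and XOR.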
1671 (define_split
1672 [(set (match_operand:DI 0 "s_register_operand" "")
1673 (match_operator:DI 6 "logical_binary_operator"
1674 [(match_operand:DI 1 "s_register_operand" "")
1675 (match_operand:DI 2 "s_register_operand" "")]))]
1676 "TARGET_32BIT && reload_completed
1677 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1678 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1679 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1680 "
1681 {
1682 operands[3] = gen_highpart (SImode, operands[0]);
1683 operands[0] = gen_lowpart (SImode, operands[0]);
1684 operands[4] = gen_highpart (SImode, operands[1]);
1685 operands[1] = gen_lowpart (SImode, operands[1]);
1686 operands[5] = gen_highpart (SImode, operands[2]);
1687 operands[2] = gen_lowpart (SImode, operands[2]);
1688 }"
1689 )
1690
1691 (define_split
1692 [(set (match_operand:DI 0 "s_register_operand" "")
1693 (match_operator:DI 6 "logical_binary_operator"
1694 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1695 (match_operand:DI 1 "s_register_operand" "")]))]
1696 "TARGET_32BIT && reload_completed"
1697 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1698 (set (match_dup 3) (match_op_dup:SI 6
1699 [(ashiftrt:SI (match_dup 2) (const_int 31))
1700 (match_dup 4)]))]
1701 "
1702 {
1703 operands[3] = gen_highpart (SImode, operands[0]);
1704 operands[0] = gen_lowpart (SImode, operands[0]);
1705 operands[4] = gen_highpart (SImode, operands[1]);
1706 operands[1] = gen_lowpart (SImode, operands[1]);
1707 operands[5] = gen_highpart (SImode, operands[2]);
1708 operands[2] = gen_lowpart (SImode, operands[2]);
1709 }"
1710 )
1711
1712 ;; The zero extend of operand 2 means we can just copy the high part of
1713 ;; operand1 into operand0.
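;; (In effect: lo (op0) = lo (op1) | op2; hi (op0) = hi (op1).)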
1714 (define_split
1715 [(set (match_operand:DI 0 "s_register_operand" "")
1716 (ior:DI
1717 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1718 (match_operand:DI 1 "s_register_operand" "")))]
1719 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1720 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1721 (set (match_dup 3) (match_dup 4))]
1722 "
1723 {
1724 operands[4] = gen_highpart (SImode, operands[1]);
1725 operands[3] = gen_highpart (SImode, operands[0]);
1726 operands[0] = gen_lowpart (SImode, operands[0]);
1727 operands[1] = gen_lowpart (SImode, operands[1]);
1728 }"
1729 )
1730
1731 ;; The zero extend of operand 2 means we can just copy the high part of
1732 ;; operand1 into operand0.
1733 (define_split
1734 [(set (match_operand:DI 0 "s_register_operand" "")
1735 (xor:DI
1736 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1737 (match_operand:DI 1 "s_register_operand" "")))]
1738 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1739 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1740 (set (match_dup 3) (match_dup 4))]
1741 "
1742 {
1743 operands[4] = gen_highpart (SImode, operands[1]);
1744 operands[3] = gen_highpart (SImode, operands[0]);
1745 operands[0] = gen_lowpart (SImode, operands[0]);
1746 operands[1] = gen_lowpart (SImode, operands[1]);
1747 }"
1748 )
1749
1750 (define_insn "anddi3"
1751 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1752 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1753 (match_operand:DI 2 "s_register_operand" "r,r")))]
1754 "TARGET_32BIT && ! TARGET_IWMMXT"
1755 "#"
1756 [(set_attr "length" "8")]
1757 )
1758
1759 (define_insn_and_split "*anddi_zesidi_di"
1760 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1761 (and:DI (zero_extend:DI
1762 (match_operand:SI 2 "s_register_operand" "r,r"))
1763 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1764 "TARGET_32BIT"
1765 "#"
1766 "TARGET_32BIT && reload_completed"
1767 ; The zero extend of operand 2 clears the high word of the output
1768 ; operand.
1769 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1770 (set (match_dup 3) (const_int 0))]
1771 "
1772 {
1773 operands[3] = gen_highpart (SImode, operands[0]);
1774 operands[0] = gen_lowpart (SImode, operands[0]);
1775 operands[1] = gen_lowpart (SImode, operands[1]);
1776 }"
1777 [(set_attr "length" "8")]
1778 )
1779
1780 (define_insn "*anddi_sesdi_di"
1781 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1782 (and:DI (sign_extend:DI
1783 (match_operand:SI 2 "s_register_operand" "r,r"))
1784 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1785 "TARGET_32BIT"
1786 "#"
1787 [(set_attr "length" "8")]
1788 )
1789
1790 (define_expand "andsi3"
1791 [(set (match_operand:SI 0 "s_register_operand" "")
1792 (and:SI (match_operand:SI 1 "s_register_operand" "")
1793 (match_operand:SI 2 "reg_or_int_operand" "")))]
1794 "TARGET_EITHER"
1795 "
1796 if (TARGET_32BIT)
1797 {
1798 if (GET_CODE (operands[2]) == CONST_INT)
1799 {
1800 arm_split_constant (AND, SImode, NULL_RTX,
1801 INTVAL (operands[2]), operands[0],
1802 operands[1], optimize && can_create_pseudo_p ());
1803
1804 DONE;
1805 }
1806 }
1807 else /* TARGET_THUMB1 */
1808 {
1809 if (GET_CODE (operands[2]) != CONST_INT)
1810 operands[2] = force_reg (SImode, operands[2]);
1811 else
1812 {
1813 int i;
1814
1815 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1816 {
1817 operands[2] = force_reg (SImode,
1818 GEN_INT (~INTVAL (operands[2])));
1819
1820 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1821
1822 DONE;
1823 }
1824
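	  /* Look for a mask of the I low bits: an AND with (1 << i) - 1 can
	     be done as a zero-extract, and an AND with ~((1 << i) - 1) as a
	     pair of shifts that clear the low bits.  */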
1825 for (i = 9; i <= 31; i++)
1826 {
1827 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1828 {
1829 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1830 const0_rtx));
1831 DONE;
1832 }
1833 else if ((((HOST_WIDE_INT) 1) << i) - 1
1834 == ~INTVAL (operands[2]))
1835 {
1836 rtx shift = GEN_INT (i);
1837 rtx reg = gen_reg_rtx (SImode);
1838
1839 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1840 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1841
1842 DONE;
1843 }
1844 }
1845
1846 operands[2] = force_reg (SImode, operands[2]);
1847 }
1848 }
1849 "
1850 )
1851
1852 ; ??? Check split length for Thumb-2
1853 (define_insn_and_split "*arm_andsi3_insn"
1854 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1855 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1856 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1857 "TARGET_32BIT"
1858 "@
1859 and%?\\t%0, %1, %2
1860 bic%?\\t%0, %1, #%B2
1861 #"
1862 "TARGET_32BIT
1863 && GET_CODE (operands[2]) == CONST_INT
1864 && !(const_ok_for_arm (INTVAL (operands[2]))
1865 || const_ok_for_arm (~INTVAL (operands[2])))"
1866 [(clobber (const_int 0))]
1867 "
1868 arm_split_constant (AND, SImode, curr_insn,
1869 INTVAL (operands[2]), operands[0], operands[1], 0);
1870 DONE;
1871 "
1872 [(set_attr "length" "4,4,16")
1873 (set_attr "predicable" "yes")]
1874 )
1875
1876 (define_insn "*thumb1_andsi3_insn"
1877 [(set (match_operand:SI 0 "register_operand" "=l")
1878 (and:SI (match_operand:SI 1 "register_operand" "%0")
1879 (match_operand:SI 2 "register_operand" "l")))]
1880 "TARGET_THUMB1"
1881 "and\\t%0, %0, %2"
1882 [(set_attr "length" "2")]
1883 )
1884
1885 (define_insn "*andsi3_compare0"
1886 [(set (reg:CC_NOOV CC_REGNUM)
1887 (compare:CC_NOOV
1888 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1889 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1890 (const_int 0)))
1891 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1892 (and:SI (match_dup 1) (match_dup 2)))]
1893 "TARGET_32BIT"
1894 "@
1895 and%.\\t%0, %1, %2
1896 bic%.\\t%0, %1, #%B2"
1897 [(set_attr "conds" "set")]
1898 )
1899
1900 (define_insn "*andsi3_compare0_scratch"
1901 [(set (reg:CC_NOOV CC_REGNUM)
1902 (compare:CC_NOOV
1903 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1904 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1905 (const_int 0)))
1906 (clobber (match_scratch:SI 2 "=X,r"))]
1907 "TARGET_32BIT"
1908 "@
1909 tst%?\\t%0, %1
1910 bic%.\\t%2, %0, #%B1"
1911 [(set_attr "conds" "set")]
1912 )
1913
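;; Comparing a bitfield against zero only needs a TST against the
;; corresponding mask, ((1 << width) - 1) << start.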
1914 (define_insn "*zeroextractsi_compare0_scratch"
1915 [(set (reg:CC_NOOV CC_REGNUM)
1916 (compare:CC_NOOV (zero_extract:SI
1917 (match_operand:SI 0 "s_register_operand" "r")
1918 (match_operand 1 "const_int_operand" "n")
1919 (match_operand 2 "const_int_operand" "n"))
1920 (const_int 0)))]
1921 "TARGET_32BIT
1922 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1923 && INTVAL (operands[1]) > 0
1924 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1925 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1926 "*
1927 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1928 << INTVAL (operands[2]));
1929 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1930 return \"\";
1931 "
1932 [(set_attr "conds" "set")]
1933 )
1934
1935 (define_insn_and_split "*ne_zeroextractsi"
1936 [(set (match_operand:SI 0 "s_register_operand" "=r")
1937 (ne:SI (zero_extract:SI
1938 (match_operand:SI 1 "s_register_operand" "r")
1939 (match_operand:SI 2 "const_int_operand" "n")
1940 (match_operand:SI 3 "const_int_operand" "n"))
1941 (const_int 0)))
1942 (clobber (reg:CC CC_REGNUM))]
1943 "TARGET_32BIT
1944 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1945 && INTVAL (operands[2]) > 0
1946 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1947 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1948 "#"
1949 "TARGET_32BIT
1950 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1951 && INTVAL (operands[2]) > 0
1952 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1953 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1954 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1955 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1956 (const_int 0)))
1957 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1958 (set (match_dup 0)
1959 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1960 (match_dup 0) (const_int 1)))]
1961 "
1962 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1963 << INTVAL (operands[3]));
1964 "
1965 [(set_attr "conds" "clob")
1966 (set (attr "length")
1967 (if_then_else (eq_attr "is_thumb" "yes")
1968 (const_int 12)
1969 (const_int 8)))]
1970 )
1971
1972 (define_insn_and_split "*ne_zeroextractsi_shifted"
1973 [(set (match_operand:SI 0 "s_register_operand" "=r")
1974 (ne:SI (zero_extract:SI
1975 (match_operand:SI 1 "s_register_operand" "r")
1976 (match_operand:SI 2 "const_int_operand" "n")
1977 (const_int 0))
1978 (const_int 0)))
1979 (clobber (reg:CC CC_REGNUM))]
1980 "TARGET_ARM"
1981 "#"
1982 "TARGET_ARM"
1983 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1984 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1985 (const_int 0)))
1986 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1987 (set (match_dup 0)
1988 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1989 (match_dup 0) (const_int 1)))]
1990 "
1991 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1992 "
1993 [(set_attr "conds" "clob")
1994 (set_attr "length" "8")]
1995 )
1996
1997 (define_insn_and_split "*ite_ne_zeroextractsi"
1998 [(set (match_operand:SI 0 "s_register_operand" "=r")
1999 (if_then_else:SI (ne (zero_extract:SI
2000 (match_operand:SI 1 "s_register_operand" "r")
2001 (match_operand:SI 2 "const_int_operand" "n")
2002 (match_operand:SI 3 "const_int_operand" "n"))
2003 (const_int 0))
2004 (match_operand:SI 4 "arm_not_operand" "rIK")
2005 (const_int 0)))
2006 (clobber (reg:CC CC_REGNUM))]
2007 "TARGET_ARM
2008 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2009 && INTVAL (operands[2]) > 0
2010 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2011 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2012 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2013 "#"
2014 "TARGET_ARM
2015 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2016 && INTVAL (operands[2]) > 0
2017 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2018 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2019 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2020 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2021 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2022 (const_int 0)))
2023 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2024 (set (match_dup 0)
2025 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2026 (match_dup 0) (match_dup 4)))]
2027 "
2028 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2029 << INTVAL (operands[3]));
2030 "
2031 [(set_attr "conds" "clob")
2032 (set_attr "length" "8")]
2033 )
2034
2035 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2036 [(set (match_operand:SI 0 "s_register_operand" "=r")
2037 (if_then_else:SI (ne (zero_extract:SI
2038 (match_operand:SI 1 "s_register_operand" "r")
2039 (match_operand:SI 2 "const_int_operand" "n")
2040 (const_int 0))
2041 (const_int 0))
2042 (match_operand:SI 3 "arm_not_operand" "rIK")
2043 (const_int 0)))
2044 (clobber (reg:CC CC_REGNUM))]
2045 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2046 "#"
2047 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2048 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2049 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2050 (const_int 0)))
2051 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2052 (set (match_dup 0)
2053 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2054 (match_dup 0) (match_dup 3)))]
2055 "
2056 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2057 "
2058 [(set_attr "conds" "clob")
2059 (set_attr "length" "8")]
2060 )
2061
2062 (define_split
2063 [(set (match_operand:SI 0 "s_register_operand" "")
2064 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2065 (match_operand:SI 2 "const_int_operand" "")
2066 (match_operand:SI 3 "const_int_operand" "")))
2067 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2068 "TARGET_THUMB1"
2069 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2070 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2071 "{
2072 HOST_WIDE_INT temp = INTVAL (operands[2]);
2073
2074 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2075 operands[3] = GEN_INT (32 - temp);
2076 }"
2077 )
2078
2079 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
2080 (define_split
2081 [(set (match_operand:SI 0 "s_register_operand" "")
2082 (match_operator:SI 1 "shiftable_operator"
2083 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2084 (match_operand:SI 3 "const_int_operand" "")
2085 (match_operand:SI 4 "const_int_operand" ""))
2086 (match_operand:SI 5 "s_register_operand" "")]))
2087 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2088 "TARGET_ARM"
2089 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2090 (set (match_dup 0)
2091 (match_op_dup 1
2092 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2093 (match_dup 5)]))]
2094 "{
2095 HOST_WIDE_INT temp = INTVAL (operands[3]);
2096
2097 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2098 operands[4] = GEN_INT (32 - temp);
2099 }"
2100 )
2101
2102 (define_split
2103 [(set (match_operand:SI 0 "s_register_operand" "")
2104 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2105 (match_operand:SI 2 "const_int_operand" "")
2106 (match_operand:SI 3 "const_int_operand" "")))]
2107 "TARGET_THUMB1"
2108 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2109 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2110 "{
2111 HOST_WIDE_INT temp = INTVAL (operands[2]);
2112
2113 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2114 operands[3] = GEN_INT (32 - temp);
2115 }"
2116 )
2117
2118 (define_split
2119 [(set (match_operand:SI 0 "s_register_operand" "")
2120 (match_operator:SI 1 "shiftable_operator"
2121 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2122 (match_operand:SI 3 "const_int_operand" "")
2123 (match_operand:SI 4 "const_int_operand" ""))
2124 (match_operand:SI 5 "s_register_operand" "")]))
2125 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2126 "TARGET_ARM"
2127 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2128 (set (match_dup 0)
2129 (match_op_dup 1
2130 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2131 (match_dup 5)]))]
2132 "{
2133 HOST_WIDE_INT temp = INTVAL (operands[3]);
2134
2135 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2136 operands[4] = GEN_INT (32 - temp);
2137 }"
2138 )
2139
2140 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2141 ;;; represented by the bitfield, then this will produce incorrect results.
2142 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2143 ;;; which have a real bit-field insert instruction, the truncation happens
2144 ;;; in the bit-field insert instruction itself. Since the ARM does not have a
2145 ;;; bit-field insert instruction, we would have to emit code here to truncate
2146 ;;; the value before we insert. This loses some of the advantage of having
2147 ;;; this insv pattern, so this pattern needs to be reevaluated.
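;;; A correct insertion would, in effect, truncate the value first
;;; (illustrative C only, not what the expander below emits):
;;;   mask = (((HOST_WIDE_INT) 1) << width) - 1;
;;;   dst = (dst & ~(mask << start)) | ((value & mask) << start);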
2148
2149 (define_expand "insv"
2150 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2151 (match_operand:SI 1 "general_operand" "")
2152 (match_operand:SI 2 "general_operand" ""))
2153 (match_operand:SI 3 "reg_or_int_operand" ""))]
2154 "TARGET_ARM || arm_arch_thumb2"
2155 "
2156 {
2157 int start_bit = INTVAL (operands[2]);
2158 int width = INTVAL (operands[1]);
2159 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2160 rtx target, subtarget;
2161
2162 if (arm_arch_thumb2)
2163 {
2164 bool use_bfi = TRUE;
2165
2166 if (GET_CODE (operands[3]) == CONST_INT)
2167 {
2168 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2169
2170 if (val == 0)
2171 {
2172 emit_insn (gen_insv_zero (operands[0], operands[1],
2173 operands[2]));
2174 DONE;
2175 }
2176
2177 /* See if the set can be done with a single orr instruction. */
2178 if (val == mask && const_ok_for_arm (val << start_bit))
2179 use_bfi = FALSE;
2180 }
2181
2182 if (use_bfi)
2183 {
2184 if (GET_CODE (operands[3]) != REG)
2185 operands[3] = force_reg (SImode, operands[3]);
2186
2187 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2188 operands[3]));
2189 DONE;
2190 }
2191 }
2192
2193 target = operands[0];
2194 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2195 subreg as the final target. */
2196 if (GET_CODE (target) == SUBREG)
2197 {
2198 subtarget = gen_reg_rtx (SImode);
2199 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2200 < GET_MODE_SIZE (SImode))
2201 target = SUBREG_REG (target);
2202 }
2203 else
2204 subtarget = target;
2205
2206 if (GET_CODE (operands[3]) == CONST_INT)
2207 {
2208 /* Since we are inserting a known constant, we may be able to
2209 reduce the number of bits that we have to clear so that
2210 the mask becomes simple. */
2211 /* ??? This code does not check to see if the new mask is actually
2212 simpler. It may not be. */
2213 rtx op1 = gen_reg_rtx (SImode);
2214 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2215 start of this pattern. */
2216 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2217 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2218
2219 emit_insn (gen_andsi3 (op1, operands[0],
2220 gen_int_mode (~mask2, SImode)));
2221 emit_insn (gen_iorsi3 (subtarget, op1,
2222 gen_int_mode (op3_value << start_bit, SImode)));
2223 }
2224 else if (start_bit == 0
2225 && !(const_ok_for_arm (mask)
2226 || const_ok_for_arm (~mask)))
2227 {
2228 /* A trick: since we are setting the bottom bits in the word,
2229 we can shift operand[3] up, operand[0] down, OR them together
2230 and rotate the result back again. This takes 3 insns, and
2231 the third might be mergeable into another op. */
2232 /* The shift up copes with the possibility that operand[3] is
2233 wider than the bitfield. */
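	 /* Sketch of the sequence built below (start_bit is zero in this
	    branch):
	      op0 = operands[3] << (32 - width);
	      op1 = operands[0] >> width;              (logical shift)
	      result = rotate_left (op1 | op0, width);  */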
2234 rtx op0 = gen_reg_rtx (SImode);
2235 rtx op1 = gen_reg_rtx (SImode);
2236
2237 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2238 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2239 emit_insn (gen_iorsi3 (op1, op1, op0));
2240 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2241 }
2242 else if ((width + start_bit == 32)
2243 && !(const_ok_for_arm (mask)
2244 || const_ok_for_arm (~mask)))
2245 {
2246 /* Similar trick, but slightly less efficient. */
2247
2248 rtx op0 = gen_reg_rtx (SImode);
2249 rtx op1 = gen_reg_rtx (SImode);
2250
2251 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2252 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2253 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2254 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2255 }
2256 else
2257 {
2258 rtx op0 = gen_int_mode (mask, SImode);
2259 rtx op1 = gen_reg_rtx (SImode);
2260 rtx op2 = gen_reg_rtx (SImode);
2261
2262 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2263 {
2264 rtx tmp = gen_reg_rtx (SImode);
2265
2266 emit_insn (gen_movsi (tmp, op0));
2267 op0 = tmp;
2268 }
2269
2270 /* Mask out any bits in operand[3] that are not needed. */
2271 emit_insn (gen_andsi3 (op1, operands[3], op0));
2272
2273 if (GET_CODE (op0) == CONST_INT
2274 && (const_ok_for_arm (mask << start_bit)
2275 || const_ok_for_arm (~(mask << start_bit))))
2276 {
2277 op0 = gen_int_mode (~(mask << start_bit), SImode);
2278 emit_insn (gen_andsi3 (op2, operands[0], op0));
2279 }
2280 else
2281 {
2282 if (GET_CODE (op0) == CONST_INT)
2283 {
2284 rtx tmp = gen_reg_rtx (SImode);
2285
2286 emit_insn (gen_movsi (tmp, op0));
2287 op0 = tmp;
2288 }
2289
2290 if (start_bit != 0)
2291 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2292
2293 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2294 }
2295
2296 if (start_bit != 0)
2297 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2298
2299 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2300 }
2301
2302 if (subtarget != target)
2303 {
2304 /* If TARGET is still a SUBREG, then it must be wider than a word,
2305 so we must be careful only to set the subword we were asked to. */
2306 if (GET_CODE (target) == SUBREG)
2307 emit_move_insn (target, subtarget);
2308 else
2309 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2310 }
2311
2312 DONE;
2313 }"
2314 )
2315
2316 (define_insn "insv_zero"
2317 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2318 (match_operand:SI 1 "const_int_operand" "M")
2319 (match_operand:SI 2 "const_int_operand" "M"))
2320 (const_int 0))]
2321 "arm_arch_thumb2"
2322 "bfc%?\t%0, %2, %1"
2323 [(set_attr "length" "4")
2324 (set_attr "predicable" "yes")]
2325 )
2326
2327 (define_insn "insv_t2"
2328 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2329 (match_operand:SI 1 "const_int_operand" "M")
2330 (match_operand:SI 2 "const_int_operand" "M"))
2331 (match_operand:SI 3 "s_register_operand" "r"))]
2332 "arm_arch_thumb2"
2333 "bfi%?\t%0, %3, %2, %1"
2334 [(set_attr "length" "4")
2335 (set_attr "predicable" "yes")]
2336 )
2337
2338 ; Constants for op 2 will never be given to these patterns.
2339 (define_insn_and_split "*anddi_notdi_di"
2340 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2341 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2342 (match_operand:DI 2 "s_register_operand" "0,r")))]
2343 "TARGET_32BIT"
2344 "#"
2345 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2346 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2347 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2348 "
2349 {
2350 operands[3] = gen_highpart (SImode, operands[0]);
2351 operands[0] = gen_lowpart (SImode, operands[0]);
2352 operands[4] = gen_highpart (SImode, operands[1]);
2353 operands[1] = gen_lowpart (SImode, operands[1]);
2354 operands[5] = gen_highpart (SImode, operands[2]);
2355 operands[2] = gen_lowpart (SImode, operands[2]);
2356 }"
2357 [(set_attr "length" "8")
2358 (set_attr "predicable" "yes")]
2359 )
2360
2361 (define_insn_and_split "*anddi_notzesidi_di"
2362 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2363 (and:DI (not:DI (zero_extend:DI
2364 (match_operand:SI 2 "s_register_operand" "r,r")))
2365 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2366 "TARGET_32BIT"
2367 "@
2368 bic%?\\t%Q0, %Q1, %2
2369 #"
2370 ; (not (zero_extend ...)) allows us to just copy the high word from
2371 ; operand1 to operand0.
2372 "TARGET_32BIT
2373 && reload_completed
2374 && operands[0] != operands[1]"
2375 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2376 (set (match_dup 3) (match_dup 4))]
2377 "
2378 {
2379 operands[3] = gen_highpart (SImode, operands[0]);
2380 operands[0] = gen_lowpart (SImode, operands[0]);
2381 operands[4] = gen_highpart (SImode, operands[1]);
2382 operands[1] = gen_lowpart (SImode, operands[1]);
2383 }"
2384 [(set_attr "length" "4,8")
2385 (set_attr "predicable" "yes")]
2386 )
2387
2388 (define_insn_and_split "*anddi_notsesidi_di"
2389 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2390 (and:DI (not:DI (sign_extend:DI
2391 (match_operand:SI 2 "s_register_operand" "r,r")))
2392 (match_operand:DI 1 "s_register_operand" "0,r")))]
2393 "TARGET_32BIT"
2394 "#"
2395 "TARGET_32BIT && reload_completed"
2396 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2397 (set (match_dup 3) (and:SI (not:SI
2398 (ashiftrt:SI (match_dup 2) (const_int 31)))
2399 (match_dup 4)))]
2400 "
2401 {
2402 operands[3] = gen_highpart (SImode, operands[0]);
2403 operands[0] = gen_lowpart (SImode, operands[0]);
2404 operands[4] = gen_highpart (SImode, operands[1]);
2405 operands[1] = gen_lowpart (SImode, operands[1]);
2406 }"
2407 [(set_attr "length" "8")
2408 (set_attr "predicable" "yes")]
2409 )
2410
2411 (define_insn "andsi_notsi_si"
2412 [(set (match_operand:SI 0 "s_register_operand" "=r")
2413 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2414 (match_operand:SI 1 "s_register_operand" "r")))]
2415 "TARGET_32BIT"
2416 "bic%?\\t%0, %1, %2"
2417 [(set_attr "predicable" "yes")]
2418 )
2419
2420 (define_insn "bicsi3"
2421 [(set (match_operand:SI 0 "register_operand" "=l")
2422 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2423 (match_operand:SI 2 "register_operand" "0")))]
2424 "TARGET_THUMB1"
2425 "bic\\t%0, %0, %1"
2426 [(set_attr "length" "2")]
2427 )
2428
2429 (define_insn "andsi_not_shiftsi_si"
2430 [(set (match_operand:SI 0 "s_register_operand" "=r")
2431 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2432 [(match_operand:SI 2 "s_register_operand" "r")
2433 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2434 (match_operand:SI 1 "s_register_operand" "r")))]
2435 "TARGET_ARM"
2436 "bic%?\\t%0, %1, %2%S4"
2437 [(set_attr "predicable" "yes")
2438 (set_attr "shift" "2")
2439 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2440 (const_string "alu_shift")
2441 (const_string "alu_shift_reg")))]
2442 )
2443
2444 (define_insn "*andsi_notsi_si_compare0"
2445 [(set (reg:CC_NOOV CC_REGNUM)
2446 (compare:CC_NOOV
2447 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2448 (match_operand:SI 1 "s_register_operand" "r"))
2449 (const_int 0)))
2450 (set (match_operand:SI 0 "s_register_operand" "=r")
2451 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2452 "TARGET_32BIT"
2453 "bic%.\\t%0, %1, %2"
2454 [(set_attr "conds" "set")]
2455 )
2456
2457 (define_insn "*andsi_notsi_si_compare0_scratch"
2458 [(set (reg:CC_NOOV CC_REGNUM)
2459 (compare:CC_NOOV
2460 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2461 (match_operand:SI 1 "s_register_operand" "r"))
2462 (const_int 0)))
2463 (clobber (match_scratch:SI 0 "=r"))]
2464 "TARGET_32BIT"
2465 "bic%.\\t%0, %1, %2"
2466 [(set_attr "conds" "set")]
2467 )
2468
2469 (define_insn "iordi3"
2470 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2471 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2472 (match_operand:DI 2 "s_register_operand" "r,r")))]
2473 "TARGET_32BIT && ! TARGET_IWMMXT"
2474 "#"
2475 [(set_attr "length" "8")
2476 (set_attr "predicable" "yes")]
2477 )
2478
2479 (define_insn "*iordi_zesidi_di"
2480 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2481 (ior:DI (zero_extend:DI
2482 (match_operand:SI 2 "s_register_operand" "r,r"))
2483 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2484 "TARGET_32BIT"
2485 "@
2486 orr%?\\t%Q0, %Q1, %2
2487 #"
2488 [(set_attr "length" "4,8")
2489 (set_attr "predicable" "yes")]
2490 )
2491
2492 (define_insn "*iordi_sesidi_di"
2493 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2494 (ior:DI (sign_extend:DI
2495 (match_operand:SI 2 "s_register_operand" "r,r"))
2496 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2497 "TARGET_32BIT"
2498 "#"
2499 [(set_attr "length" "8")
2500 (set_attr "predicable" "yes")]
2501 )
2502
2503 (define_expand "iorsi3"
2504 [(set (match_operand:SI 0 "s_register_operand" "")
2505 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2506 (match_operand:SI 2 "reg_or_int_operand" "")))]
2507 "TARGET_EITHER"
2508 "
2509 if (GET_CODE (operands[2]) == CONST_INT)
2510 {
2511 if (TARGET_32BIT)
2512 {
2513 arm_split_constant (IOR, SImode, NULL_RTX,
2514 INTVAL (operands[2]), operands[0], operands[1],
2515 optimize && can_create_pseudo_p ());
2516 DONE;
2517 }
2518 else /* TARGET_THUMB1 */
2519 operands [2] = force_reg (SImode, operands [2]);
2520 }
2521 "
2522 )
2523
2524 (define_insn_and_split "*arm_iorsi3"
2525 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2526 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2527 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2528 "TARGET_32BIT"
2529 "@
2530 orr%?\\t%0, %1, %2
2531 #"
2532 "TARGET_32BIT
2533 && GET_CODE (operands[2]) == CONST_INT
2534 && !const_ok_for_arm (INTVAL (operands[2]))"
2535 [(clobber (const_int 0))]
2536 "
2537 arm_split_constant (IOR, SImode, curr_insn,
2538 INTVAL (operands[2]), operands[0], operands[1], 0);
2539 DONE;
2540 "
2541 [(set_attr "length" "4,16")
2542 (set_attr "predicable" "yes")]
2543 )
2544
2545 (define_insn "*thumb1_iorsi3"
2546 [(set (match_operand:SI 0 "register_operand" "=l")
2547 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2548 (match_operand:SI 2 "register_operand" "l")))]
2549 "TARGET_THUMB1"
2550 "orr\\t%0, %0, %2"
2551 [(set_attr "length" "2")]
2552 )
2553
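;; If the constant is not a valid immediate for ORR but its complement is,
;; materialize it in a scratch register first (typically a single MVN) and
;; then use a register ORR.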
2554 (define_peephole2
2555 [(match_scratch:SI 3 "r")
2556 (set (match_operand:SI 0 "arm_general_register_operand" "")
2557 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2558 (match_operand:SI 2 "const_int_operand" "")))]
2559 "TARGET_32BIT
2560 && !const_ok_for_arm (INTVAL (operands[2]))
2561 && const_ok_for_arm (~INTVAL (operands[2]))"
2562 [(set (match_dup 3) (match_dup 2))
2563 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2564 ""
2565 )
2566
2567 (define_insn "*iorsi3_compare0"
2568 [(set (reg:CC_NOOV CC_REGNUM)
2569 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2570 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2571 (const_int 0)))
2572 (set (match_operand:SI 0 "s_register_operand" "=r")
2573 (ior:SI (match_dup 1) (match_dup 2)))]
2574 "TARGET_32BIT"
2575 "orr%.\\t%0, %1, %2"
2576 [(set_attr "conds" "set")]
2577 )
2578
2579 (define_insn "*iorsi3_compare0_scratch"
2580 [(set (reg:CC_NOOV CC_REGNUM)
2581 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2582 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2583 (const_int 0)))
2584 (clobber (match_scratch:SI 0 "=r"))]
2585 "TARGET_32BIT"
2586 "orr%.\\t%0, %1, %2"
2587 [(set_attr "conds" "set")]
2588 )
2589
2590 (define_insn "xordi3"
2591 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2592 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2593 (match_operand:DI 2 "s_register_operand" "r,r")))]
2594 "TARGET_32BIT && !TARGET_IWMMXT"
2595 "#"
2596 [(set_attr "length" "8")
2597 (set_attr "predicable" "yes")]
2598 )
2599
2600 (define_insn "*xordi_zesidi_di"
2601 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2602 (xor:DI (zero_extend:DI
2603 (match_operand:SI 2 "s_register_operand" "r,r"))
2604 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2605 "TARGET_32BIT"
2606 "@
2607 eor%?\\t%Q0, %Q1, %2
2608 #"
2609 [(set_attr "length" "4,8")
2610 (set_attr "predicable" "yes")]
2611 )
2612
2613 (define_insn "*xordi_sesidi_di"
2614 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2615 (xor:DI (sign_extend:DI
2616 (match_operand:SI 2 "s_register_operand" "r,r"))
2617 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2618 "TARGET_32BIT"
2619 "#"
2620 [(set_attr "length" "8")
2621 (set_attr "predicable" "yes")]
2622 )
2623
2624 (define_expand "xorsi3"
2625 [(set (match_operand:SI 0 "s_register_operand" "")
2626 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2627 (match_operand:SI 2 "arm_rhs_operand" "")))]
2628 "TARGET_EITHER"
2629 "if (TARGET_THUMB1)
2630 if (GET_CODE (operands[2]) == CONST_INT)
2631 operands[2] = force_reg (SImode, operands[2]);
2632 "
2633 )
2634
2635 (define_insn "*arm_xorsi3"
2636 [(set (match_operand:SI 0 "s_register_operand" "=r")
2637 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2638 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2639 "TARGET_32BIT"
2640 "eor%?\\t%0, %1, %2"
2641 [(set_attr "predicable" "yes")]
2642 )
2643
2644 (define_insn "*thumb1_xorsi3"
2645 [(set (match_operand:SI 0 "register_operand" "=l")
2646 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2647 (match_operand:SI 2 "register_operand" "l")))]
2648 "TARGET_THUMB1"
2649 "eor\\t%0, %0, %2"
2650 [(set_attr "length" "2")]
2651 )
2652
2653 (define_insn "*xorsi3_compare0"
2654 [(set (reg:CC_NOOV CC_REGNUM)
2655 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2656 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2657 (const_int 0)))
2658 (set (match_operand:SI 0 "s_register_operand" "=r")
2659 (xor:SI (match_dup 1) (match_dup 2)))]
2660 "TARGET_32BIT"
2661 "eor%.\\t%0, %1, %2"
2662 [(set_attr "conds" "set")]
2663 )
2664
2665 (define_insn "*xorsi3_compare0_scratch"
2666 [(set (reg:CC_NOOV CC_REGNUM)
2667 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2668 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2669 (const_int 0)))]
2670 "TARGET_32BIT"
2671 "teq%?\\t%0, %1"
2672 [(set_attr "conds" "set")]
2673 )
2674
2675 ; By De Morgan's laws, (IOR (AND (NOT A) (NOT B)) C) is equivalent to
2676 ; (NOT (AND (IOR A B) (NOT C))).  Splitting it as D = AND (IOR A B) (NOT C),
2677 ; (NOT D) sometimes lets us merge the final NOT into one of the following insns.
2678
2679 (define_split
2680 [(set (match_operand:SI 0 "s_register_operand" "")
2681 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2682 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2683 (match_operand:SI 3 "arm_rhs_operand" "")))
2684 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2685 "TARGET_32BIT"
2686 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2687 (not:SI (match_dup 3))))
2688 (set (match_dup 0) (not:SI (match_dup 4)))]
2689 ""
2690 )
2691
2692 (define_insn "*andsi_iorsi3_notsi"
2693 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2694 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2695 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2696 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2697 "TARGET_32BIT"
2698 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2699 [(set_attr "length" "8")
2700 (set_attr "ce_count" "2")
2701 (set_attr "predicable" "yes")]
2702 )
2703
2704 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2705 ; insns are available?
2706 (define_split
2707 [(set (match_operand:SI 0 "s_register_operand" "")
2708 (match_operator:SI 1 "logical_binary_operator"
2709 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2710 (match_operand:SI 3 "const_int_operand" "")
2711 (match_operand:SI 4 "const_int_operand" ""))
2712 (match_operator:SI 9 "logical_binary_operator"
2713 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2714 (match_operand:SI 6 "const_int_operand" ""))
2715 (match_operand:SI 7 "s_register_operand" "")])]))
2716 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2717 "TARGET_32BIT
2718 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2719 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2720 [(set (match_dup 8)
2721 (match_op_dup 1
2722 [(ashift:SI (match_dup 2) (match_dup 4))
2723 (match_dup 5)]))
2724 (set (match_dup 0)
2725 (match_op_dup 1
2726 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2727 (match_dup 7)]))]
2728 "
2729 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2730 ")
2731
2732 (define_split
2733 [(set (match_operand:SI 0 "s_register_operand" "")
2734 (match_operator:SI 1 "logical_binary_operator"
2735 [(match_operator:SI 9 "logical_binary_operator"
2736 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2737 (match_operand:SI 6 "const_int_operand" ""))
2738 (match_operand:SI 7 "s_register_operand" "")])
2739 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2740 (match_operand:SI 3 "const_int_operand" "")
2741 (match_operand:SI 4 "const_int_operand" ""))]))
2742 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2743 "TARGET_32BIT
2744 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2745 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2746 [(set (match_dup 8)
2747 (match_op_dup 1
2748 [(ashift:SI (match_dup 2) (match_dup 4))
2749 (match_dup 5)]))
2750 (set (match_dup 0)
2751 (match_op_dup 1
2752 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2753 (match_dup 7)]))]
2754 "
2755 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2756 ")
2757
2758 (define_split
2759 [(set (match_operand:SI 0 "s_register_operand" "")
2760 (match_operator:SI 1 "logical_binary_operator"
2761 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2762 (match_operand:SI 3 "const_int_operand" "")
2763 (match_operand:SI 4 "const_int_operand" ""))
2764 (match_operator:SI 9 "logical_binary_operator"
2765 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2766 (match_operand:SI 6 "const_int_operand" ""))
2767 (match_operand:SI 7 "s_register_operand" "")])]))
2768 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2769 "TARGET_32BIT
2770 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2771 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2772 [(set (match_dup 8)
2773 (match_op_dup 1
2774 [(ashift:SI (match_dup 2) (match_dup 4))
2775 (match_dup 5)]))
2776 (set (match_dup 0)
2777 (match_op_dup 1
2778 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2779 (match_dup 7)]))]
2780 "
2781 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2782 ")
2783
2784 (define_split
2785 [(set (match_operand:SI 0 "s_register_operand" "")
2786 (match_operator:SI 1 "logical_binary_operator"
2787 [(match_operator:SI 9 "logical_binary_operator"
2788 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2789 (match_operand:SI 6 "const_int_operand" ""))
2790 (match_operand:SI 7 "s_register_operand" "")])
2791 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2792 (match_operand:SI 3 "const_int_operand" "")
2793 (match_operand:SI 4 "const_int_operand" ""))]))
2794 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2795 "TARGET_32BIT
2796 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2797 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2798 [(set (match_dup 8)
2799 (match_op_dup 1
2800 [(ashift:SI (match_dup 2) (match_dup 4))
2801 (match_dup 5)]))
2802 (set (match_dup 0)
2803 (match_op_dup 1
2804 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2805 (match_dup 7)]))]
2806 "
2807 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2808 ")
2809 \f
2810
2811 ;; Minimum and maximum insns
2812
2813 (define_expand "smaxsi3"
2814 [(parallel [
2815 (set (match_operand:SI 0 "s_register_operand" "")
2816 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2817 (match_operand:SI 2 "arm_rhs_operand" "")))
2818 (clobber (reg:CC CC_REGNUM))])]
2819 "TARGET_32BIT"
2820 "
2821 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2822 {
2823 /* No need for a clobber of the condition code register here. */
2824 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2825 gen_rtx_SMAX (SImode, operands[1],
2826 operands[2])));
2827 DONE;
2828 }
2829 ")
2830
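;; smax (x, 0), smax (x, -1) and smin (x, 0) depend only on the sign of x,
;; so a single data-processing instruction with "x, asr #31" (all ones when
;; x is negative, zero otherwise) as the second operand suffices:
;;   max (x, 0)  == x & ~(x >> 31)    -> bic
;;   max (x, -1) == x |  (x >> 31)    -> orr
;;   min (x, 0)  == x &  (x >> 31)    -> and
;; (illustrative identities, with ">>" an arithmetic shift by 31).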
2831 (define_insn "*smax_0"
2832 [(set (match_operand:SI 0 "s_register_operand" "=r")
2833 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2834 (const_int 0)))]
2835 "TARGET_32BIT"
2836 "bic%?\\t%0, %1, %1, asr #31"
2837 [(set_attr "predicable" "yes")]
2838 )
2839
2840 (define_insn "*smax_m1"
2841 [(set (match_operand:SI 0 "s_register_operand" "=r")
2842 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2843 (const_int -1)))]
2844 "TARGET_32BIT"
2845 "orr%?\\t%0, %1, %1, asr #31"
2846 [(set_attr "predicable" "yes")]
2847 )
2848
2849 (define_insn "*arm_smax_insn"
2850 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2851 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2852 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2853 (clobber (reg:CC CC_REGNUM))]
2854 "TARGET_ARM"
2855 "@
2856 cmp\\t%1, %2\;movlt\\t%0, %2
2857 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2858 [(set_attr "conds" "clob")
2859 (set_attr "length" "8,12")]
2860 )
2861
2862 (define_expand "sminsi3"
2863 [(parallel [
2864 (set (match_operand:SI 0 "s_register_operand" "")
2865 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2866 (match_operand:SI 2 "arm_rhs_operand" "")))
2867 (clobber (reg:CC CC_REGNUM))])]
2868 "TARGET_32BIT"
2869 "
2870 if (operands[2] == const0_rtx)
2871 {
2872 /* No need for a clobber of the condition code register here. */
2873 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2874 gen_rtx_SMIN (SImode, operands[1],
2875 operands[2])));
2876 DONE;
2877 }
2878 ")
2879
2880 (define_insn "*smin_0"
2881 [(set (match_operand:SI 0 "s_register_operand" "=r")
2882 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2883 (const_int 0)))]
2884 "TARGET_32BIT"
2885 "and%?\\t%0, %1, %1, asr #31"
2886 [(set_attr "predicable" "yes")]
2887 )
2888
2889 (define_insn "*arm_smin_insn"
2890 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2891 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2892 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2893 (clobber (reg:CC CC_REGNUM))]
2894 "TARGET_ARM"
2895 "@
2896 cmp\\t%1, %2\;movge\\t%0, %2
2897 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2898 [(set_attr "conds" "clob")
2899 (set_attr "length" "8,12")]
2900 )
2901
2902 (define_expand "umaxsi3"
2903 [(parallel [
2904 (set (match_operand:SI 0 "s_register_operand" "")
2905 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2906 (match_operand:SI 2 "arm_rhs_operand" "")))
2907 (clobber (reg:CC CC_REGNUM))])]
2908 "TARGET_32BIT"
2909 ""
2910 )
2911
2912 (define_insn "*arm_umaxsi3"
2913 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2914 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2915 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2916 (clobber (reg:CC CC_REGNUM))]
2917 "TARGET_ARM"
2918 "@
2919 cmp\\t%1, %2\;movcc\\t%0, %2
2920 cmp\\t%1, %2\;movcs\\t%0, %1
2921 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2922 [(set_attr "conds" "clob")
2923 (set_attr "length" "8,8,12")]
2924 )
2925
2926 (define_expand "uminsi3"
2927 [(parallel [
2928 (set (match_operand:SI 0 "s_register_operand" "")
2929 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2930 (match_operand:SI 2 "arm_rhs_operand" "")))
2931 (clobber (reg:CC CC_REGNUM))])]
2932 "TARGET_32BIT"
2933 ""
2934 )
2935
2936 (define_insn "*arm_uminsi3"
2937 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2938 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2939 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2940 (clobber (reg:CC CC_REGNUM))]
2941 "TARGET_ARM"
2942 "@
2943 cmp\\t%1, %2\;movcs\\t%0, %2
2944 cmp\\t%1, %2\;movcc\\t%0, %1
2945 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2946 [(set_attr "conds" "clob")
2947 (set_attr "length" "8,8,12")]
2948 )
2949
2950 (define_insn "*store_minmaxsi"
2951 [(set (match_operand:SI 0 "memory_operand" "=m")
2952 (match_operator:SI 3 "minmax_operator"
2953 [(match_operand:SI 1 "s_register_operand" "r")
2954 (match_operand:SI 2 "s_register_operand" "r")]))
2955 (clobber (reg:CC CC_REGNUM))]
2956 "TARGET_32BIT"
2957 "*
2958 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2959 operands[1], operands[2]);
2960 output_asm_insn (\"cmp\\t%1, %2\", operands);
2961 if (TARGET_THUMB2)
2962 output_asm_insn (\"ite\t%d3\", operands);
2963 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2964 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2965 return \"\";
2966 "
2967 [(set_attr "conds" "clob")
2968 (set (attr "length")
2969 (if_then_else (eq_attr "is_thumb" "yes")
2970 (const_int 14)
2971 (const_int 12)))
2972 (set_attr "type" "store1")]
2973 )
2974
2975 ; Reject the frame pointer in operand[1], since reloading this after
2976 ; it has been eliminated can cause carnage.
2977 (define_insn "*minmax_arithsi"
2978 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2979 (match_operator:SI 4 "shiftable_operator"
2980 [(match_operator:SI 5 "minmax_operator"
2981 [(match_operand:SI 2 "s_register_operand" "r,r")
2982 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2983 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2984 (clobber (reg:CC CC_REGNUM))]
2985 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2986 "*
2987 {
2988 enum rtx_code code = GET_CODE (operands[4]);
2989 bool need_else;
2990
2991 if (which_alternative != 0 || operands[3] != const0_rtx
2992 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2993 need_else = true;
2994 else
2995 need_else = false;
2996
2997 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2998 operands[2], operands[3]);
2999 output_asm_insn (\"cmp\\t%2, %3\", operands);
3000 if (TARGET_THUMB2)
3001 {
3002 if (need_else)
3003 output_asm_insn (\"ite\\t%d5\", operands);
3004 else
3005 output_asm_insn (\"it\\t%d5\", operands);
3006 }
3007 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3008 if (need_else)
3009 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3010 return \"\";
3011 }"
3012 [(set_attr "conds" "clob")
3013 (set (attr "length")
3014 (if_then_else (eq_attr "is_thumb" "yes")
3015 (const_int 14)
3016 (const_int 12)))]
3017 )
3018
3019 \f
3020 ;; Shift and rotation insns
3021
3022 (define_expand "ashldi3"
3023 [(set (match_operand:DI 0 "s_register_operand" "")
3024 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3025 (match_operand:SI 2 "reg_or_int_operand" "")))]
3026 "TARGET_32BIT"
3027 "
3028 if (GET_CODE (operands[2]) == CONST_INT)
3029 {
3030 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3031 {
3032 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3033 DONE;
3034 }
3035 /* Ideally we shouldn't fail here if we could know that operands[1]
3036 	 ends up already living in an iwmmxt register.  Otherwise it is
3037 	 cheaper to generate the alternative code than to move values to
3038 	 iwmmxt regs and back.  */
3039 FAIL;
3040 }
3041 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3042 FAIL;
3043 "
3044 )
3045
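;; A 64-bit shift left by one bit only needs two instructions: shift the low
;; word left, capturing its old bit 31 in the carry flag, then add the high
;; word to itself with carry.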
3046 (define_insn "arm_ashldi3_1bit"
3047 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3048 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3049 (const_int 1)))
3050 (clobber (reg:CC CC_REGNUM))]
3051 "TARGET_32BIT"
3052 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3053 [(set_attr "conds" "clob")
3054 (set_attr "length" "8")]
3055 )
3056
3057 (define_expand "ashlsi3"
3058 [(set (match_operand:SI 0 "s_register_operand" "")
3059 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3060 (match_operand:SI 2 "arm_rhs_operand" "")))]
3061 "TARGET_EITHER"
3062 "
3063 if (GET_CODE (operands[2]) == CONST_INT
3064 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3065 {
3066 emit_insn (gen_movsi (operands[0], const0_rtx));
3067 DONE;
3068 }
3069 "
3070 )
3071
3072 (define_insn "*thumb1_ashlsi3"
3073 [(set (match_operand:SI 0 "register_operand" "=l,l")
3074 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3075 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3076 "TARGET_THUMB1"
3077 "lsl\\t%0, %1, %2"
3078 [(set_attr "length" "2")]
3079 )
3080
3081 (define_expand "ashrdi3"
3082 [(set (match_operand:DI 0 "s_register_operand" "")
3083 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3084 (match_operand:SI 2 "reg_or_int_operand" "")))]
3085 "TARGET_32BIT"
3086 "
3087 if (GET_CODE (operands[2]) == CONST_INT)
3088 {
3089 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3090 {
3091 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3092 DONE;
3093 }
3094 /* Ideally we shouldn't fail here if we could know that operands[1]
3095 	 ends up already living in an iwmmxt register.  Otherwise it is
3096 	 cheaper to generate the alternative code than to move values to
3097 	 iwmmxt regs and back.  */
3098 FAIL;
3099 }
3100 else if (!TARGET_REALLY_IWMMXT)
3101 FAIL;
3102 "
3103 )
3104
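;; A 64-bit arithmetic shift right by one bit: shift the high word right,
;; leaving its old bit 0 in the carry flag, then rotate the low word right
;; through the carry (rrx) so that bit becomes the new bit 31.  The logical
;; shift variant (arm_lshrdi3_1bit below) works the same way.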
3105 (define_insn "arm_ashrdi3_1bit"
3106 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3107 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3108 (const_int 1)))
3109 (clobber (reg:CC CC_REGNUM))]
3110 "TARGET_32BIT"
3111 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3112 [(set_attr "conds" "clob")
3113 (set_attr "length" "8")]
3114 )
3115
3116 (define_expand "ashrsi3"
3117 [(set (match_operand:SI 0 "s_register_operand" "")
3118 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3119 (match_operand:SI 2 "arm_rhs_operand" "")))]
3120 "TARGET_EITHER"
3121 "
3122 if (GET_CODE (operands[2]) == CONST_INT
3123 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3124 operands[2] = GEN_INT (31);
3125 "
3126 )
3127
3128 (define_insn "*thumb1_ashrsi3"
3129 [(set (match_operand:SI 0 "register_operand" "=l,l")
3130 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3131 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3132 "TARGET_THUMB1"
3133 "asr\\t%0, %1, %2"
3134 [(set_attr "length" "2")]
3135 )
3136
3137 (define_expand "lshrdi3"
3138 [(set (match_operand:DI 0 "s_register_operand" "")
3139 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3140 (match_operand:SI 2 "reg_or_int_operand" "")))]
3141 "TARGET_32BIT"
3142 "
3143 if (GET_CODE (operands[2]) == CONST_INT)
3144 {
3145 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3146 {
3147 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3148 DONE;
3149 }
3150 /* Ideally we shouldn't fail here if we could know that operands[1]
3151 	 ends up already living in an iwmmxt register.  Otherwise it is
3152 	 cheaper to generate the alternative code than to move values to
3153 	 iwmmxt regs and back.  */
3154 FAIL;
3155 }
3156 else if (!TARGET_REALLY_IWMMXT)
3157 FAIL;
3158 "
3159 )
3160
3161 (define_insn "arm_lshrdi3_1bit"
3162 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3163 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3164 (const_int 1)))
3165 (clobber (reg:CC CC_REGNUM))]
3166 "TARGET_32BIT"
3167 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3168 [(set_attr "conds" "clob")
3169 (set_attr "length" "8")]
3170 )
3171
3172 (define_expand "lshrsi3"
3173 [(set (match_operand:SI 0 "s_register_operand" "")
3174 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3175 (match_operand:SI 2 "arm_rhs_operand" "")))]
3176 "TARGET_EITHER"
3177 "
3178 if (GET_CODE (operands[2]) == CONST_INT
3179 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3180 {
3181 emit_insn (gen_movsi (operands[0], const0_rtx));
3182 DONE;
3183 }
3184 "
3185 )
3186
3187 (define_insn "*thumb1_lshrsi3"
3188 [(set (match_operand:SI 0 "register_operand" "=l,l")
3189 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3190 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3191 "TARGET_THUMB1"
3192 "lsr\\t%0, %1, %2"
3193 [(set_attr "length" "2")]
3194 )
3195
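;; The ARM only has a rotate-right operation, so a rotate left by N is
;; expanded as a rotate right by (32 - N) % 32, or by a register holding
;; 32 - N when the amount is not a constant.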
3196 (define_expand "rotlsi3"
3197 [(set (match_operand:SI 0 "s_register_operand" "")
3198 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3199 (match_operand:SI 2 "reg_or_int_operand" "")))]
3200 "TARGET_32BIT"
3201 "
3202 if (GET_CODE (operands[2]) == CONST_INT)
3203 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3204 else
3205 {
3206 rtx reg = gen_reg_rtx (SImode);
3207 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3208 operands[2] = reg;
3209 }
3210 "
3211 )
3212
3213 (define_expand "rotrsi3"
3214 [(set (match_operand:SI 0 "s_register_operand" "")
3215 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3216 (match_operand:SI 2 "arm_rhs_operand" "")))]
3217 "TARGET_EITHER"
3218 "
3219 if (TARGET_32BIT)
3220 {
3221 if (GET_CODE (operands[2]) == CONST_INT
3222 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3223 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3224 }
3225 else /* TARGET_THUMB1 */
3226 {
3227 if (GET_CODE (operands [2]) == CONST_INT)
3228 operands [2] = force_reg (SImode, operands[2]);
3229 }
3230 "
3231 )
3232
3233 (define_insn "*thumb1_rotrsi3"
3234 [(set (match_operand:SI 0 "register_operand" "=l")
3235 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3236 (match_operand:SI 2 "register_operand" "l")))]
3237 "TARGET_THUMB1"
3238 "ror\\t%0, %0, %2"
3239 [(set_attr "length" "2")]
3240 )
3241
3242 (define_insn "*arm_shiftsi3"
3243 [(set (match_operand:SI 0 "s_register_operand" "=r")
3244 (match_operator:SI 3 "shift_operator"
3245 [(match_operand:SI 1 "s_register_operand" "r")
3246 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3247 "TARGET_32BIT"
3248 "* return arm_output_shift(operands, 0);"
3249 [(set_attr "predicable" "yes")
3250 (set_attr "shift" "1")
3251 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3252 (const_string "alu_shift")
3253 (const_string "alu_shift_reg")))]
3254 )
3255
3256 (define_insn "*shiftsi3_compare0"
3257 [(set (reg:CC_NOOV CC_REGNUM)
3258 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3259 [(match_operand:SI 1 "s_register_operand" "r")
3260 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3261 (const_int 0)))
3262 (set (match_operand:SI 0 "s_register_operand" "=r")
3263 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3264 "TARGET_32BIT"
3265 "* return arm_output_shift(operands, 1);"
3266 [(set_attr "conds" "set")
3267 (set_attr "shift" "1")
3268 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3269 (const_string "alu_shift")
3270 (const_string "alu_shift_reg")))]
3271 )
3272
3273 (define_insn "*shiftsi3_compare0_scratch"
3274 [(set (reg:CC_NOOV CC_REGNUM)
3275 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3276 [(match_operand:SI 1 "s_register_operand" "r")
3277 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3278 (const_int 0)))
3279 (clobber (match_scratch:SI 0 "=r"))]
3280 "TARGET_32BIT"
3281 "* return arm_output_shift(operands, 1);"
3282 [(set_attr "conds" "set")
3283 (set_attr "shift" "1")]
3284 )
3285
3286 (define_insn "*arm_notsi_shiftsi"
3287 [(set (match_operand:SI 0 "s_register_operand" "=r")
3288 (not:SI (match_operator:SI 3 "shift_operator"
3289 [(match_operand:SI 1 "s_register_operand" "r")
3290 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3291 "TARGET_ARM"
3292 "mvn%?\\t%0, %1%S3"
3293 [(set_attr "predicable" "yes")
3294 (set_attr "shift" "1")
3295 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3296 (const_string "alu_shift")
3297 (const_string "alu_shift_reg")))]
3298 )
3299
3300 (define_insn "*arm_notsi_shiftsi_compare0"
3301 [(set (reg:CC_NOOV CC_REGNUM)
3302 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3303 [(match_operand:SI 1 "s_register_operand" "r")
3304 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3305 (const_int 0)))
3306 (set (match_operand:SI 0 "s_register_operand" "=r")
3307 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3308 "TARGET_ARM"
3309 "mvn%.\\t%0, %1%S3"
3310 [(set_attr "conds" "set")
3311 (set_attr "shift" "1")
3312 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3313 (const_string "alu_shift")
3314 (const_string "alu_shift_reg")))]
3315 )
3316
3317 (define_insn "*arm_not_shiftsi_compare0_scratch"
3318 [(set (reg:CC_NOOV CC_REGNUM)
3319 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3320 [(match_operand:SI 1 "s_register_operand" "r")
3321 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3322 (const_int 0)))
3323 (clobber (match_scratch:SI 0 "=r"))]
3324 "TARGET_ARM"
3325 "mvn%.\\t%0, %1%S3"
3326 [(set_attr "conds" "set")
3327 (set_attr "shift" "1")
3328 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3329 (const_string "alu_shift")
3330 (const_string "alu_shift_reg")))]
3331 )
3332
3333 ;; We don't really have extzv, but defining this using shifts helps
3334 ;; to reduce register pressure later on.
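;; For a field of WIDTH bits starting at bit POS (counting from the least
;; significant bit), the expansion below is the usual shift pair; as an
;; illustrative C sketch (assuming a 32-bit unsigned int):
;;   tmp    = x << (32 - POS - WIDTH);
;;   result = tmp >> (32 - WIDTH);     /* logical shift */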
3335
3336 (define_expand "extzv"
3337 [(set (match_dup 4)
3338 (ashift:SI (match_operand:SI 1 "register_operand" "")
3339 (match_operand:SI 2 "const_int_operand" "")))
3340 (set (match_operand:SI 0 "register_operand" "")
3341 (lshiftrt:SI (match_dup 4)
3342 (match_operand:SI 3 "const_int_operand" "")))]
3343 "TARGET_THUMB1 || arm_arch_thumb2"
3344 "
3345 {
3346 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3347 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3348
3349 if (arm_arch_thumb2)
3350 {
3351 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3352 operands[3]));
3353 DONE;
3354 }
3355
3356 operands[3] = GEN_INT (rshift);
3357
3358 if (lshift == 0)
3359 {
3360 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3361 DONE;
3362 }
3363
3364 operands[2] = GEN_INT (lshift);
3365 operands[4] = gen_reg_rtx (SImode);
3366 }"
3367 )
3368
3369 (define_insn "extv"
3370 [(set (match_operand:SI 0 "s_register_operand" "=r")
3371 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3372 (match_operand:SI 2 "const_int_operand" "M")
3373 (match_operand:SI 3 "const_int_operand" "M")))]
3374 "arm_arch_thumb2"
3375 "sbfx%?\t%0, %1, %3, %2"
3376 [(set_attr "length" "4")
3377 (set_attr "predicable" "yes")]
3378 )
3379
3380 (define_insn "extzv_t2"
3381 [(set (match_operand:SI 0 "s_register_operand" "=r")
3382 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3383 (match_operand:SI 2 "const_int_operand" "M")
3384 (match_operand:SI 3 "const_int_operand" "M")))]
3385 "arm_arch_thumb2"
3386 "ubfx%?\t%0, %1, %3, %2"
3387 [(set_attr "length" "4")
3388 (set_attr "predicable" "yes")]
3389 )
3390
3391 \f
3392 ;; Unary arithmetic insns
3393
3394 (define_expand "negdi2"
3395 [(parallel
3396 [(set (match_operand:DI 0 "s_register_operand" "")
3397 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3398 (clobber (reg:CC CC_REGNUM))])]
3399 "TARGET_EITHER"
3400 "
3401 if (TARGET_THUMB1)
3402 {
3403 if (GET_CODE (operands[1]) != REG)
3404 operands[1] = force_reg (SImode, operands[1]);
3405 }
3406 "
3407 )
3408
3409 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3410 ;; The second alternative allows the common case of a *full* overlap.
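;; The two-instruction sequence below negates a 64-bit value using the borrow
;; chain: RSBS negates the low word and leaves the borrow in the carry flag,
;; and RSC then computes (0 - high - borrow) for the high word.  The Thumb-1
;; variant that follows does the same with MOV #0, NEG and SBC.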
3411 (define_insn "*arm_negdi2"
3412 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3413 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3414 (clobber (reg:CC CC_REGNUM))]
3415 "TARGET_ARM"
3416 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3417 [(set_attr "conds" "clob")
3418 (set_attr "length" "8")]
3419 )
3420
3421 (define_insn "*thumb1_negdi2"
3422 [(set (match_operand:DI 0 "register_operand" "=&l")
3423 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3424 (clobber (reg:CC CC_REGNUM))]
3425 "TARGET_THUMB1"
3426 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3427 [(set_attr "length" "6")]
3428 )
3429
3430 (define_expand "negsi2"
3431 [(set (match_operand:SI 0 "s_register_operand" "")
3432 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3433 "TARGET_EITHER"
3434 ""
3435 )
3436
3437 (define_insn "*arm_negsi2"
3438 [(set (match_operand:SI 0 "s_register_operand" "=r")
3439 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3440 "TARGET_32BIT"
3441 "rsb%?\\t%0, %1, #0"
3442 [(set_attr "predicable" "yes")]
3443 )
3444
3445 (define_insn "*thumb1_negsi2"
3446 [(set (match_operand:SI 0 "register_operand" "=l")
3447 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3448 "TARGET_THUMB1"
3449 "neg\\t%0, %1"
3450 [(set_attr "length" "2")]
3451 )
3452
3453 (define_expand "negsf2"
3454 [(set (match_operand:SF 0 "s_register_operand" "")
3455 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3456 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3457 ""
3458 )
3459
3460 (define_expand "negdf2"
3461 [(set (match_operand:DF 0 "s_register_operand" "")
3462 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3463 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3464 "")
3465
3466 ;; abssi2 doesn't really clobber the condition codes if a different register
3467 ;; is being set.  To keep things simple, assume during rtl manipulations that
3468 ;; it does clobber them, but tell the final scan pass the truth.  Similarly
3469 ;; for (neg (abs...)).
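;; The second alternative of *arm_abssi2 below computes abs(x) without a
;; branch.  As an illustration only, the equivalent C idiom is:
;;   t = x >> 31;          /* arithmetic shift: 0 or -1 */
;;   abs = (x ^ t) - t;
;; which matches the EOR with (x asr #31) followed by the SUB of (x asr #31).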
3470
3471 (define_expand "abssi2"
3472 [(parallel
3473 [(set (match_operand:SI 0 "s_register_operand" "")
3474 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3475 (clobber (match_dup 2))])]
3476 "TARGET_EITHER"
3477 "
3478 if (TARGET_THUMB1)
3479 operands[2] = gen_rtx_SCRATCH (SImode);
3480 else
3481 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3482 ")
3483
3484 (define_insn "*arm_abssi2"
3485 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3486 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3487 (clobber (reg:CC CC_REGNUM))]
3488 "TARGET_ARM"
3489 "@
3490 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3491 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3492 [(set_attr "conds" "clob,*")
3493 (set_attr "shift" "1")
3494    ;; predicable can't be set per alternative, so it is left as "no"
3495 (set_attr "length" "8")]
3496 )
3497
3498 (define_insn_and_split "*thumb1_abssi2"
3499 [(set (match_operand:SI 0 "s_register_operand" "=l")
3500 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3501 (clobber (match_scratch:SI 2 "=&l"))]
3502 "TARGET_THUMB1"
3503 "#"
3504 "TARGET_THUMB1 && reload_completed"
3505 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3506 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3507 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3508 ""
3509 [(set_attr "length" "6")]
3510 )
3511
3512 (define_insn "*arm_neg_abssi2"
3513 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3514 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3515 (clobber (reg:CC CC_REGNUM))]
3516 "TARGET_ARM"
3517 "@
3518 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3519 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3520 [(set_attr "conds" "clob,*")
3521 (set_attr "shift" "1")
3522    ;; predicable can't be set per alternative, so it is left as "no"
3523 (set_attr "length" "8")]
3524 )
3525
3526 (define_insn_and_split "*thumb1_neg_abssi2"
3527 [(set (match_operand:SI 0 "s_register_operand" "=l")
3528 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3529 (clobber (match_scratch:SI 2 "=&l"))]
3530 "TARGET_THUMB1"
3531 "#"
3532 "TARGET_THUMB1 && reload_completed"
3533 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3534 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3535 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3536 ""
3537 [(set_attr "length" "6")]
3538 )
3539
3540 (define_expand "abssf2"
3541 [(set (match_operand:SF 0 "s_register_operand" "")
3542 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3543 "TARGET_32BIT && TARGET_HARD_FLOAT"
3544 "")
3545
3546 (define_expand "absdf2"
3547 [(set (match_operand:DF 0 "s_register_operand" "")
3548 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3549 "TARGET_32BIT && TARGET_HARD_FLOAT"
3550 "")
3551
3552 (define_expand "sqrtsf2"
3553 [(set (match_operand:SF 0 "s_register_operand" "")
3554 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3555 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3556 "")
3557
3558 (define_expand "sqrtdf2"
3559 [(set (match_operand:DF 0 "s_register_operand" "")
3560 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3561 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3562 "")
3563
3564 (define_insn_and_split "one_cmpldi2"
3565 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3566 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3567 "TARGET_32BIT"
3568 "#"
3569 "TARGET_32BIT && reload_completed"
3570 [(set (match_dup 0) (not:SI (match_dup 1)))
3571 (set (match_dup 2) (not:SI (match_dup 3)))]
3572 "
3573 {
3574 operands[2] = gen_highpart (SImode, operands[0]);
3575 operands[0] = gen_lowpart (SImode, operands[0]);
3576 operands[3] = gen_highpart (SImode, operands[1]);
3577 operands[1] = gen_lowpart (SImode, operands[1]);
3578 }"
3579 [(set_attr "length" "8")
3580 (set_attr "predicable" "yes")]
3581 )
3582
3583 (define_expand "one_cmplsi2"
3584 [(set (match_operand:SI 0 "s_register_operand" "")
3585 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3586 "TARGET_EITHER"
3587 ""
3588 )
3589
3590 (define_insn "*arm_one_cmplsi2"
3591 [(set (match_operand:SI 0 "s_register_operand" "=r")
3592 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3593 "TARGET_32BIT"
3594 "mvn%?\\t%0, %1"
3595 [(set_attr "predicable" "yes")]
3596 )
3597
3598 (define_insn "*thumb1_one_cmplsi2"
3599 [(set (match_operand:SI 0 "register_operand" "=l")
3600 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3601 "TARGET_THUMB1"
3602 "mvn\\t%0, %1"
3603 [(set_attr "length" "2")]
3604 )
3605
3606 (define_insn "*notsi_compare0"
3607 [(set (reg:CC_NOOV CC_REGNUM)
3608 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3609 (const_int 0)))
3610 (set (match_operand:SI 0 "s_register_operand" "=r")
3611 (not:SI (match_dup 1)))]
3612 "TARGET_32BIT"
3613 "mvn%.\\t%0, %1"
3614 [(set_attr "conds" "set")]
3615 )
3616
3617 (define_insn "*notsi_compare0_scratch"
3618 [(set (reg:CC_NOOV CC_REGNUM)
3619 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3620 (const_int 0)))
3621 (clobber (match_scratch:SI 0 "=r"))]
3622 "TARGET_32BIT"
3623 "mvn%.\\t%0, %1"
3624 [(set_attr "conds" "set")]
3625 )
3626 \f
3627 ;; Fixed <--> Floating conversion insns
3628
3629 (define_expand "floatsisf2"
3630 [(set (match_operand:SF 0 "s_register_operand" "")
3631 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3632 "TARGET_32BIT && TARGET_HARD_FLOAT"
3633 "
3634 if (TARGET_MAVERICK)
3635 {
3636 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3637 DONE;
3638 }
3639 ")
3640
3641 (define_expand "floatsidf2"
3642 [(set (match_operand:DF 0 "s_register_operand" "")
3643 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3644 "TARGET_32BIT && TARGET_HARD_FLOAT"
3645 "
3646 if (TARGET_MAVERICK)
3647 {
3648 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3649 DONE;
3650 }
3651 ")
3652
3653 (define_expand "fix_truncsfsi2"
3654 [(set (match_operand:SI 0 "s_register_operand" "")
3655 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3656 "TARGET_32BIT && TARGET_HARD_FLOAT"
3657 "
3658 if (TARGET_MAVERICK)
3659 {
3660 if (!cirrus_fp_register (operands[0], SImode))
3661 operands[0] = force_reg (SImode, operands[0]);
3662 if (!cirrus_fp_register (operands[1], SFmode))
3663          operands[1] = force_reg (SFmode, operands[1]);
3664 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3665 DONE;
3666 }
3667 ")
3668
3669 (define_expand "fix_truncdfsi2"
3670 [(set (match_operand:SI 0 "s_register_operand" "")
3671 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3672 "TARGET_32BIT && TARGET_HARD_FLOAT"
3673 "
3674 if (TARGET_MAVERICK)
3675 {
3676 if (!cirrus_fp_register (operands[1], DFmode))
3677         operands[1] = force_reg (DFmode, operands[1]);
3678 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3679 DONE;
3680 }
3681 ")
3682
3683 ;; Truncation insns
3684
3685 (define_expand "truncdfsf2"
3686 [(set (match_operand:SF 0 "s_register_operand" "")
3687 (float_truncate:SF
3688 (match_operand:DF 1 "s_register_operand" "")))]
3689 "TARGET_32BIT && TARGET_HARD_FLOAT"
3690 ""
3691 )
3692 \f
3693 ;; Zero and sign extension instructions.
3694
3695 (define_expand "zero_extendsidi2"
3696 [(set (match_operand:DI 0 "s_register_operand" "")
3697 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3698 "TARGET_32BIT"
3699 ""
3700 )
3701
3702 (define_insn "*arm_zero_extendsidi2"
3703 [(set (match_operand:DI 0 "s_register_operand" "=r")
3704 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3705 "TARGET_ARM"
3706 "*
3707 if (REGNO (operands[1])
3708 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3709 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3710 return \"mov%?\\t%R0, #0\";
3711 "
3712 [(set_attr "length" "8")
3713 (set_attr "predicable" "yes")]
3714 )
3715
3716 (define_expand "zero_extendqidi2"
3717 [(set (match_operand:DI 0 "s_register_operand" "")
3718 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3719 "TARGET_32BIT"
3720 ""
3721 )
3722
3723 (define_insn "*arm_zero_extendqidi2"
3724 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3725 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3726 "TARGET_ARM"
3727 "@
3728 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3729 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3730 [(set_attr "length" "8")
3731 (set_attr "predicable" "yes")
3732 (set_attr "type" "*,load_byte")
3733 (set_attr "pool_range" "*,4092")
3734 (set_attr "neg_pool_range" "*,4084")]
3735 )
3736
3737 (define_expand "extendsidi2"
3738 [(set (match_operand:DI 0 "s_register_operand" "")
3739 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3740 "TARGET_32BIT"
3741 ""
3742 )
3743
3744 (define_insn "*arm_extendsidi2"
3745 [(set (match_operand:DI 0 "s_register_operand" "=r")
3746 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3747 "TARGET_ARM"
3748 "*
3749 if (REGNO (operands[1])
3750 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3751 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3752 return \"mov%?\\t%R0, %Q0, asr #31\";
3753 "
3754 [(set_attr "length" "8")
3755 (set_attr "shift" "1")
3756 (set_attr "predicable" "yes")]
3757 )
3758
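;; When the operand is already in a register and the core lacks uxth
;; (pre-v6), the expander below falls back to a shift pair to zero-extend
;; the halfword: shift left by 16, then logical shift right by 16.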
3759 (define_expand "zero_extendhisi2"
3760 [(set (match_dup 2)
3761 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3762 (const_int 16)))
3763 (set (match_operand:SI 0 "s_register_operand" "")
3764 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3765 "TARGET_EITHER"
3766 "
3767 {
3768 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3769 {
3770 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3771 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3772 DONE;
3773 }
3774
3775 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3776 {
3777 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3778 DONE;
3779 }
3780
3781 if (!s_register_operand (operands[1], HImode))
3782 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3783
3784 if (arm_arch6)
3785 {
3786 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3787 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3788 DONE;
3789 }
3790
3791 operands[1] = gen_lowpart (SImode, operands[1]);
3792 operands[2] = gen_reg_rtx (SImode);
3793 }"
3794 )
3795
3796 (define_insn "*thumb1_zero_extendhisi2"
3797 [(set (match_operand:SI 0 "register_operand" "=l")
3798 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3799 "TARGET_THUMB1 && !arm_arch6"
3800 "*
3801 rtx mem = XEXP (operands[1], 0);
3802
3803 if (GET_CODE (mem) == CONST)
3804 mem = XEXP (mem, 0);
3805
3806 if (GET_CODE (mem) == LABEL_REF)
3807 return \"ldr\\t%0, %1\";
3808
3809 if (GET_CODE (mem) == PLUS)
3810 {
3811 rtx a = XEXP (mem, 0);
3812 rtx b = XEXP (mem, 1);
3813
3814 /* This can happen due to bugs in reload. */
3815 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3816 {
3817 rtx ops[2];
3818 ops[0] = operands[0];
3819 ops[1] = a;
3820
3821 output_asm_insn (\"mov %0, %1\", ops);
3822
3823 XEXP (mem, 0) = operands[0];
3824 }
3825
3826 else if ( GET_CODE (a) == LABEL_REF
3827 && GET_CODE (b) == CONST_INT)
3828 return \"ldr\\t%0, %1\";
3829 }
3830
3831 return \"ldrh\\t%0, %1\";
3832 "
3833 [(set_attr "length" "4")
3834 (set_attr "type" "load_byte")
3835 (set_attr "pool_range" "60")]
3836 )
3837
3838 (define_insn "*thumb1_zero_extendhisi2_v6"
3839 [(set (match_operand:SI 0 "register_operand" "=l,l")
3840 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3841 "TARGET_THUMB1 && arm_arch6"
3842 "*
3843 rtx mem;
3844
3845 if (which_alternative == 0)
3846 return \"uxth\\t%0, %1\";
3847
3848 mem = XEXP (operands[1], 0);
3849
3850 if (GET_CODE (mem) == CONST)
3851 mem = XEXP (mem, 0);
3852
3853 if (GET_CODE (mem) == LABEL_REF)
3854 return \"ldr\\t%0, %1\";
3855
3856 if (GET_CODE (mem) == PLUS)
3857 {
3858 rtx a = XEXP (mem, 0);
3859 rtx b = XEXP (mem, 1);
3860
3861 /* This can happen due to bugs in reload. */
3862 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3863 {
3864 rtx ops[2];
3865 ops[0] = operands[0];
3866 ops[1] = a;
3867
3868 output_asm_insn (\"mov %0, %1\", ops);
3869
3870 XEXP (mem, 0) = operands[0];
3871 }
3872
3873 else if ( GET_CODE (a) == LABEL_REF
3874 && GET_CODE (b) == CONST_INT)
3875 return \"ldr\\t%0, %1\";
3876 }
3877
3878 return \"ldrh\\t%0, %1\";
3879 "
3880 [(set_attr "length" "2,4")
3881 (set_attr "type" "alu_shift,load_byte")
3882 (set_attr "pool_range" "*,60")]
3883 )
3884
3885 (define_insn "*arm_zero_extendhisi2"
3886 [(set (match_operand:SI 0 "s_register_operand" "=r")
3887 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3888 "TARGET_ARM && arm_arch4 && !arm_arch6"
3889 "ldr%(h%)\\t%0, %1"
3890 [(set_attr "type" "load_byte")
3891 (set_attr "predicable" "yes")
3892 (set_attr "pool_range" "256")
3893 (set_attr "neg_pool_range" "244")]
3894 )
3895
3896 (define_insn "*arm_zero_extendhisi2_v6"
3897 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3898 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3899 "TARGET_ARM && arm_arch6"
3900 "@
3901 uxth%?\\t%0, %1
3902 ldr%(h%)\\t%0, %1"
3903 [(set_attr "type" "alu_shift,load_byte")
3904 (set_attr "predicable" "yes")
3905 (set_attr "pool_range" "*,256")
3906 (set_attr "neg_pool_range" "*,244")]
3907 )
3908
3909 (define_insn "*arm_zero_extendhisi2addsi"
3910 [(set (match_operand:SI 0 "s_register_operand" "=r")
3911 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3912 (match_operand:SI 2 "s_register_operand" "r")))]
3913 "TARGET_INT_SIMD"
3914 "uxtah%?\\t%0, %2, %1"
3915 [(set_attr "type" "alu_shift")
3916 (set_attr "predicable" "yes")]
3917 )
3918
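;; For a QImode value already in a register on a pre-v6 core, the expander
;; below zero-extends it with AND #255 in ARM state, or with a left/right
;; shift pair by 24 in Thumb-1 state, where AND has no immediate form.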
3919 (define_expand "zero_extendqisi2"
3920 [(set (match_operand:SI 0 "s_register_operand" "")
3921 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3922 "TARGET_EITHER"
3923 "
3924 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3925 {
3926 if (TARGET_ARM)
3927 {
3928 emit_insn (gen_andsi3 (operands[0],
3929 gen_lowpart (SImode, operands[1]),
3930 GEN_INT (255)));
3931 }
3932 else /* TARGET_THUMB */
3933 {
3934 rtx temp = gen_reg_rtx (SImode);
3935 rtx ops[3];
3936
3937 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3938 operands[1] = gen_lowpart (SImode, operands[1]);
3939
3940 ops[0] = temp;
3941 ops[1] = operands[1];
3942 ops[2] = GEN_INT (24);
3943
3944 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3945 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3946
3947 ops[0] = operands[0];
3948 ops[1] = temp;
3949 ops[2] = GEN_INT (24);
3950
3951 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3952 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3953 }
3954 DONE;
3955 }
3956 "
3957 )
3958
3959 (define_insn "*thumb1_zero_extendqisi2"
3960 [(set (match_operand:SI 0 "register_operand" "=l")
3961 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3962 "TARGET_THUMB1 && !arm_arch6"
3963 "ldrb\\t%0, %1"
3964 [(set_attr "length" "2")
3965 (set_attr "type" "load_byte")
3966 (set_attr "pool_range" "32")]
3967 )
3968
3969 (define_insn "*thumb1_zero_extendqisi2_v6"
3970 [(set (match_operand:SI 0 "register_operand" "=l,l")
3971 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3972 "TARGET_THUMB1 && arm_arch6"
3973 "@
3974 uxtb\\t%0, %1
3975 ldrb\\t%0, %1"
3976 [(set_attr "length" "2,2")
3977 (set_attr "type" "alu_shift,load_byte")
3978 (set_attr "pool_range" "*,32")]
3979 )
3980
3981 (define_insn "*arm_zero_extendqisi2"
3982 [(set (match_operand:SI 0 "s_register_operand" "=r")
3983 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3984 "TARGET_ARM && !arm_arch6"
3985 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3986 [(set_attr "type" "load_byte")
3987 (set_attr "predicable" "yes")
3988 (set_attr "pool_range" "4096")
3989 (set_attr "neg_pool_range" "4084")]
3990 )
3991
3992 (define_insn "*arm_zero_extendqisi2_v6"
3993 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3994 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3995 "TARGET_ARM && arm_arch6"
3996 "@
3997 uxtb%(%)\\t%0, %1
3998 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3999 [(set_attr "type" "alu_shift,load_byte")
4000 (set_attr "predicable" "yes")
4001 (set_attr "pool_range" "*,4096")
4002 (set_attr "neg_pool_range" "*,4084")]
4003 )
4004
4005 (define_insn "*arm_zero_extendqisi2addsi"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r")
4007 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4008 (match_operand:SI 2 "s_register_operand" "r")))]
4009 "TARGET_INT_SIMD"
4010 "uxtab%?\\t%0, %2, %1"
4011 [(set_attr "predicable" "yes")
4012 (set_attr "insn" "xtab")
4013 (set_attr "type" "alu_shift")]
4014 )
4015
4016 (define_split
4017 [(set (match_operand:SI 0 "s_register_operand" "")
4018 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4019 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4020 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4021 [(set (match_dup 2) (match_dup 1))
4022 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4023 ""
4024 )
4025
4026 (define_split
4027 [(set (match_operand:SI 0 "s_register_operand" "")
4028 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4029 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4030 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4031 [(set (match_dup 2) (match_dup 1))
4032 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4033 ""
4034 )
4035
4036 (define_insn "*compareqi_eq0"
4037 [(set (reg:CC_Z CC_REGNUM)
4038 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4039 (const_int 0)))]
4040 "TARGET_32BIT"
4041 "tst\\t%0, #255"
4042 [(set_attr "conds" "set")]
4043 )
4044
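;; Sign extension of a halfword follows the same scheme as zero extension
;; above: use ldrsh/sxth when the architecture provides them, otherwise fall
;; back to a shift pair, this time with an arithmetic (sign-propagating)
;; right shift:  (x << 16) >> 16.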
4045 (define_expand "extendhisi2"
4046 [(set (match_dup 2)
4047 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4048 (const_int 16)))
4049 (set (match_operand:SI 0 "s_register_operand" "")
4050 (ashiftrt:SI (match_dup 2)
4051 (const_int 16)))]
4052 "TARGET_EITHER"
4053 "
4054 {
4055 if (GET_CODE (operands[1]) == MEM)
4056 {
4057 if (TARGET_THUMB1)
4058 {
4059 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4060 DONE;
4061 }
4062 else if (arm_arch4)
4063 {
4064 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4065 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4066 DONE;
4067 }
4068 }
4069
4070 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4071 {
4072 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4073 DONE;
4074 }
4075
4076 if (!s_register_operand (operands[1], HImode))
4077 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4078
4079 if (arm_arch6)
4080 {
4081 if (TARGET_THUMB1)
4082 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4083 else
4084 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4085 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4086
4087 DONE;
4088 }
4089
4090 operands[1] = gen_lowpart (SImode, operands[1]);
4091 operands[2] = gen_reg_rtx (SImode);
4092 }"
4093 )
4094
4095 (define_insn "thumb1_extendhisi2"
4096 [(set (match_operand:SI 0 "register_operand" "=l")
4097 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4098 (clobber (match_scratch:SI 2 "=&l"))]
4099 "TARGET_THUMB1 && !arm_arch6"
4100 "*
4101 {
4102 rtx ops[4];
4103 rtx mem = XEXP (operands[1], 0);
4104
4105 /* This code used to try to use 'V', and fix the address only if it was
4106 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4107 range of QImode offsets, and offsettable_address_p does a QImode
4108 address check. */
4109
4110 if (GET_CODE (mem) == CONST)
4111 mem = XEXP (mem, 0);
4112
4113 if (GET_CODE (mem) == LABEL_REF)
4114 return \"ldr\\t%0, %1\";
4115
4116 if (GET_CODE (mem) == PLUS)
4117 {
4118 rtx a = XEXP (mem, 0);
4119 rtx b = XEXP (mem, 1);
4120
4121 if (GET_CODE (a) == LABEL_REF
4122 && GET_CODE (b) == CONST_INT)
4123 return \"ldr\\t%0, %1\";
4124
4125 if (GET_CODE (b) == REG)
4126 return \"ldrsh\\t%0, %1\";
4127
4128 ops[1] = a;
4129 ops[2] = b;
4130 }
4131 else
4132 {
4133 ops[1] = mem;
4134 ops[2] = const0_rtx;
4135 }
4136
4137 gcc_assert (GET_CODE (ops[1]) == REG);
4138
4139 ops[0] = operands[0];
4140 ops[3] = operands[2];
4141 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4142 return \"\";
4143 }"
4144 [(set_attr "length" "4")
4145 (set_attr "type" "load_byte")
4146 (set_attr "pool_range" "1020")]
4147 )
4148
4149 ;; We used to have an early-clobber on the scratch register here.
4150 ;; However, there's a bug somewhere in reload which means that this
4151 ;; can be partially ignored during spill allocation if the memory
4152 ;; address also needs reloading; this causes us to die later on when
4153 ;; we try to verify the operands. Fortunately, we don't really need
4154 ;; the early-clobber: we can always use operand 0 if operand 2
4155 ;; overlaps the address.
4156 (define_insn "*thumb1_extendhisi2_insn_v6"
4157 [(set (match_operand:SI 0 "register_operand" "=l,l")
4158 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4159 (clobber (match_scratch:SI 2 "=X,l"))]
4160 "TARGET_THUMB1 && arm_arch6"
4161 "*
4162 {
4163 rtx ops[4];
4164 rtx mem;
4165
4166 if (which_alternative == 0)
4167 return \"sxth\\t%0, %1\";
4168
4169 mem = XEXP (operands[1], 0);
4170
4171 /* This code used to try to use 'V', and fix the address only if it was
4172 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4173 range of QImode offsets, and offsettable_address_p does a QImode
4174 address check. */
4175
4176 if (GET_CODE (mem) == CONST)
4177 mem = XEXP (mem, 0);
4178
4179 if (GET_CODE (mem) == LABEL_REF)
4180 return \"ldr\\t%0, %1\";
4181
4182 if (GET_CODE (mem) == PLUS)
4183 {
4184 rtx a = XEXP (mem, 0);
4185 rtx b = XEXP (mem, 1);
4186
4187 if (GET_CODE (a) == LABEL_REF
4188 && GET_CODE (b) == CONST_INT)
4189 return \"ldr\\t%0, %1\";
4190
4191 if (GET_CODE (b) == REG)
4192 return \"ldrsh\\t%0, %1\";
4193
4194 ops[1] = a;
4195 ops[2] = b;
4196 }
4197 else
4198 {
4199 ops[1] = mem;
4200 ops[2] = const0_rtx;
4201 }
4202
4203 gcc_assert (GET_CODE (ops[1]) == REG);
4204
4205 ops[0] = operands[0];
4206 if (reg_mentioned_p (operands[2], ops[1]))
4207 ops[3] = ops[0];
4208 else
4209 ops[3] = operands[2];
4210 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4211 return \"\";
4212 }"
4213 [(set_attr "length" "2,4")
4214 (set_attr "type" "alu_shift,load_byte")
4215 (set_attr "pool_range" "*,1020")]
4216 )
4217
4218 ;; This pattern will only be used when ldrsh is not available
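;; The expansion composes the sign-extended halfword out of two byte loads:
;; both bytes are loaded zero-extended, the byte holding the sign (which one
;; depends on endianness) is shifted left by 24 and then arithmetically right
;; by 16, and the result is ORed with the other byte.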
4219 (define_expand "extendhisi2_mem"
4220 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4221 (set (match_dup 3)
4222 (zero_extend:SI (match_dup 7)))
4223 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4224 (set (match_operand:SI 0 "" "")
4225 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4226 "TARGET_ARM"
4227 "
4228 {
4229 rtx mem1, mem2;
4230 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4231
4232 mem1 = change_address (operands[1], QImode, addr);
4233 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4234 operands[0] = gen_lowpart (SImode, operands[0]);
4235 operands[1] = mem1;
4236 operands[2] = gen_reg_rtx (SImode);
4237 operands[3] = gen_reg_rtx (SImode);
4238 operands[6] = gen_reg_rtx (SImode);
4239 operands[7] = mem2;
4240
4241 if (BYTES_BIG_ENDIAN)
4242 {
4243 operands[4] = operands[2];
4244 operands[5] = operands[3];
4245 }
4246 else
4247 {
4248 operands[4] = operands[3];
4249 operands[5] = operands[2];
4250 }
4251 }"
4252 )
4253
4254 (define_insn "*arm_extendhisi2"
4255 [(set (match_operand:SI 0 "s_register_operand" "=r")
4256 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4257 "TARGET_ARM && arm_arch4 && !arm_arch6"
4258 "ldr%(sh%)\\t%0, %1"
4259 [(set_attr "type" "load_byte")
4260 (set_attr "predicable" "yes")
4261 (set_attr "pool_range" "256")
4262 (set_attr "neg_pool_range" "244")]
4263 )
4264
4265 ;; ??? Check Thumb-2 pool range
4266 (define_insn "*arm_extendhisi2_v6"
4267 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4268 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4269 "TARGET_32BIT && arm_arch6"
4270 "@
4271 sxth%?\\t%0, %1
4272 ldr%(sh%)\\t%0, %1"
4273 [(set_attr "type" "alu_shift,load_byte")
4274 (set_attr "predicable" "yes")
4275 (set_attr "pool_range" "*,256")
4276 (set_attr "neg_pool_range" "*,244")]
4277 )
4278
4279 (define_insn "*arm_extendhisi2addsi"
4280 [(set (match_operand:SI 0 "s_register_operand" "=r")
4281 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4282 (match_operand:SI 2 "s_register_operand" "r")))]
4283 "TARGET_INT_SIMD"
4284 "sxtah%?\\t%0, %2, %1"
4285 )
4286
4287 (define_expand "extendqihi2"
4288 [(set (match_dup 2)
4289 (ashift:SI (match_operand:QI 1 "general_operand" "")
4290 (const_int 24)))
4291 (set (match_operand:HI 0 "s_register_operand" "")
4292 (ashiftrt:SI (match_dup 2)
4293 (const_int 24)))]
4294 "TARGET_ARM"
4295 "
4296 {
4297 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4298 {
4299 emit_insn (gen_rtx_SET (VOIDmode,
4300 operands[0],
4301 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4302 DONE;
4303 }
4304 if (!s_register_operand (operands[1], QImode))
4305 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4306 operands[0] = gen_lowpart (SImode, operands[0]);
4307 operands[1] = gen_lowpart (SImode, operands[1]);
4308 operands[2] = gen_reg_rtx (SImode);
4309 }"
4310 )
4311
4312 (define_insn "*arm_extendqihi_insn"
4313 [(set (match_operand:HI 0 "s_register_operand" "=r")
4314 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4315 "TARGET_ARM && arm_arch4"
4316 "ldr%(sb%)\\t%0, %1"
4317 [(set_attr "type" "load_byte")
4318 (set_attr "predicable" "yes")
4319 (set_attr "pool_range" "256")
4320 (set_attr "neg_pool_range" "244")]
4321 )
4322
4323 (define_expand "extendqisi2"
4324 [(set (match_dup 2)
4325 (ashift:SI (match_operand:QI 1 "general_operand" "")
4326 (const_int 24)))
4327 (set (match_operand:SI 0 "s_register_operand" "")
4328 (ashiftrt:SI (match_dup 2)
4329 (const_int 24)))]
4330 "TARGET_EITHER"
4331 "
4332 {
4333 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4334 {
4335 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4336 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4337 DONE;
4338 }
4339
4340 if (!s_register_operand (operands[1], QImode))
4341 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4342
4343 if (arm_arch6)
4344 {
4345 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4346 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4347 DONE;
4348 }
4349
4350 operands[1] = gen_lowpart (SImode, operands[1]);
4351 operands[2] = gen_reg_rtx (SImode);
4352 }"
4353 )
4354
4355 (define_insn "*arm_extendqisi"
4356 [(set (match_operand:SI 0 "s_register_operand" "=r")
4357 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4358 "TARGET_ARM && arm_arch4 && !arm_arch6"
4359 "ldr%(sb%)\\t%0, %1"
4360 [(set_attr "type" "load_byte")
4361 (set_attr "predicable" "yes")
4362 (set_attr "pool_range" "256")
4363 (set_attr "neg_pool_range" "244")]
4364 )
4365
4366 (define_insn "*arm_extendqisi_v6"
4367 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4368 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4369 "TARGET_ARM && arm_arch6"
4370 "@
4371 sxtb%?\\t%0, %1
4372 ldr%(sb%)\\t%0, %1"
4373 [(set_attr "type" "alu_shift,load_byte")
4374 (set_attr "predicable" "yes")
4375 (set_attr "pool_range" "*,256")
4376 (set_attr "neg_pool_range" "*,244")]
4377 )
4378
4379 (define_insn "*arm_extendqisi2addsi"
4380 [(set (match_operand:SI 0 "s_register_operand" "=r")
4381 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4382 (match_operand:SI 2 "s_register_operand" "r")))]
4383 "TARGET_INT_SIMD"
4384 "sxtab%?\\t%0, %2, %1"
4385 [(set_attr "type" "alu_shift")
4386 (set_attr "insn" "xtab")
4387 (set_attr "predicable" "yes")]
4388 )
4389
4390 (define_insn "*thumb1_extendqisi2"
4391 [(set (match_operand:SI 0 "register_operand" "=l,l")
4392 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4393 "TARGET_THUMB1 && !arm_arch6"
4394 "*
4395 {
4396 rtx ops[3];
4397 rtx mem = XEXP (operands[1], 0);
4398
4399 if (GET_CODE (mem) == CONST)
4400 mem = XEXP (mem, 0);
4401
4402 if (GET_CODE (mem) == LABEL_REF)
4403 return \"ldr\\t%0, %1\";
4404
4405 if (GET_CODE (mem) == PLUS
4406 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4407 return \"ldr\\t%0, %1\";
4408
4409 if (which_alternative == 0)
4410 return \"ldrsb\\t%0, %1\";
4411
4412 ops[0] = operands[0];
4413
4414 if (GET_CODE (mem) == PLUS)
4415 {
4416 rtx a = XEXP (mem, 0);
4417 rtx b = XEXP (mem, 1);
4418
4419 ops[1] = a;
4420 ops[2] = b;
4421
4422 if (GET_CODE (a) == REG)
4423 {
4424 if (GET_CODE (b) == REG)
4425 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4426 else if (REGNO (a) == REGNO (ops[0]))
4427 {
4428 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4429 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4430 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4431 }
4432 else
4433 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4434 }
4435 else
4436 {
4437 gcc_assert (GET_CODE (b) == REG);
4438 if (REGNO (b) == REGNO (ops[0]))
4439 {
4440 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4441 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4442 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4443 }
4444 else
4445 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4446 }
4447 }
4448 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4449 {
4450 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4451 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4452 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4453 }
4454 else
4455 {
4456 ops[1] = mem;
4457 ops[2] = const0_rtx;
4458
4459 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4460 }
4461 return \"\";
4462 }"
4463 [(set_attr "length" "2,6")
4464 (set_attr "type" "load_byte,load_byte")
4465 (set_attr "pool_range" "32,32")]
4466 )
4467
4468 (define_insn "*thumb1_extendqisi2_v6"
4469 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4470 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4471 "TARGET_THUMB1 && arm_arch6"
4472 "*
4473 {
4474 rtx ops[3];
4475 rtx mem;
4476
4477 if (which_alternative == 0)
4478 return \"sxtb\\t%0, %1\";
4479
4480 mem = XEXP (operands[1], 0);
4481
4482 if (GET_CODE (mem) == CONST)
4483 mem = XEXP (mem, 0);
4484
4485 if (GET_CODE (mem) == LABEL_REF)
4486 return \"ldr\\t%0, %1\";
4487
4488 if (GET_CODE (mem) == PLUS
4489 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4490 return \"ldr\\t%0, %1\";
4491
4492     if (which_alternative == 1)
4493       return \"ldrsb\\t%0, %1\";
4494
4495 ops[0] = operands[0];
4496
4497 if (GET_CODE (mem) == PLUS)
4498 {
4499 rtx a = XEXP (mem, 0);
4500 rtx b = XEXP (mem, 1);
4501
4502 ops[1] = a;
4503 ops[2] = b;
4504
4505 if (GET_CODE (a) == REG)
4506 {
4507 if (GET_CODE (b) == REG)
4508 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4509 else if (REGNO (a) == REGNO (ops[0]))
4510 {
4511 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4512 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4513 }
4514 else
4515 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4516 }
4517 else
4518 {
4519 gcc_assert (GET_CODE (b) == REG);
4520 if (REGNO (b) == REGNO (ops[0]))
4521 {
4522 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4523 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4524 }
4525 else
4526 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4527 }
4528 }
4529 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4530 {
4531 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4532 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4533 }
4534 else
4535 {
4536 ops[1] = mem;
4537 ops[2] = const0_rtx;
4538
4539 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4540 }
4541 return \"\";
4542 }"
4543 [(set_attr "length" "2,2,4")
4544 (set_attr "type" "alu_shift,load_byte,load_byte")
4545 (set_attr "pool_range" "*,32,32")]
4546 )
4547
4548 (define_expand "extendsfdf2"
4549 [(set (match_operand:DF 0 "s_register_operand" "")
4550 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4551 "TARGET_32BIT && TARGET_HARD_FLOAT"
4552 ""
4553 )
4554 \f
4555 ;; Move insns (including loads and stores)
4556
4557 ;; XXX Just some ideas about movti.
4558 ;; I don't think these are a good idea on the ARM; there just aren't enough
4559 ;; registers.
4560 ;;(define_expand "loadti"
4561 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4562 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4563 ;; "" "")
4564
4565 ;;(define_expand "storeti"
4566 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4567 ;; (match_operand:TI 1 "s_register_operand" ""))]
4568 ;; "" "")
4569
4570 ;;(define_expand "movti"
4571 ;; [(set (match_operand:TI 0 "general_operand" "")
4572 ;; (match_operand:TI 1 "general_operand" ""))]
4573 ;; ""
4574 ;; "
4575 ;;{
4576 ;; rtx insn;
4577 ;;
4578 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4579 ;; operands[1] = copy_to_reg (operands[1]);
4580 ;; if (GET_CODE (operands[0]) == MEM)
4581 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4582 ;; else if (GET_CODE (operands[1]) == MEM)
4583 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4584 ;; else
4585 ;; FAIL;
4586 ;;
4587 ;; emit_insn (insn);
4588 ;; DONE;
4589 ;;}")
4590
4591 ;; Recognize garbage generated above.
4592
4593 ;;(define_insn ""
4594 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4595 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4596 ;; ""
4597 ;; "*
4598 ;; {
4599 ;; register mem = (which_alternative < 3);
4600 ;; register const char *template;
4601 ;;
4602 ;; operands[mem] = XEXP (operands[mem], 0);
4603 ;; switch (which_alternative)
4604 ;; {
4605 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4606 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4607 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4608 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4609 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4610 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4611 ;; }
4612 ;; output_asm_insn (template, operands);
4613 ;; return \"\";
4614 ;; }")
4615
4616 (define_expand "movdi"
4617 [(set (match_operand:DI 0 "general_operand" "")
4618 (match_operand:DI 1 "general_operand" ""))]
4619 "TARGET_EITHER"
4620 "
4621 if (can_create_pseudo_p ())
4622 {
4623 if (GET_CODE (operands[0]) != REG)
4624 operands[1] = force_reg (DImode, operands[1]);
4625 }
4626 "
4627 )
4628
4629 (define_insn "*arm_movdi"
4630 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4631 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4632 "TARGET_ARM
4633 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4634 && !TARGET_IWMMXT
4635 && ( register_operand (operands[0], DImode)
4636 || register_operand (operands[1], DImode))"
4637 "*
4638 switch (which_alternative)
4639 {
4640 case 0:
4641 case 1:
4642 case 2:
4643 return \"#\";
4644 default:
4645 return output_move_double (operands);
4646 }
4647 "
4648 [(set_attr "length" "8,12,16,8,8")
4649 (set_attr "type" "*,*,*,load2,store2")
4650 (set_attr "pool_range" "*,*,*,1020,*")
4651 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4652 )
4653
4654 (define_split
4655 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4656 (match_operand:ANY64 1 "const_double_operand" ""))]
4657 "TARGET_32BIT
4658 && reload_completed
4659 && (arm_const_double_inline_cost (operands[1])
4660 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4661 [(const_int 0)]
4662 "
4663 arm_split_constant (SET, SImode, curr_insn,
4664 INTVAL (gen_lowpart (SImode, operands[1])),
4665 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4666 arm_split_constant (SET, SImode, curr_insn,
4667 INTVAL (gen_highpart_mode (SImode,
4668 GET_MODE (operands[0]),
4669 operands[1])),
4670 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4671 DONE;
4672 "
4673 )
4674
4675 ; If optimizing for size, or if we have load delay slots, then
4676 ; we want to split the constant into two separate operations.
4677 ; In both cases this may split a trivial part into a single data op
4678 ; leaving a single complex constant to load. We can also get longer
4679 ; offsets in a LDR which means we get better chances of sharing the pool
4680 ; entries. Finally, we can normally do a better job of scheduling
4681 ; LDR instructions than we can with LDM.
4682 ; This pattern will only match if the one above did not.
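; As an illustration only (not from the sources), a 64-bit constant such as
; 0x1234567800000001 could then be handled as a MOV #1 for one word and a
; constant-pool LDR for the 0x12345678 word, rather than a single load of a
; 64-bit pool entry.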
4683 (define_split
4684 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4685 (match_operand:ANY64 1 "const_double_operand" ""))]
4686 "TARGET_ARM && reload_completed
4687 && arm_const_double_by_parts (operands[1])"
4688 [(set (match_dup 0) (match_dup 1))
4689 (set (match_dup 2) (match_dup 3))]
4690 "
4691 operands[2] = gen_highpart (SImode, operands[0]);
4692 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4693 operands[1]);
4694 operands[0] = gen_lowpart (SImode, operands[0]);
4695 operands[1] = gen_lowpart (SImode, operands[1]);
4696 "
4697 )
4698
4699 (define_split
4700 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4701 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4702 "TARGET_EITHER && reload_completed"
4703 [(set (match_dup 0) (match_dup 1))
4704 (set (match_dup 2) (match_dup 3))]
4705 "
4706 operands[2] = gen_highpart (SImode, operands[0]);
4707 operands[3] = gen_highpart (SImode, operands[1]);
4708 operands[0] = gen_lowpart (SImode, operands[0]);
4709 operands[1] = gen_lowpart (SImode, operands[1]);
4710
4711 /* Handle a partial overlap. */
4712 if (rtx_equal_p (operands[0], operands[3]))
4713 {
4714 rtx tmp0 = operands[0];
4715 rtx tmp1 = operands[1];
4716
4717 operands[0] = operands[2];
4718 operands[1] = operands[3];
4719 operands[2] = tmp0;
4720 operands[3] = tmp1;
4721 }
4722 "
4723 )
4724
4725 ;; We can't actually do base+index doubleword loads if the index and
4726 ;; destination overlap. Split here so that we at least have chance to
4727 ;; schedule.
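;; For example, a load of the form (mem:DI (plus r0 r1)) whose destination is
;; the register pair {r0, r1} cannot use LDRD directly; the split below first
;; computes the address into the low destination register and then loads
;; through it.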
4728 (define_split
4729 [(set (match_operand:DI 0 "s_register_operand" "")
4730 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4731 (match_operand:SI 2 "s_register_operand" ""))))]
4732 "TARGET_LDRD
4733 && reg_overlap_mentioned_p (operands[0], operands[1])
4734 && reg_overlap_mentioned_p (operands[0], operands[2])"
4735 [(set (match_dup 4)
4736 (plus:SI (match_dup 1)
4737 (match_dup 2)))
4738 (set (match_dup 0)
4739 (mem:DI (match_dup 4)))]
4740 "
4741   operands[4] = gen_rtx_REG (SImode, REGNO (operands[0]));
4742 "
4743 )
4744
4745 ;;; ??? This should have alternatives for constants.
4746 ;;; ??? This was originally identical to the movdf_insn pattern.
4747 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4748 ;;; thumb_reorg with a memory reference.
4749 (define_insn "*thumb1_movdi_insn"
4750 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4751 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4752 "TARGET_THUMB1
4753 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4754 && ( register_operand (operands[0], DImode)
4755 || register_operand (operands[1], DImode))"
4756 "*
4757 {
4758 switch (which_alternative)
4759 {
4760 default:
4761 case 0:
4762 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4763 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4764 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4765 case 1:
4766 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4767 case 2:
4768 operands[1] = GEN_INT (- INTVAL (operands[1]));
4769 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4770 case 3:
4771 return \"ldmia\\t%1, {%0, %H0}\";
4772 case 4:
4773 return \"stmia\\t%0, {%1, %H1}\";
4774 case 5:
4775 return thumb_load_double_from_address (operands);
4776 case 6:
4777 operands[2] = gen_rtx_MEM (SImode,
4778 plus_constant (XEXP (operands[0], 0), 4));
4779 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4780 return \"\";
4781 case 7:
4782 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4783 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4784 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4785 }
4786 }"
4787 [(set_attr "length" "4,4,6,2,2,6,4,4")
4788 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4789 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4790 )
4791
4792 (define_expand "movsi"
4793 [(set (match_operand:SI 0 "general_operand" "")
4794 (match_operand:SI 1 "general_operand" ""))]
4795 "TARGET_EITHER"
4796 "
4797 {
4798 rtx base, offset, tmp;
4799
4800 if (TARGET_32BIT)
4801 {
4802 /* Everything except mem = const or mem = mem can be done easily. */
4803 if (GET_CODE (operands[0]) == MEM)
4804 operands[1] = force_reg (SImode, operands[1]);
4805 if (arm_general_register_operand (operands[0], SImode)
4806 && GET_CODE (operands[1]) == CONST_INT
4807 && !(const_ok_for_arm (INTVAL (operands[1]))
4808 || const_ok_for_arm (~INTVAL (operands[1]))))
4809 {
4810 arm_split_constant (SET, SImode, NULL_RTX,
4811 INTVAL (operands[1]), operands[0], NULL_RTX,
4812 optimize && can_create_pseudo_p ());
4813 DONE;
4814 }
4815 }
4816 else /* TARGET_THUMB1... */
4817 {
4818 if (can_create_pseudo_p ())
4819 {
4820 if (GET_CODE (operands[0]) != REG)
4821 operands[1] = force_reg (SImode, operands[1]);
4822 }
4823 }
4824
4825 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4826 {
4827 split_const (operands[1], &base, &offset);
4828 if (GET_CODE (base) == SYMBOL_REF
4829 && !offset_within_block_p (base, INTVAL (offset)))
4830 {
4831 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4832 emit_move_insn (tmp, base);
4833 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4834 DONE;
4835 }
4836 }
4837
4838   /* Recognize the case where operands[1] is a reference to thread-local
4839      data and load its address into a register.  */
4840 if (arm_tls_referenced_p (operands[1]))
4841 {
4842 rtx tmp = operands[1];
4843 rtx addend = NULL;
4844
4845 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4846 {
4847 addend = XEXP (XEXP (tmp, 0), 1);
4848 tmp = XEXP (XEXP (tmp, 0), 0);
4849 }
4850
4851 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4852 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4853
4854 tmp = legitimize_tls_address (tmp,
4855 !can_create_pseudo_p () ? operands[0] : 0);
4856 if (addend)
4857 {
4858 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4859 tmp = force_operand (tmp, operands[0]);
4860 }
4861 operands[1] = tmp;
4862 }
4863 else if (flag_pic
4864 && (CONSTANT_P (operands[1])
4865 || symbol_mentioned_p (operands[1])
4866 || label_mentioned_p (operands[1])))
4867 operands[1] = legitimize_pic_address (operands[1], SImode,
4868 (!can_create_pseudo_p ()
4869 ? operands[0]
4870 : 0));
4871 }
4872 "
4873 )
4874
4875 (define_insn "*arm_movsi_insn"
4876 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4877 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4878 "TARGET_ARM && ! TARGET_IWMMXT
4879 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4880 && ( register_operand (operands[0], SImode)
4881 || register_operand (operands[1], SImode))"
4882 "@
4883 mov%?\\t%0, %1
4884 mov%?\\t%0, %1
4885 mvn%?\\t%0, #%B1
4886 movw%?\\t%0, %1
4887 ldr%?\\t%0, %1
4888 str%?\\t%1, %0"
4889 [(set_attr "type" "*,*,*,*,load1,store1")
4890 (set_attr "predicable" "yes")
4891 (set_attr "pool_range" "*,*,*,*,4096,*")
4892 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
4893 )
4894
4895 (define_split
4896 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4897 (match_operand:SI 1 "const_int_operand" ""))]
4898 "TARGET_32BIT
4899 && (!(const_ok_for_arm (INTVAL (operands[1]))
4900 || const_ok_for_arm (~INTVAL (operands[1]))))"
4901 [(clobber (const_int 0))]
4902 "
4903 arm_split_constant (SET, SImode, NULL_RTX,
4904 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4905 DONE;
4906 "
4907 )
4908
4909 (define_insn "*thumb1_movsi_insn"
4910 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
4911 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
4912 "TARGET_THUMB1
4913 && ( register_operand (operands[0], SImode)
4914 || register_operand (operands[1], SImode))"
4915 "@
4916 mov %0, %1
4917 mov %0, %1
4918 #
4919 #
4920 ldmia\\t%1, {%0}
4921 stmia\\t%0, {%1}
4922 ldr\\t%0, %1
4923 str\\t%1, %0
4924 mov\\t%0, %1"
4925 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4926 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4927 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4928 )
4929
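;; The two splits below synthesize Thumb-1 constants that a single MOV cannot
;; encode.  A small negative constant (constraint J) is loaded as its absolute
;; value and then negated; a "shifted byte" constant (constraint K) is loaded
;; as the byte value and then shifted left.  For illustration only:
;;   -100    ->  mov r0, #100     ; neg r0, r0
;;   0x1fe0  ->  mov r0, #255     ; lsl r0, r0, #5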
4930 (define_split
4931 [(set (match_operand:SI 0 "register_operand" "")
4932 (match_operand:SI 1 "const_int_operand" ""))]
4933 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4934 [(set (match_dup 0) (match_dup 1))
4935 (set (match_dup 0) (neg:SI (match_dup 0)))]
4936 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4937 )
4938
4939 (define_split
4940 [(set (match_operand:SI 0 "register_operand" "")
4941 (match_operand:SI 1 "const_int_operand" ""))]
4942 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4943 [(set (match_dup 0) (match_dup 1))
4944 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4945 "
4946 {
4947 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4948 unsigned HOST_WIDE_INT mask = 0xff;
4949 int i;
4950
4951 for (i = 0; i < 25; i++)
4952 if ((val & (mask << i)) == val)
4953 break;
4954
4955 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4956 if (i == 0)
4957 FAIL;
4958
4959 operands[1] = GEN_INT (val >> i);
4960 operands[2] = GEN_INT (i);
4961 }"
4962 )
4963
4964 ;; When generating pic, we need to load the symbol offset into a register.
4965 ;; So that the optimizer does not confuse this with a normal symbol load,
4966 ;; we use an unspec.  The offset will be loaded from a constant pool entry,
4967 ;; since that is the only type of relocation we can use.
4968
4969 ;; The rather odd constraints on the following are to force reload to leave
4970 ;; the insn alone, and to force the minipool generation pass to then move
4971 ;; the GOT symbol to memory.
4972
4973 (define_insn "pic_load_addr_arm"
4974 [(set (match_operand:SI 0 "s_register_operand" "=r")
4975 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4976 "TARGET_ARM && flag_pic"
4977 "ldr%?\\t%0, %1"
4978 [(set_attr "type" "load1")
4979 (set (attr "pool_range") (const_int 4096))
4980 (set (attr "neg_pool_range") (const_int 4084))]
4981 )
4982
4983 (define_insn "pic_load_addr_thumb1"
4984 [(set (match_operand:SI 0 "s_register_operand" "=l")
4985 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4986 "TARGET_THUMB1 && flag_pic"
4987 "ldr\\t%0, %1"
4988 [(set_attr "type" "load1")
4989 (set (attr "pool_range") (const_int 1024))]
4990 )
4991
4992 (define_insn "pic_add_dot_plus_four"
4993 [(set (match_operand:SI 0 "register_operand" "=r")
4994 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4995 (const (plus:SI (pc) (const_int 4))))
4996 (match_operand 2 "" "")]
4997 UNSPEC_PIC_BASE))]
4998 "TARGET_THUMB1"
4999 "*
5000 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5001 INTVAL (operands[2]));
5002 return \"add\\t%0, %|pc\";
5003 "
5004 [(set_attr "length" "2")]
5005 )
5006
5007 (define_insn "pic_add_dot_plus_eight"
5008 [(set (match_operand:SI 0 "register_operand" "=r")
5009 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5010 (const (plus:SI (pc) (const_int 8))))
5011 (match_operand 2 "" "")]
5012 UNSPEC_PIC_BASE))]
5013 "TARGET_ARM"
5014 "*
5015 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5016 INTVAL (operands[2]));
5017 return \"add%?\\t%0, %|pc, %1\";
5018 "
5019 [(set_attr "predicable" "yes")]
5020 )
5021
5022 (define_insn "tls_load_dot_plus_eight"
5023 [(set (match_operand:SI 0 "register_operand" "+r")
5024 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5025 (const (plus:SI (pc) (const_int 8))))
5026 (match_operand 2 "" "")]
5027 UNSPEC_PIC_BASE)))]
5028 "TARGET_ARM"
5029 "*
5030 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5031 INTVAL (operands[2]));
5032 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5033 "
5034 [(set_attr "predicable" "yes")]
5035 )
5036
5037 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5038 ;; followed by a load. These sequences can be crunched down to
5039 ;; tls_load_dot_plus_eight by a peephole.
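;; That is, a sequence such as
;;   add rT, pc, rOFF        @ pic_add_dot_plus_eight
;;   ldr rD, [rT]
;; becomes a single "ldr rD, [pc, rOFF]" when rT is dead afterwards, which is
;; what the peephole below checks with peep2_reg_dead_p.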
5040
5041 (define_peephole2
5042 [(parallel [(set (match_operand:SI 0 "register_operand" "")
5043 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
5044 (const (plus:SI (pc) (const_int 8))))]
5045 UNSPEC_PIC_BASE))
5046 (use (label_ref (match_operand 1 "" "")))])
5047 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5048 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5049 [(parallel [(set (match_dup 2)
5050 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
5051 (const (plus:SI (pc) (const_int 8))))]
5052 UNSPEC_PIC_BASE)))
5053 (use (label_ref (match_dup 1)))])]
5054 ""
5055 )
5056
5057 (define_insn "pic_offset_arm"
5058 [(set (match_operand:SI 0 "register_operand" "=r")
5059 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5060 (unspec:SI [(match_operand:SI 2 "" "X")]
5061 UNSPEC_PIC_OFFSET))))]
5062 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5063 "ldr%?\\t%0, [%1,%2]"
5064 [(set_attr "type" "load1")]
5065 )
5066
5067 (define_expand "builtin_setjmp_receiver"
5068 [(label_ref (match_operand 0 "" ""))]
5069 "flag_pic"
5070 "
5071 {
5072 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5073 register. */
5074 if (arm_pic_register != INVALID_REGNUM)
5075 arm_load_pic_register (1UL << 3);
5076 DONE;
5077 }")
5078
5079 ;; If copying one reg to another we can set the condition codes according to
5080 ;; its value. Such a move is common after a return from a subroutine when the
5081 ;; result is being tested against zero.
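;;
;; For instance (registers illustrative), rather than emitting
;;         mov     r0, r1
;;         cmp     r0, #0
;; the second alternative of the pattern below produces the single instruction
;;         subs    r0, r1, #0
;; which performs the copy and sets the flags in one go.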
5082
5083 (define_insn "*movsi_compare0"
5084 [(set (reg:CC CC_REGNUM)
5085 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5086 (const_int 0)))
5087 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5088 (match_dup 1))]
5089 "TARGET_32BIT"
5090 "@
5091 cmp%?\\t%0, #0
5092 sub%.\\t%0, %1, #0"
5093 [(set_attr "conds" "set")]
5094 )
5095
5096 ;; Subroutine to store a half word from a register into memory.
5097 ;; Operand 0 is the source register (HImode)
5098 ;; Operand 1 is the destination address in a register (SImode)
5099
5100 ;; In both this routine and the next, we must be careful not to spill
5101 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5102 ;; can generate unrecognizable rtl.
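;;
;; As a sketch (little-endian, registers illustrative), storing r1 to the
;; address in r2 on a target without halfword stores expands to roughly:
;;         strb    r1, [r2]                  @ low byte
;;         mov     r3, r1, asr #8
;;         strb    r3, [r2, #1]              @ high byte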
5103
5104 (define_expand "storehi"
5105 [;; store the low byte
5106 (set (match_operand 1 "" "") (match_dup 3))
5107 ;; extract the high byte
5108 (set (match_dup 2)
5109 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5110 ;; store the high byte
5111 (set (match_dup 4) (match_dup 5))]
5112 "TARGET_ARM"
5113 "
5114 {
5115 rtx op1 = operands[1];
5116 rtx addr = XEXP (op1, 0);
5117 enum rtx_code code = GET_CODE (addr);
5118
5119 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5120 || code == MINUS)
5121 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5122
5123 operands[4] = adjust_address (op1, QImode, 1);
5124 operands[1] = adjust_address (operands[1], QImode, 0);
5125 operands[3] = gen_lowpart (QImode, operands[0]);
5126 operands[0] = gen_lowpart (SImode, operands[0]);
5127 operands[2] = gen_reg_rtx (SImode);
5128 operands[5] = gen_lowpart (QImode, operands[2]);
5129 }"
5130 )
5131
5132 (define_expand "storehi_bigend"
5133 [(set (match_dup 4) (match_dup 3))
5134 (set (match_dup 2)
5135 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5136 (set (match_operand 1 "" "") (match_dup 5))]
5137 "TARGET_ARM"
5138 "
5139 {
5140 rtx op1 = operands[1];
5141 rtx addr = XEXP (op1, 0);
5142 enum rtx_code code = GET_CODE (addr);
5143
5144 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5145 || code == MINUS)
5146 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5147
5148 operands[4] = adjust_address (op1, QImode, 1);
5149 operands[1] = adjust_address (operands[1], QImode, 0);
5150 operands[3] = gen_lowpart (QImode, operands[0]);
5151 operands[0] = gen_lowpart (SImode, operands[0]);
5152 operands[2] = gen_reg_rtx (SImode);
5153 operands[5] = gen_lowpart (QImode, operands[2]);
5154 }"
5155 )
5156
5157 ;; Subroutine to store a half word integer constant into memory.
5158 (define_expand "storeinthi"
5159 [(set (match_operand 0 "" "")
5160 (match_operand 1 "" ""))
5161 (set (match_dup 3) (match_dup 2))]
5162 "TARGET_ARM"
5163 "
5164 {
5165 HOST_WIDE_INT value = INTVAL (operands[1]);
5166 rtx addr = XEXP (operands[0], 0);
5167 rtx op0 = operands[0];
5168 enum rtx_code code = GET_CODE (addr);
5169
5170 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5171 || code == MINUS)
5172 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5173
5174 operands[1] = gen_reg_rtx (SImode);
5175 if (BYTES_BIG_ENDIAN)
5176 {
5177 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5178 if ((value & 255) == ((value >> 8) & 255))
5179 operands[2] = operands[1];
5180 else
5181 {
5182 operands[2] = gen_reg_rtx (SImode);
5183 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5184 }
5185 }
5186 else
5187 {
5188 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5189 if ((value & 255) == ((value >> 8) & 255))
5190 operands[2] = operands[1];
5191 else
5192 {
5193 operands[2] = gen_reg_rtx (SImode);
5194 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5195 }
5196 }
5197
5198 operands[3] = adjust_address (op0, QImode, 1);
5199 operands[0] = adjust_address (operands[0], QImode, 0);
5200 operands[2] = gen_lowpart (QImode, operands[2]);
5201 operands[1] = gen_lowpart (QImode, operands[1]);
5202 }"
5203 )
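
;; For instance (illustrative), storeinthi of the constant 0x4141 notices that
;; both bytes are equal, so a single "mov rN, #65" feeds both byte stores
;; instead of materializing two separate constants.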
5204
5205 (define_expand "storehi_single_op"
5206 [(set (match_operand:HI 0 "memory_operand" "")
5207 (match_operand:HI 1 "general_operand" ""))]
5208 "TARGET_32BIT && arm_arch4"
5209 "
5210 if (!s_register_operand (operands[1], HImode))
5211 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5212 "
5213 )
5214
5215 (define_expand "movhi"
5216 [(set (match_operand:HI 0 "general_operand" "")
5217 (match_operand:HI 1 "general_operand" ""))]
5218 "TARGET_EITHER"
5219 "
5220 if (TARGET_ARM)
5221 {
5222 if (can_create_pseudo_p ())
5223 {
5224 if (GET_CODE (operands[0]) == MEM)
5225 {
5226 if (arm_arch4)
5227 {
5228 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5229 DONE;
5230 }
5231 if (GET_CODE (operands[1]) == CONST_INT)
5232 emit_insn (gen_storeinthi (operands[0], operands[1]));
5233 else
5234 {
5235 if (GET_CODE (operands[1]) == MEM)
5236 operands[1] = force_reg (HImode, operands[1]);
5237 if (BYTES_BIG_ENDIAN)
5238 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5239 else
5240 emit_insn (gen_storehi (operands[1], operands[0]));
5241 }
5242 DONE;
5243 }
5244 /* Sign extend a constant, and keep it in an SImode reg. */
5245 else if (GET_CODE (operands[1]) == CONST_INT)
5246 {
5247 rtx reg = gen_reg_rtx (SImode);
5248 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5249
5250 /* If the constant is already valid, leave it alone. */
5251 if (!const_ok_for_arm (val))
5252 {
5253 /* If setting all the top bits will make the constant
5254 loadable in a single instruction, then set them.
5255 Otherwise, sign extend the number. */
5256
5257 if (const_ok_for_arm (~(val | ~0xffff)))
5258 val |= ~0xffff;
5259 else if (val & 0x8000)
5260 val |= ~0xffff;
5261 }
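	      /* Illustrative example: val = 0xfffe is not a valid immediate,
		 but ~(0xfffe | ~0xffff) == 1 is, so the top bits are set and
		 the value can then be loaded with a single MVN.  */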
5262
5263 emit_insn (gen_movsi (reg, GEN_INT (val)));
5264 operands[1] = gen_lowpart (HImode, reg);
5265 }
5266 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5267 && GET_CODE (operands[1]) == MEM)
5268 {
5269 rtx reg = gen_reg_rtx (SImode);
5270
5271 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5272 operands[1] = gen_lowpart (HImode, reg);
5273 }
5274 else if (!arm_arch4)
5275 {
5276 if (GET_CODE (operands[1]) == MEM)
5277 {
5278 rtx base;
5279 rtx offset = const0_rtx;
5280 rtx reg = gen_reg_rtx (SImode);
5281
5282 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5283 || (GET_CODE (base) == PLUS
5284 && (GET_CODE (offset = XEXP (base, 1))
5285 == CONST_INT)
5286 && ((INTVAL(offset) & 1) != 1)
5287 && GET_CODE (base = XEXP (base, 0)) == REG))
5288 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5289 {
5290 rtx new_rtx;
5291
5292 new_rtx = widen_memory_access (operands[1], SImode,
5293 ((INTVAL (offset) & ~3)
5294 - INTVAL (offset)));
5295 emit_insn (gen_movsi (reg, new_rtx));
5296 if (((INTVAL (offset) & 2) != 0)
5297 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5298 {
5299 rtx reg2 = gen_reg_rtx (SImode);
5300
5301 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5302 reg = reg2;
5303 }
5304 }
5305 else
5306 emit_insn (gen_movhi_bytes (reg, operands[1]));
5307
5308 operands[1] = gen_lowpart (HImode, reg);
5309 }
5310 }
5311 }
5312 /* Handle loading a large integer during reload. */
5313 else if (GET_CODE (operands[1]) == CONST_INT
5314 && !const_ok_for_arm (INTVAL (operands[1]))
5315 && !const_ok_for_arm (~INTVAL (operands[1])))
5316 {
5317 /* Writing a constant to memory needs a scratch, which should
5318 be handled with SECONDARY_RELOADs. */
5319 gcc_assert (GET_CODE (operands[0]) == REG);
5320
5321 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5322 emit_insn (gen_movsi (operands[0], operands[1]));
5323 DONE;
5324 }
5325 }
5326 else if (TARGET_THUMB2)
5327 {
5328      /* Thumb-2 can easily do everything except mem=mem and mem=const.  */
5329 if (can_create_pseudo_p ())
5330 {
5331 if (GET_CODE (operands[0]) != REG)
5332 operands[1] = force_reg (HImode, operands[1]);
5333 /* Zero extend a constant, and keep it in an SImode reg. */
5334 else if (GET_CODE (operands[1]) == CONST_INT)
5335 {
5336 rtx reg = gen_reg_rtx (SImode);
5337 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5338
5339 emit_insn (gen_movsi (reg, GEN_INT (val)));
5340 operands[1] = gen_lowpart (HImode, reg);
5341 }
5342 }
5343 }
5344 else /* TARGET_THUMB1 */
5345 {
5346 if (can_create_pseudo_p ())
5347 {
5348 if (GET_CODE (operands[1]) == CONST_INT)
5349 {
5350 rtx reg = gen_reg_rtx (SImode);
5351
5352 emit_insn (gen_movsi (reg, operands[1]));
5353 operands[1] = gen_lowpart (HImode, reg);
5354 }
5355
5356 /* ??? We shouldn't really get invalid addresses here, but this can
5357 	     happen if we are passed an SP (never OK for HImode/QImode) or
5358 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5359 HImode/QImode) relative address. */
5360 /* ??? This should perhaps be fixed elsewhere, for instance, in
5361 fixup_stack_1, by checking for other kinds of invalid addresses,
5362 e.g. a bare reference to a virtual register. This may confuse the
5363 alpha though, which must handle this case differently. */
5364 if (GET_CODE (operands[0]) == MEM
5365 && !memory_address_p (GET_MODE (operands[0]),
5366 XEXP (operands[0], 0)))
5367 operands[0]
5368 = replace_equiv_address (operands[0],
5369 copy_to_reg (XEXP (operands[0], 0)));
5370
5371 if (GET_CODE (operands[1]) == MEM
5372 && !memory_address_p (GET_MODE (operands[1]),
5373 XEXP (operands[1], 0)))
5374 operands[1]
5375 = replace_equiv_address (operands[1],
5376 copy_to_reg (XEXP (operands[1], 0)));
5377
5378 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5379 {
5380 rtx reg = gen_reg_rtx (SImode);
5381
5382 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5383 operands[1] = gen_lowpart (HImode, reg);
5384 }
5385
5386 if (GET_CODE (operands[0]) == MEM)
5387 operands[1] = force_reg (HImode, operands[1]);
5388 }
5389 else if (GET_CODE (operands[1]) == CONST_INT
5390 && !satisfies_constraint_I (operands[1]))
5391 {
5392 /* Handle loading a large integer during reload. */
5393
5394 /* Writing a constant to memory needs a scratch, which should
5395 be handled with SECONDARY_RELOADs. */
5396 gcc_assert (GET_CODE (operands[0]) == REG);
5397
5398 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5399 emit_insn (gen_movsi (operands[0], operands[1]));
5400 DONE;
5401 }
5402 }
5403 "
5404 )
5405
5406 (define_insn "*thumb1_movhi_insn"
5407 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5408 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5409 "TARGET_THUMB1
5410 && ( register_operand (operands[0], HImode)
5411 || register_operand (operands[1], HImode))"
5412 "*
5413 switch (which_alternative)
5414 {
5415 case 0: return \"add %0, %1, #0\";
5416 case 2: return \"strh %1, %0\";
5417 case 3: return \"mov %0, %1\";
5418 case 4: return \"mov %0, %1\";
5419 case 5: return \"mov %0, %1\";
5420 default: gcc_unreachable ();
5421 case 1:
5422 /* The stack pointer can end up being taken as an index register.
5423 Catch this case here and deal with it. */
5424 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5425 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5426 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5427 {
5428 rtx ops[2];
5429 ops[0] = operands[0];
5430 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5431
5432 output_asm_insn (\"mov %0, %1\", ops);
5433
5434 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5435
5436 }
5437 return \"ldrh %0, %1\";
5438 }"
5439 [(set_attr "length" "2,4,2,2,2,2")
5440 (set_attr "type" "*,load1,store1,*,*,*")]
5441 )
5442
5443
5444 (define_expand "movhi_bytes"
5445 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5446 (set (match_dup 3)
5447 (zero_extend:SI (match_dup 6)))
5448 (set (match_operand:SI 0 "" "")
5449 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5450 "TARGET_ARM"
5451 "
5452 {
5453 rtx mem1, mem2;
5454 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5455
5456 mem1 = change_address (operands[1], QImode, addr);
5457 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5458 operands[0] = gen_lowpart (SImode, operands[0]);
5459 operands[1] = mem1;
5460 operands[2] = gen_reg_rtx (SImode);
5461 operands[3] = gen_reg_rtx (SImode);
5462 operands[6] = mem2;
5463
5464 if (BYTES_BIG_ENDIAN)
5465 {
5466 operands[4] = operands[2];
5467 operands[5] = operands[3];
5468 }
5469 else
5470 {
5471 operands[4] = operands[3];
5472 operands[5] = operands[2];
5473 }
5474 }"
5475 )
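
;; A sketch of what movhi_bytes expands to on a little-endian target
;; (registers illustrative):
;;         ldrb    r2, [r3]                  @ low byte
;;         ldrb    r4, [r3, #1]              @ high byte
;;         orr     r0, r2, r4, lsl #8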
5476
5477 (define_expand "movhi_bigend"
5478 [(set (match_dup 2)
5479 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5480 (const_int 16)))
5481 (set (match_dup 3)
5482 (ashiftrt:SI (match_dup 2) (const_int 16)))
5483 (set (match_operand:HI 0 "s_register_operand" "")
5484 (match_dup 4))]
5485 "TARGET_ARM"
5486 "
5487 operands[2] = gen_reg_rtx (SImode);
5488 operands[3] = gen_reg_rtx (SImode);
5489 operands[4] = gen_lowpart (HImode, operands[3]);
5490 "
5491 )
5492
5493 ;; Pattern to recognize the insn generated by the default case above
5494 (define_insn "*movhi_insn_arch4"
5495 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5496 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5497 "TARGET_ARM
5498 && arm_arch4
5499 && (GET_CODE (operands[1]) != CONST_INT
5500 || const_ok_for_arm (INTVAL (operands[1]))
5501 || const_ok_for_arm (~INTVAL (operands[1])))"
5502 "@
5503 mov%?\\t%0, %1\\t%@ movhi
5504 mvn%?\\t%0, #%B1\\t%@ movhi
5505 str%(h%)\\t%1, %0\\t%@ movhi
5506 ldr%(h%)\\t%0, %1\\t%@ movhi"
5507 [(set_attr "type" "*,*,store1,load1")
5508 (set_attr "predicable" "yes")
5509 (set_attr "pool_range" "*,*,*,256")
5510 (set_attr "neg_pool_range" "*,*,*,244")]
5511 )
5512
5513 (define_insn "*movhi_bytes"
5514 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5515 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5516 "TARGET_ARM"
5517 "@
5518 mov%?\\t%0, %1\\t%@ movhi
5519 mvn%?\\t%0, #%B1\\t%@ movhi"
5520 [(set_attr "predicable" "yes")]
5521 )
5522
5523 (define_expand "thumb_movhi_clobber"
5524 [(set (match_operand:HI 0 "memory_operand" "")
5525 (match_operand:HI 1 "register_operand" ""))
5526 (clobber (match_operand:DI 2 "register_operand" ""))]
5527 "TARGET_THUMB1"
5528 "
5529 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5530 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5531 {
5532 emit_insn (gen_movhi (operands[0], operands[1]));
5533 DONE;
5534 }
5535 /* XXX Fixme, need to handle other cases here as well. */
5536 gcc_unreachable ();
5537 "
5538 )
5539
5540 ;; We use a DImode scratch because we may occasionally need an additional
5541 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5542 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5543 (define_expand "reload_outhi"
5544 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5545 (match_operand:HI 1 "s_register_operand" "r")
5546 (match_operand:DI 2 "s_register_operand" "=&l")])]
5547 "TARGET_EITHER"
5548 "if (TARGET_ARM)
5549 arm_reload_out_hi (operands);
5550 else
5551 thumb_reload_out_hi (operands);
5552 DONE;
5553 "
5554 )
5555
5556 (define_expand "reload_inhi"
5557 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5558 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5559 (match_operand:DI 2 "s_register_operand" "=&r")])]
5560 "TARGET_EITHER"
5561 "
5562 if (TARGET_ARM)
5563 arm_reload_in_hi (operands);
5564 else
5565 thumb_reload_out_hi (operands);
5566 DONE;
5567 ")
5568
5569 (define_expand "movqi"
5570 [(set (match_operand:QI 0 "general_operand" "")
5571 (match_operand:QI 1 "general_operand" ""))]
5572 "TARGET_EITHER"
5573 "
5574   /* Everything except mem = const or mem = mem can be done easily.  */
5575
5576 if (can_create_pseudo_p ())
5577 {
5578 if (GET_CODE (operands[1]) == CONST_INT)
5579 {
5580 rtx reg = gen_reg_rtx (SImode);
5581
5582 emit_insn (gen_movsi (reg, operands[1]));
5583 operands[1] = gen_lowpart (QImode, reg);
5584 }
5585
5586 if (TARGET_THUMB)
5587 {
5588 /* ??? We shouldn't really get invalid addresses here, but this can
5589 	 happen if we are passed an SP (never OK for HImode/QImode) or
5590 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5591 HImode/QImode) relative address. */
5592 /* ??? This should perhaps be fixed elsewhere, for instance, in
5593 fixup_stack_1, by checking for other kinds of invalid addresses,
5594 e.g. a bare reference to a virtual register. This may confuse the
5595 alpha though, which must handle this case differently. */
5596 if (GET_CODE (operands[0]) == MEM
5597 && !memory_address_p (GET_MODE (operands[0]),
5598 XEXP (operands[0], 0)))
5599 operands[0]
5600 = replace_equiv_address (operands[0],
5601 copy_to_reg (XEXP (operands[0], 0)));
5602 if (GET_CODE (operands[1]) == MEM
5603 && !memory_address_p (GET_MODE (operands[1]),
5604 XEXP (operands[1], 0)))
5605 operands[1]
5606 = replace_equiv_address (operands[1],
5607 copy_to_reg (XEXP (operands[1], 0)));
5608 }
5609
5610 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5611 {
5612 rtx reg = gen_reg_rtx (SImode);
5613
5614 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5615 operands[1] = gen_lowpart (QImode, reg);
5616 }
5617
5618 if (GET_CODE (operands[0]) == MEM)
5619 operands[1] = force_reg (QImode, operands[1]);
5620 }
5621 else if (TARGET_THUMB
5622 && GET_CODE (operands[1]) == CONST_INT
5623 && !satisfies_constraint_I (operands[1]))
5624 {
5625 /* Handle loading a large integer during reload. */
5626
5627 /* Writing a constant to memory needs a scratch, which should
5628 be handled with SECONDARY_RELOADs. */
5629 gcc_assert (GET_CODE (operands[0]) == REG);
5630
5631 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5632 emit_insn (gen_movsi (operands[0], operands[1]));
5633 DONE;
5634 }
5635 "
5636 )
5637
5638
5639 (define_insn "*arm_movqi_insn"
5640 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5641 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5642 "TARGET_32BIT
5643 && ( register_operand (operands[0], QImode)
5644 || register_operand (operands[1], QImode))"
5645 "@
5646 mov%?\\t%0, %1
5647 mvn%?\\t%0, #%B1
5648 ldr%(b%)\\t%0, %1
5649 str%(b%)\\t%1, %0"
5650 [(set_attr "type" "*,*,load1,store1")
5651 (set_attr "predicable" "yes")]
5652 )
5653
5654 (define_insn "*thumb1_movqi_insn"
5655 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5656 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5657 "TARGET_THUMB1
5658 && ( register_operand (operands[0], QImode)
5659 || register_operand (operands[1], QImode))"
5660 "@
5661 add\\t%0, %1, #0
5662 ldrb\\t%0, %1
5663 strb\\t%1, %0
5664 mov\\t%0, %1
5665 mov\\t%0, %1
5666 mov\\t%0, %1"
5667 [(set_attr "length" "2")
5668 (set_attr "type" "*,load1,store1,*,*,*")
5669 (set_attr "pool_range" "*,32,*,*,*,*")]
5670 )
5671
5672 (define_expand "movsf"
5673 [(set (match_operand:SF 0 "general_operand" "")
5674 (match_operand:SF 1 "general_operand" ""))]
5675 "TARGET_EITHER"
5676 "
5677 if (TARGET_32BIT)
5678 {
5679 if (GET_CODE (operands[0]) == MEM)
5680 operands[1] = force_reg (SFmode, operands[1]);
5681 }
5682 else /* TARGET_THUMB1 */
5683 {
5684 if (can_create_pseudo_p ())
5685 {
5686 if (GET_CODE (operands[0]) != REG)
5687 operands[1] = force_reg (SFmode, operands[1]);
5688 }
5689 }
5690 "
5691 )
5692
5693 ;; When moving a floating-point constant into a core register, rewrite the
5694 ;; move as an SImode operation.
5695 (define_split
5696 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5697 (match_operand:SF 1 "immediate_operand" ""))]
5698 "TARGET_32BIT
5699 && reload_completed
5700 && GET_CODE (operands[1]) == CONST_DOUBLE"
5701 [(set (match_dup 2) (match_dup 3))]
5702 "
5703 operands[2] = gen_lowpart (SImode, operands[0]);
5704 operands[3] = gen_lowpart (SImode, operands[1]);
5705 if (operands[2] == 0 || operands[3] == 0)
5706 FAIL;
5707 "
5708 )
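
;; For example (illustrative), after reload a move of the SFmode constant 2.0
;; into r0 is rewritten as an SImode move of its bit pattern, roughly
;;         mov     r0, #0x40000000
;; so no float-specific handling is needed when the destination is a core
;; register.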
5709
5710 (define_insn "*arm_movsf_soft_insn"
5711 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5712 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5713 "TARGET_ARM
5714 && TARGET_SOFT_FLOAT
5715 && (GET_CODE (operands[0]) != MEM
5716 || register_operand (operands[1], SFmode))"
5717 "@
5718 mov%?\\t%0, %1
5719 ldr%?\\t%0, %1\\t%@ float
5720 str%?\\t%1, %0\\t%@ float"
5721 [(set_attr "length" "4,4,4")
5722 (set_attr "predicable" "yes")
5723 (set_attr "type" "*,load1,store1")
5724 (set_attr "pool_range" "*,4096,*")
5725 (set_attr "neg_pool_range" "*,4084,*")]
5726 )
5727
5728 ;;; ??? This should have alternatives for constants.
5729 (define_insn "*thumb1_movsf_insn"
5730 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5731 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5732 "TARGET_THUMB1
5733 && ( register_operand (operands[0], SFmode)
5734 || register_operand (operands[1], SFmode))"
5735 "@
5736 add\\t%0, %1, #0
5737 ldmia\\t%1, {%0}
5738 stmia\\t%0, {%1}
5739 ldr\\t%0, %1
5740 str\\t%1, %0
5741 mov\\t%0, %1
5742 mov\\t%0, %1"
5743 [(set_attr "length" "2")
5744 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5745 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5746 )
5747
5748 (define_expand "movdf"
5749 [(set (match_operand:DF 0 "general_operand" "")
5750 (match_operand:DF 1 "general_operand" ""))]
5751 "TARGET_EITHER"
5752 "
5753 if (TARGET_32BIT)
5754 {
5755 if (GET_CODE (operands[0]) == MEM)
5756 operands[1] = force_reg (DFmode, operands[1]);
5757 }
5758 else /* TARGET_THUMB */
5759 {
5760 if (can_create_pseudo_p ())
5761 {
5762 if (GET_CODE (operands[0]) != REG)
5763 operands[1] = force_reg (DFmode, operands[1]);
5764 }
5765 }
5766 "
5767 )
5768
5769 ;; Reloading a DFmode value stored in integer regs to memory can require a
5770 ;; scratch reg.
5771 (define_expand "reload_outdf"
5772 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5773 (match_operand:DF 1 "s_register_operand" "r")
5774 (match_operand:SI 2 "s_register_operand" "=&r")]
5775 "TARGET_32BIT"
5776 "
5777 {
5778 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5779
5780 if (code == REG)
5781 operands[2] = XEXP (operands[0], 0);
5782 else if (code == POST_INC || code == PRE_DEC)
5783 {
5784 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5785 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5786 emit_insn (gen_movdi (operands[0], operands[1]));
5787 DONE;
5788 }
5789 else if (code == PRE_INC)
5790 {
5791 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5792
5793 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5794 operands[2] = reg;
5795 }
5796 else if (code == POST_DEC)
5797 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5798 else
5799 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5800 XEXP (XEXP (operands[0], 0), 1)));
5801
5802 emit_insn (gen_rtx_SET (VOIDmode,
5803 replace_equiv_address (operands[0], operands[2]),
5804 operands[1]));
5805
5806 if (code == POST_DEC)
5807 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5808
5809 DONE;
5810 }"
5811 )
5812
5813 (define_insn "*movdf_soft_insn"
5814 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5815 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5816 "TARGET_ARM && TARGET_SOFT_FLOAT
5817 && ( register_operand (operands[0], DFmode)
5818 || register_operand (operands[1], DFmode))"
5819 "*
5820 switch (which_alternative)
5821 {
5822 case 0:
5823 case 1:
5824 case 2:
5825 return \"#\";
5826 default:
5827 return output_move_double (operands);
5828 }
5829 "
5830 [(set_attr "length" "8,12,16,8,8")
5831 (set_attr "type" "*,*,*,load2,store2")
5832 (set_attr "pool_range" "1020")
5833 (set_attr "neg_pool_range" "1008")]
5834 )
5835
5836 ;;; ??? This should have alternatives for constants.
5837 ;;; ??? This was originally identical to the movdi_insn pattern.
5838 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5839 ;;; thumb_reorg with a memory reference.
5840 (define_insn "*thumb_movdf_insn"
5841 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5842 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5843 "TARGET_THUMB1
5844 && ( register_operand (operands[0], DFmode)
5845 || register_operand (operands[1], DFmode))"
5846 "*
5847 switch (which_alternative)
5848 {
5849 default:
5850 case 0:
5851 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5852 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5853 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5854 case 1:
5855 return \"ldmia\\t%1, {%0, %H0}\";
5856 case 2:
5857 return \"stmia\\t%0, {%1, %H1}\";
5858 case 3:
5859 return thumb_load_double_from_address (operands);
5860 case 4:
5861 operands[2] = gen_rtx_MEM (SImode,
5862 plus_constant (XEXP (operands[0], 0), 4));
5863 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5864 return \"\";
5865 case 5:
5866 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5867 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5868 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5869 }
5870 "
5871 [(set_attr "length" "4,2,2,6,4,4")
5872 (set_attr "type" "*,load2,store2,load2,store2,*")
5873 (set_attr "pool_range" "*,*,*,1020,*,*")]
5874 )
5875
5876 (define_expand "movxf"
5877 [(set (match_operand:XF 0 "general_operand" "")
5878 (match_operand:XF 1 "general_operand" ""))]
5879 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5880 "
5881 if (GET_CODE (operands[0]) == MEM)
5882 operands[1] = force_reg (XFmode, operands[1]);
5883 "
5884 )
5885
5886 \f
5887
5888 ;; load- and store-multiple insns
5889 ;; The ARM can load/store any set of registers, provided that they are in
5890 ;; ascending order; but that is beyond GCC, so we stick with what it knows.
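;;
;; For example (registers illustrative), loading four consecutive words
;; starting at the address in r0 into r4-r7 can be emitted as the single
;;         ldmia   r0, {r4, r5, r6, r7}
;; rather than four separate ldr instructions.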
5891
5892 (define_expand "load_multiple"
5893 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5894 (match_operand:SI 1 "" ""))
5895 (use (match_operand:SI 2 "" ""))])]
5896 "TARGET_32BIT"
5897 {
5898 HOST_WIDE_INT offset = 0;
5899
5900 /* Support only fixed point registers. */
5901 if (GET_CODE (operands[2]) != CONST_INT
5902 || INTVAL (operands[2]) > 14
5903 || INTVAL (operands[2]) < 2
5904 || GET_CODE (operands[1]) != MEM
5905 || GET_CODE (operands[0]) != REG
5906 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5907 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5908 FAIL;
5909
5910 operands[3]
5911 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5912 force_reg (SImode, XEXP (operands[1], 0)),
5913 TRUE, FALSE, operands[1], &offset);
5914 })
5915
5916 ;; Load multiple with write-back
5917
5918 (define_insn "*ldmsi_postinc4"
5919 [(match_parallel 0 "load_multiple_operation"
5920 [(set (match_operand:SI 1 "s_register_operand" "=r")
5921 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5922 (const_int 16)))
5923 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5924 (mem:SI (match_dup 2)))
5925 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5926 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5927 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5928 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5929 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5930 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5931 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5932 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5933 [(set_attr "type" "load4")
5934 (set_attr "predicable" "yes")]
5935 )
5936
5937 (define_insn "*ldmsi_postinc4_thumb1"
5938 [(match_parallel 0 "load_multiple_operation"
5939 [(set (match_operand:SI 1 "s_register_operand" "=l")
5940 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5941 (const_int 16)))
5942 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5943 (mem:SI (match_dup 2)))
5944 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5945 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5946 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5947 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5948 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5949 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5950 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5951 "ldmia\\t%1!, {%3, %4, %5, %6}"
5952 [(set_attr "type" "load4")]
5953 )
5954
5955 (define_insn "*ldmsi_postinc3"
5956 [(match_parallel 0 "load_multiple_operation"
5957 [(set (match_operand:SI 1 "s_register_operand" "=r")
5958 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5959 (const_int 12)))
5960 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5961 (mem:SI (match_dup 2)))
5962 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5963 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5964 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5965 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5966 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5967 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5968 [(set_attr "type" "load3")
5969 (set_attr "predicable" "yes")]
5970 )
5971
5972 (define_insn "*ldmsi_postinc2"
5973 [(match_parallel 0 "load_multiple_operation"
5974 [(set (match_operand:SI 1 "s_register_operand" "=r")
5975 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5976 (const_int 8)))
5977 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5978 (mem:SI (match_dup 2)))
5979 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5980 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5981 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5982 "ldm%(ia%)\\t%1!, {%3, %4}"
5983 [(set_attr "type" "load2")
5984 (set_attr "predicable" "yes")]
5985 )
5986
5987 ;; Ordinary load multiple
5988
5989 (define_insn "*ldmsi4"
5990 [(match_parallel 0 "load_multiple_operation"
5991 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5992 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5993 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5994 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5995 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5996 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5997 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5998 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5999 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6000 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6001 [(set_attr "type" "load4")
6002 (set_attr "predicable" "yes")]
6003 )
6004
6005 (define_insn "*ldmsi3"
6006 [(match_parallel 0 "load_multiple_operation"
6007 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6008 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6009 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6010 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6011 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6012 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6013 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6014 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6015 [(set_attr "type" "load3")
6016 (set_attr "predicable" "yes")]
6017 )
6018
6019 (define_insn "*ldmsi2"
6020 [(match_parallel 0 "load_multiple_operation"
6021 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6022 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6023 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6024 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6025 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6026 "ldm%(ia%)\\t%1, {%2, %3}"
6027 [(set_attr "type" "load2")
6028 (set_attr "predicable" "yes")]
6029 )
6030
6031 (define_expand "store_multiple"
6032 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6033 (match_operand:SI 1 "" ""))
6034 (use (match_operand:SI 2 "" ""))])]
6035 "TARGET_32BIT"
6036 {
6037 HOST_WIDE_INT offset = 0;
6038
6039 /* Support only fixed point registers. */
6040 if (GET_CODE (operands[2]) != CONST_INT
6041 || INTVAL (operands[2]) > 14
6042 || INTVAL (operands[2]) < 2
6043 || GET_CODE (operands[1]) != REG
6044 || GET_CODE (operands[0]) != MEM
6045 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6046 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6047 FAIL;
6048
6049 operands[3]
6050 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6051 force_reg (SImode, XEXP (operands[0], 0)),
6052 TRUE, FALSE, operands[0], &offset);
6053 })
6054
6055 ;; Store multiple with write-back
6056
6057 (define_insn "*stmsi_postinc4"
6058 [(match_parallel 0 "store_multiple_operation"
6059 [(set (match_operand:SI 1 "s_register_operand" "=r")
6060 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6061 (const_int 16)))
6062 (set (mem:SI (match_dup 2))
6063 (match_operand:SI 3 "arm_hard_register_operand" ""))
6064 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6065 (match_operand:SI 4 "arm_hard_register_operand" ""))
6066 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6067 (match_operand:SI 5 "arm_hard_register_operand" ""))
6068 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6069 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6070 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6071 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6072 [(set_attr "predicable" "yes")
6073 (set_attr "type" "store4")]
6074 )
6075
6076 (define_insn "*stmsi_postinc4_thumb1"
6077 [(match_parallel 0 "store_multiple_operation"
6078 [(set (match_operand:SI 1 "s_register_operand" "=l")
6079 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6080 (const_int 16)))
6081 (set (mem:SI (match_dup 2))
6082 (match_operand:SI 3 "arm_hard_register_operand" ""))
6083 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6084 (match_operand:SI 4 "arm_hard_register_operand" ""))
6085 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6086 (match_operand:SI 5 "arm_hard_register_operand" ""))
6087 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6088 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6089 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6090 "stmia\\t%1!, {%3, %4, %5, %6}"
6091 [(set_attr "type" "store4")]
6092 )
6093
6094 (define_insn "*stmsi_postinc3"
6095 [(match_parallel 0 "store_multiple_operation"
6096 [(set (match_operand:SI 1 "s_register_operand" "=r")
6097 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6098 (const_int 12)))
6099 (set (mem:SI (match_dup 2))
6100 (match_operand:SI 3 "arm_hard_register_operand" ""))
6101 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6102 (match_operand:SI 4 "arm_hard_register_operand" ""))
6103 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6104 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6105 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6106 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6107 [(set_attr "predicable" "yes")
6108 (set_attr "type" "store3")]
6109 )
6110
6111 (define_insn "*stmsi_postinc2"
6112 [(match_parallel 0 "store_multiple_operation"
6113 [(set (match_operand:SI 1 "s_register_operand" "=r")
6114 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6115 (const_int 8)))
6116 (set (mem:SI (match_dup 2))
6117 (match_operand:SI 3 "arm_hard_register_operand" ""))
6118 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6119 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6120 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6121 "stm%(ia%)\\t%1!, {%3, %4}"
6122 [(set_attr "predicable" "yes")
6123 (set_attr "type" "store2")]
6124 )
6125
6126 ;; Ordinary store multiple
6127
6128 (define_insn "*stmsi4"
6129 [(match_parallel 0 "store_multiple_operation"
6130 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6131 (match_operand:SI 2 "arm_hard_register_operand" ""))
6132 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6133 (match_operand:SI 3 "arm_hard_register_operand" ""))
6134 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6135 (match_operand:SI 4 "arm_hard_register_operand" ""))
6136 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6137 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6138 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6139 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6140 [(set_attr "predicable" "yes")
6141 (set_attr "type" "store4")]
6142 )
6143
6144 (define_insn "*stmsi3"
6145 [(match_parallel 0 "store_multiple_operation"
6146 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6147 (match_operand:SI 2 "arm_hard_register_operand" ""))
6148 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6149 (match_operand:SI 3 "arm_hard_register_operand" ""))
6150 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6151 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6152 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6153 "stm%(ia%)\\t%1, {%2, %3, %4}"
6154 [(set_attr "predicable" "yes")
6155 (set_attr "type" "store3")]
6156 )
6157
6158 (define_insn "*stmsi2"
6159 [(match_parallel 0 "store_multiple_operation"
6160 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6161 (match_operand:SI 2 "arm_hard_register_operand" ""))
6162 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6163 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6164 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6165 "stm%(ia%)\\t%1, {%2, %3}"
6166 [(set_attr "predicable" "yes")
6167 (set_attr "type" "store2")]
6168 )
6169
6170 ;; Move a block of memory if it is word-aligned and MORE than 2 words long.
6171 ;; We could let this apply to smaller blocks as well, but it clobbers so
6172 ;; many registers that there is then probably a better way.
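;;
;; For example (sizes and registers illustrative), on the 32-bit path a
;; 24-byte word-aligned copy can be emitted as two ldmia/stmia pairs that each
;; move several words at a time, using the write-back forms above to advance
;; the source and destination pointers.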
6173
6174 (define_expand "movmemqi"
6175 [(match_operand:BLK 0 "general_operand" "")
6176 (match_operand:BLK 1 "general_operand" "")
6177 (match_operand:SI 2 "const_int_operand" "")
6178 (match_operand:SI 3 "const_int_operand" "")]
6179 "TARGET_EITHER"
6180 "
6181 if (TARGET_32BIT)
6182 {
6183 if (arm_gen_movmemqi (operands))
6184 DONE;
6185 FAIL;
6186 }
6187 else /* TARGET_THUMB1 */
6188 {
6189 if ( INTVAL (operands[3]) != 4
6190 || INTVAL (operands[2]) > 48)
6191 FAIL;
6192
6193 thumb_expand_movmemqi (operands);
6194 DONE;
6195 }
6196 "
6197 )
6198
6199 ;; Thumb block-move insns
6200
6201 (define_insn "movmem12b"
6202 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6203 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6204 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6205 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6206 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6207 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6208 (set (match_operand:SI 0 "register_operand" "=l")
6209 (plus:SI (match_dup 2) (const_int 12)))
6210 (set (match_operand:SI 1 "register_operand" "=l")
6211 (plus:SI (match_dup 3) (const_int 12)))
6212 (clobber (match_scratch:SI 4 "=&l"))
6213 (clobber (match_scratch:SI 5 "=&l"))
6214 (clobber (match_scratch:SI 6 "=&l"))]
6215 "TARGET_THUMB1"
6216 "* return thumb_output_move_mem_multiple (3, operands);"
6217 [(set_attr "length" "4")
6218 ; This isn't entirely accurate... It loads as well, but in terms of
6219 ; scheduling the following insn it is better to consider it as a store
6220 (set_attr "type" "store3")]
6221 )
6222
6223 (define_insn "movmem8b"
6224 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6225 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6226 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6227 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6228 (set (match_operand:SI 0 "register_operand" "=l")
6229 (plus:SI (match_dup 2) (const_int 8)))
6230 (set (match_operand:SI 1 "register_operand" "=l")
6231 (plus:SI (match_dup 3) (const_int 8)))
6232 (clobber (match_scratch:SI 4 "=&l"))
6233 (clobber (match_scratch:SI 5 "=&l"))]
6234 "TARGET_THUMB1"
6235 "* return thumb_output_move_mem_multiple (2, operands);"
6236 [(set_attr "length" "4")
6237 ; This isn't entirely accurate... It loads as well, but in terms of
6238 ; scheduling the following insn it is better to consider it as a store
6239 (set_attr "type" "store2")]
6240 )
6241
6242 \f
6243
6244 ;; Compare & branch insns
6245 ;; The range calculations are based on the following:
6246 ;; For forward branches, the address calculation returns the address of
6247 ;; the next instruction. This is 2 beyond the branch instruction.
6248 ;; For backward branches, the address calculation returns the address of
6249 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6250 ;; instruction for the shortest sequence, and 4 before the branch instruction
6251 ;; if we have to jump around an unconditional branch.
6252 ;; To the basic branch range the PC offset must be added (this is +4).
6253 ;; So for forward branches we have
6254 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6255 ;; And for backward branches we have
6256 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6257 ;;
6258 ;; For a 'b' pos_range = 2046, neg_range = -2048, giving (-2040 -> 2048).
6259 ;; For a 'b<cond>' pos_range = 254, neg_range = -256, giving (-250 -> 256).
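;;
;; Worked example (illustrative): for a forward 'b<cond>' the usable range is
;; 254 - 2 + 4 = 256 bytes, so a conditional branch whose target lies 300 bytes
;; ahead must fall back to the 6-byte form "b<inverse-cond> .LCB ; b target ;
;; .LCB:".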
6260
6261 (define_expand "cbranchsi4"
6262 [(set (pc) (if_then_else
6263 (match_operator 0 "arm_comparison_operator"
6264 [(match_operand:SI 1 "s_register_operand" "")
6265 (match_operand:SI 2 "nonmemory_operand" "")])
6266 (label_ref (match_operand 3 "" ""))
6267 (pc)))]
6268 "TARGET_THUMB1"
6269 "
6270 if (thumb1_cmpneg_operand (operands[2], SImode))
6271 {
6272 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6273 operands[3], operands[0]));
6274 DONE;
6275 }
6276 if (!thumb1_cmp_operand (operands[2], SImode))
6277 operands[2] = force_reg (SImode, operands[2]);
6278 ")
6279
6280 (define_insn "*cbranchsi4_insn"
6281 [(set (pc) (if_then_else
6282 (match_operator 0 "arm_comparison_operator"
6283 [(match_operand:SI 1 "s_register_operand" "l,*h")
6284 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6285 (label_ref (match_operand 3 "" ""))
6286 (pc)))]
6287 "TARGET_THUMB1"
6288 "*
6289 output_asm_insn (\"cmp\\t%1, %2\", operands);
6290
6291 switch (get_attr_length (insn))
6292 {
6293 case 4: return \"b%d0\\t%l3\";
6294 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6295 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6296 }
6297 "
6298 [(set (attr "far_jump")
6299 (if_then_else
6300 (eq_attr "length" "8")
6301 (const_string "yes")
6302 (const_string "no")))
6303 (set (attr "length")
6304 (if_then_else
6305 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6306 (le (minus (match_dup 3) (pc)) (const_int 256)))
6307 (const_int 4)
6308 (if_then_else
6309 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6310 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6311 (const_int 6)
6312 (const_int 8))))]
6313 )
6314
6315 (define_insn "cbranchsi4_scratch"
6316 [(set (pc) (if_then_else
6317 (match_operator 4 "arm_comparison_operator"
6318 [(match_operand:SI 1 "s_register_operand" "l,0")
6319 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6320 (label_ref (match_operand 3 "" ""))
6321 (pc)))
6322 (clobber (match_scratch:SI 0 "=l,l"))]
6323 "TARGET_THUMB1"
6324 "*
6325 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6326
6327 switch (get_attr_length (insn))
6328 {
6329 case 4: return \"b%d4\\t%l3\";
6330 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6331 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6332 }
6333 "
6334 [(set (attr "far_jump")
6335 (if_then_else
6336 (eq_attr "length" "8")
6337 (const_string "yes")
6338 (const_string "no")))
6339 (set (attr "length")
6340 (if_then_else
6341 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6342 (le (minus (match_dup 3) (pc)) (const_int 256)))
6343 (const_int 4)
6344 (if_then_else
6345 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6346 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6347 (const_int 6)
6348 (const_int 8))))]
6349 )
6350 (define_insn "*movsi_cbranchsi4"
6351 [(set (pc)
6352 (if_then_else
6353 (match_operator 3 "arm_comparison_operator"
6354 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6355 (const_int 0)])
6356 (label_ref (match_operand 2 "" ""))
6357 (pc)))
6358 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6359 (match_dup 1))]
6360 "TARGET_THUMB1"
6361 "*{
6362 if (which_alternative == 0)
6363 output_asm_insn (\"cmp\t%0, #0\", operands);
6364 else if (which_alternative == 1)
6365 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6366 else
6367 {
6368 output_asm_insn (\"cmp\t%1, #0\", operands);
6369 if (which_alternative == 2)
6370 output_asm_insn (\"mov\t%0, %1\", operands);
6371 else
6372 output_asm_insn (\"str\t%1, %0\", operands);
6373 }
6374 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6375 {
6376 case 4: return \"b%d3\\t%l2\";
6377 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6378 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6379 }
6380 }"
6381 [(set (attr "far_jump")
6382 (if_then_else
6383 (ior (and (gt (symbol_ref ("which_alternative"))
6384 (const_int 1))
6385 (eq_attr "length" "8"))
6386 (eq_attr "length" "10"))
6387 (const_string "yes")
6388 (const_string "no")))
6389 (set (attr "length")
6390 (if_then_else
6391 (le (symbol_ref ("which_alternative"))
6392 (const_int 1))
6393 (if_then_else
6394 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6395 (le (minus (match_dup 2) (pc)) (const_int 256)))
6396 (const_int 4)
6397 (if_then_else
6398 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6399 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6400 (const_int 6)
6401 (const_int 8)))
6402 (if_then_else
6403 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6404 (le (minus (match_dup 2) (pc)) (const_int 256)))
6405 (const_int 6)
6406 (if_then_else
6407 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6408 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6409 (const_int 8)
6410 (const_int 10)))))]
6411 )
6412
6413 (define_insn "*negated_cbranchsi4"
6414 [(set (pc)
6415 (if_then_else
6416 (match_operator 0 "equality_operator"
6417 [(match_operand:SI 1 "s_register_operand" "l")
6418 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6419 (label_ref (match_operand 3 "" ""))
6420 (pc)))]
6421 "TARGET_THUMB1"
6422 "*
6423 output_asm_insn (\"cmn\\t%1, %2\", operands);
6424 switch (get_attr_length (insn))
6425 {
6426 case 4: return \"b%d0\\t%l3\";
6427 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6428 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6429 }
6430 "
6431 [(set (attr "far_jump")
6432 (if_then_else
6433 (eq_attr "length" "8")
6434 (const_string "yes")
6435 (const_string "no")))
6436 (set (attr "length")
6437 (if_then_else
6438 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6439 (le (minus (match_dup 3) (pc)) (const_int 256)))
6440 (const_int 4)
6441 (if_then_else
6442 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6443 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6444 (const_int 6)
6445 (const_int 8))))]
6446 )
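
;; For instance (illustrative), "if (x == -y)" can be compiled as
;;         cmn     r0, r1                    @ sets flags on r0 + r1
;;         beq     .Ltarget
;; avoiding an explicit negation of y.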
6447
6448 (define_insn "*tbit_cbranch"
6449 [(set (pc)
6450 (if_then_else
6451 (match_operator 0 "equality_operator"
6452 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6453 (const_int 1)
6454 (match_operand:SI 2 "const_int_operand" "i"))
6455 (const_int 0)])
6456 (label_ref (match_operand 3 "" ""))
6457 (pc)))
6458 (clobber (match_scratch:SI 4 "=l"))]
6459 "TARGET_THUMB1"
6460 "*
6461 {
6462 rtx op[3];
6463 op[0] = operands[4];
6464 op[1] = operands[1];
6465 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6466
6467 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6468 switch (get_attr_length (insn))
6469 {
6470 case 4: return \"b%d0\\t%l3\";
6471 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6472 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6473 }
6474 }"
6475 [(set (attr "far_jump")
6476 (if_then_else
6477 (eq_attr "length" "8")
6478 (const_string "yes")
6479 (const_string "no")))
6480 (set (attr "length")
6481 (if_then_else
6482 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6483 (le (minus (match_dup 3) (pc)) (const_int 256)))
6484 (const_int 4)
6485 (if_then_else
6486 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6487 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6488 (const_int 6)
6489 (const_int 8))))]
6490 )
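
;; Sketch (registers illustrative): testing a single bit, say bit 3 of r0,
;; shifts that bit into the sign position with
;;         lsl     r3, r0, #28               @ 32 - 1 - 3
;; and then branches on the resulting flags, so no separate mask or tst
;; instruction is needed.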
6491
6492 (define_insn "*tlobits_cbranch"
6493 [(set (pc)
6494 (if_then_else
6495 (match_operator 0 "equality_operator"
6496 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6497 (match_operand:SI 2 "const_int_operand" "i")
6498 (const_int 0))
6499 (const_int 0)])
6500 (label_ref (match_operand 3 "" ""))
6501 (pc)))
6502 (clobber (match_scratch:SI 4 "=l"))]
6503 "TARGET_THUMB1"
6504 "*
6505 {
6506 rtx op[3];
6507 op[0] = operands[4];
6508 op[1] = operands[1];
6509 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6510
6511 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6512 switch (get_attr_length (insn))
6513 {
6514 case 4: return \"b%d0\\t%l3\";
6515 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6516 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6517 }
6518 }"
6519 [(set (attr "far_jump")
6520 (if_then_else
6521 (eq_attr "length" "8")
6522 (const_string "yes")
6523 (const_string "no")))
6524 (set (attr "length")
6525 (if_then_else
6526 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6527 (le (minus (match_dup 3) (pc)) (const_int 256)))
6528 (const_int 4)
6529 (if_then_else
6530 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6531 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6532 (const_int 6)
6533 (const_int 8))))]
6534 )
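
;; Sketch (registers illustrative): testing whether the low four bits of r0
;; are all zero shifts them to the top of the register,
;;         lsl     r3, r0, #28               @ 32 - 4
;;         beq     .Ltarget                  @ taken when (r0 & 0xf) == 0
;; discarding the uninteresting high bits in the process.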
6535
6536 (define_insn "*tstsi3_cbranch"
6537 [(set (pc)
6538 (if_then_else
6539 (match_operator 3 "equality_operator"
6540 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6541 (match_operand:SI 1 "s_register_operand" "l"))
6542 (const_int 0)])
6543 (label_ref (match_operand 2 "" ""))
6544 (pc)))]
6545 "TARGET_THUMB1"
6546 "*
6547 {
6548 output_asm_insn (\"tst\\t%0, %1\", operands);
6549 switch (get_attr_length (insn))
6550 {
6551 case 4: return \"b%d3\\t%l2\";
6552 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6553 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6554 }
6555 }"
6556 [(set (attr "far_jump")
6557 (if_then_else
6558 (eq_attr "length" "8")
6559 (const_string "yes")
6560 (const_string "no")))
6561 (set (attr "length")
6562 (if_then_else
6563 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6564 (le (minus (match_dup 2) (pc)) (const_int 256)))
6565 (const_int 4)
6566 (if_then_else
6567 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6568 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6569 (const_int 6)
6570 (const_int 8))))]
6571 )
6572
6573 (define_insn "*andsi3_cbranch"
6574 [(set (pc)
6575 (if_then_else
6576 (match_operator 5 "equality_operator"
6577 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6578 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6579 (const_int 0)])
6580 (label_ref (match_operand 4 "" ""))
6581 (pc)))
6582 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6583 (and:SI (match_dup 2) (match_dup 3)))
6584 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6585 "TARGET_THUMB1"
6586 "*
6587 {
6588 if (which_alternative == 0)
6589 output_asm_insn (\"and\\t%0, %3\", operands);
6590 else if (which_alternative == 1)
6591 {
6592 output_asm_insn (\"and\\t%1, %3\", operands);
6593 output_asm_insn (\"mov\\t%0, %1\", operands);
6594 }
6595 else
6596 {
6597 output_asm_insn (\"and\\t%1, %3\", operands);
6598 output_asm_insn (\"str\\t%1, %0\", operands);
6599 }
6600
6601 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6602 {
6603 case 4: return \"b%d5\\t%l4\";
6604 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6605 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6606 }
6607 }"
6608 [(set (attr "far_jump")
6609 (if_then_else
6610 (ior (and (eq (symbol_ref ("which_alternative"))
6611 (const_int 0))
6612 (eq_attr "length" "8"))
6613 (eq_attr "length" "10"))
6614 (const_string "yes")
6615 (const_string "no")))
6616 (set (attr "length")
6617 (if_then_else
6618 (eq (symbol_ref ("which_alternative"))
6619 (const_int 0))
6620 (if_then_else
6621 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6622 (le (minus (match_dup 4) (pc)) (const_int 256)))
6623 (const_int 4)
6624 (if_then_else
6625 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6626 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6627 (const_int 6)
6628 (const_int 8)))
6629 (if_then_else
6630 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6631 (le (minus (match_dup 4) (pc)) (const_int 256)))
6632 (const_int 6)
6633 (if_then_else
6634 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6635 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6636 (const_int 8)
6637 (const_int 10)))))]
6638 )
6639
6640 (define_insn "*orrsi3_cbranch_scratch"
6641 [(set (pc)
6642 (if_then_else
6643 (match_operator 4 "equality_operator"
6644 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6645 (match_operand:SI 2 "s_register_operand" "l"))
6646 (const_int 0)])
6647 (label_ref (match_operand 3 "" ""))
6648 (pc)))
6649 (clobber (match_scratch:SI 0 "=l"))]
6650 "TARGET_THUMB1"
6651 "*
6652 {
6653 output_asm_insn (\"orr\\t%0, %2\", operands);
6654 switch (get_attr_length (insn))
6655 {
6656 case 4: return \"b%d4\\t%l3\";
6657 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6658 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6659 }
6660 }"
6661 [(set (attr "far_jump")
6662 (if_then_else
6663 (eq_attr "length" "8")
6664 (const_string "yes")
6665 (const_string "no")))
6666 (set (attr "length")
6667 (if_then_else
6668 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6669 (le (minus (match_dup 3) (pc)) (const_int 256)))
6670 (const_int 4)
6671 (if_then_else
6672 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6673 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6674 (const_int 6)
6675 (const_int 8))))]
6676 )
6677
6678 (define_insn "*orrsi3_cbranch"
6679 [(set (pc)
6680 (if_then_else
6681 (match_operator 5 "equality_operator"
6682 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6683 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6684 (const_int 0)])
6685 (label_ref (match_operand 4 "" ""))
6686 (pc)))
6687 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6688 (ior:SI (match_dup 2) (match_dup 3)))
6689 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6690 "TARGET_THUMB1"
6691 "*
6692 {
6693 if (which_alternative == 0)
6694 output_asm_insn (\"orr\\t%0, %3\", operands);
6695 else if (which_alternative == 1)
6696 {
6697 output_asm_insn (\"orr\\t%1, %3\", operands);
6698 output_asm_insn (\"mov\\t%0, %1\", operands);
6699 }
6700 else
6701 {
6702 output_asm_insn (\"orr\\t%1, %3\", operands);
6703 output_asm_insn (\"str\\t%1, %0\", operands);
6704 }
6705
6706 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6707 {
6708 case 4: return \"b%d5\\t%l4\";
6709 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6710 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6711 }
6712 }"
6713 [(set (attr "far_jump")
6714 (if_then_else
6715 (ior (and (eq (symbol_ref ("which_alternative"))
6716 (const_int 0))
6717 (eq_attr "length" "8"))
6718 (eq_attr "length" "10"))
6719 (const_string "yes")
6720 (const_string "no")))
6721 (set (attr "length")
6722 (if_then_else
6723 (eq (symbol_ref ("which_alternative"))
6724 (const_int 0))
6725 (if_then_else
6726 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6727 (le (minus (match_dup 4) (pc)) (const_int 256)))
6728 (const_int 4)
6729 (if_then_else
6730 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6731 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6732 (const_int 6)
6733 (const_int 8)))
6734 (if_then_else
6735 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6736 (le (minus (match_dup 4) (pc)) (const_int 256)))
6737 (const_int 6)
6738 (if_then_else
6739 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6740 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6741 (const_int 8)
6742 (const_int 10)))))]
6743 )
6744
6745 (define_insn "*xorsi3_cbranch_scratch"
6746 [(set (pc)
6747 (if_then_else
6748 (match_operator 4 "equality_operator"
6749 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6750 (match_operand:SI 2 "s_register_operand" "l"))
6751 (const_int 0)])
6752 (label_ref (match_operand 3 "" ""))
6753 (pc)))
6754 (clobber (match_scratch:SI 0 "=l"))]
6755 "TARGET_THUMB1"
6756 "*
6757 {
6758 output_asm_insn (\"eor\\t%0, %2\", operands);
6759 switch (get_attr_length (insn))
6760 {
6761 case 4: return \"b%d4\\t%l3\";
6762 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6763 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6764 }
6765 }"
6766 [(set (attr "far_jump")
6767 (if_then_else
6768 (eq_attr "length" "8")
6769 (const_string "yes")
6770 (const_string "no")))
6771 (set (attr "length")
6772 (if_then_else
6773 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6774 (le (minus (match_dup 3) (pc)) (const_int 256)))
6775 (const_int 4)
6776 (if_then_else
6777 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6778 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6779 (const_int 6)
6780 (const_int 8))))]
6781 )
6782
6783 (define_insn "*xorsi3_cbranch"
6784 [(set (pc)
6785 (if_then_else
6786 (match_operator 5 "equality_operator"
6787 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6788 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6789 (const_int 0)])
6790 (label_ref (match_operand 4 "" ""))
6791 (pc)))
6792 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6793 (xor:SI (match_dup 2) (match_dup 3)))
6794 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6795 "TARGET_THUMB1"
6796 "*
6797 {
6798 if (which_alternative == 0)
6799 output_asm_insn (\"eor\\t%0, %3\", operands);
6800 else if (which_alternative == 1)
6801 {
6802 output_asm_insn (\"eor\\t%1, %3\", operands);
6803 output_asm_insn (\"mov\\t%0, %1\", operands);
6804 }
6805 else
6806 {
6807 output_asm_insn (\"eor\\t%1, %3\", operands);
6808 output_asm_insn (\"str\\t%1, %0\", operands);
6809 }
6810
6811 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6812 {
6813 case 4: return \"b%d5\\t%l4\";
6814 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6815 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6816 }
6817 }"
6818 [(set (attr "far_jump")
6819 (if_then_else
6820 (ior (and (eq (symbol_ref ("which_alternative"))
6821 (const_int 0))
6822 (eq_attr "length" "8"))
6823 (eq_attr "length" "10"))
6824 (const_string "yes")
6825 (const_string "no")))
6826 (set (attr "length")
6827 (if_then_else
6828 (eq (symbol_ref ("which_alternative"))
6829 (const_int 0))
6830 (if_then_else
6831 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6832 (le (minus (match_dup 4) (pc)) (const_int 256)))
6833 (const_int 4)
6834 (if_then_else
6835 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6836 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6837 (const_int 6)
6838 (const_int 8)))
6839 (if_then_else
6840 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6841 (le (minus (match_dup 4) (pc)) (const_int 256)))
6842 (const_int 6)
6843 (if_then_else
6844 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6845 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6846 (const_int 8)
6847 (const_int 10)))))]
6848 )
6849
6850 (define_insn "*bicsi3_cbranch_scratch"
6851 [(set (pc)
6852 (if_then_else
6853 (match_operator 4 "equality_operator"
6854 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6855 (match_operand:SI 1 "s_register_operand" "0"))
6856 (const_int 0)])
6857 (label_ref (match_operand 3 "" ""))
6858 (pc)))
6859 (clobber (match_scratch:SI 0 "=l"))]
6860 "TARGET_THUMB1"
6861 "*
6862 {
6863 output_asm_insn (\"bic\\t%0, %2\", operands);
6864 switch (get_attr_length (insn))
6865 {
6866 case 4: return \"b%d4\\t%l3\";
6867 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6868 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6869 }
6870 }"
6871 [(set (attr "far_jump")
6872 (if_then_else
6873 (eq_attr "length" "8")
6874 (const_string "yes")
6875 (const_string "no")))
6876 (set (attr "length")
6877 (if_then_else
6878 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6879 (le (minus (match_dup 3) (pc)) (const_int 256)))
6880 (const_int 4)
6881 (if_then_else
6882 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6883 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6884 (const_int 6)
6885 (const_int 8))))]
6886 )
6887
6888 (define_insn "*bicsi3_cbranch"
6889 [(set (pc)
6890 (if_then_else
6891 (match_operator 5 "equality_operator"
6892 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6893 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6894 (const_int 0)])
6895 (label_ref (match_operand 4 "" ""))
6896 (pc)))
6897 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6898 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6899 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6900 "TARGET_THUMB1"
6901 "*
6902 {
6903 if (which_alternative == 0)
6904 output_asm_insn (\"bic\\t%0, %3\", operands);
6905 else if (which_alternative <= 2)
6906 {
6907 output_asm_insn (\"bic\\t%1, %3\", operands);
6908 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6909 conditions again, since we're only testing for equality. */
6910 output_asm_insn (\"mov\\t%0, %1\", operands);
6911 }
6912 else
6913 {
6914 output_asm_insn (\"bic\\t%1, %3\", operands);
6915 output_asm_insn (\"str\\t%1, %0\", operands);
6916 }
6917
6918 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6919 {
6920 case 4: return \"b%d5\\t%l4\";
6921 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6922 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6923 }
6924 }"
6925 [(set (attr "far_jump")
6926 (if_then_else
6927 (ior (and (eq (symbol_ref ("which_alternative"))
6928 (const_int 0))
6929 (eq_attr "length" "8"))
6930 (eq_attr "length" "10"))
6931 (const_string "yes")
6932 (const_string "no")))
6933 (set (attr "length")
6934 (if_then_else
6935 (eq (symbol_ref ("which_alternative"))
6936 (const_int 0))
6937 (if_then_else
6938 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6939 (le (minus (match_dup 4) (pc)) (const_int 256)))
6940 (const_int 4)
6941 (if_then_else
6942 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6943 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6944 (const_int 6)
6945 (const_int 8)))
6946 (if_then_else
6947 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6948 (le (minus (match_dup 4) (pc)) (const_int 256)))
6949 (const_int 6)
6950 (if_then_else
6951 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6952 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6953 (const_int 8)
6954 (const_int 10)))))]
6955 )
6956
6957 (define_insn "*cbranchne_decr1"
6958 [(set (pc)
6959 (if_then_else (match_operator 3 "equality_operator"
6960 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6961 (const_int 0)])
6962 (label_ref (match_operand 4 "" ""))
6963 (pc)))
6964 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6965 (plus:SI (match_dup 2) (const_int -1)))
6966 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6967 "TARGET_THUMB1"
6968 "*
6969 {
6970 rtx cond[2];
6971 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6972 ? GEU : LTU),
6973 VOIDmode, operands[2], const1_rtx);
6974 cond[1] = operands[4];
6975
6976 if (which_alternative == 0)
6977 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6978 else if (which_alternative == 1)
6979 {
6980 /* We must provide an alternative for a hi reg because reload
6981 cannot handle output reloads on a jump instruction, but we
6982 can't subtract into that. Fortunately a mov from lo to hi
6983 does not clobber the condition codes. */
6984 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6985 output_asm_insn (\"mov\\t%0, %1\", operands);
6986 }
6987 else
6988 {
6989 /* Similarly, but the target is memory. */
6990 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6991 output_asm_insn (\"str\\t%1, %0\", operands);
6992 }
6993
6994 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6995 {
6996 case 4:
6997 output_asm_insn (\"b%d0\\t%l1\", cond);
6998 return \"\";
6999 case 6:
7000 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7001 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7002 default:
7003 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7004 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7005 }
7006 }
7007 "
7008 [(set (attr "far_jump")
7009 (if_then_else
7010 (ior (and (eq (symbol_ref ("which_alternative"))
7011 (const_int 0))
7012 (eq_attr "length" "8"))
7013 (eq_attr "length" "10"))
7014 (const_string "yes")
7015 (const_string "no")))
7016 (set_attr_alternative "length"
7017 [
7018 ;; Alternative 0
7019 (if_then_else
7020 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7021 (le (minus (match_dup 4) (pc)) (const_int 256)))
7022 (const_int 4)
7023 (if_then_else
7024 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7025 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7026 (const_int 6)
7027 (const_int 8)))
7028 ;; Alternative 1
7029 (if_then_else
7030 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7031 (le (minus (match_dup 4) (pc)) (const_int 256)))
7032 (const_int 6)
7033 (if_then_else
7034 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7035 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7036 (const_int 8)
7037 (const_int 10)))
7038 ;; Alternative 2
7039 (if_then_else
7040 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7041 (le (minus (match_dup 4) (pc)) (const_int 256)))
7042 (const_int 6)
7043 (if_then_else
7044 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7045 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7046 (const_int 8)
7047 (const_int 10)))
7048 ;; Alternative 3
7049 (if_then_else
7050 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7051 (le (minus (match_dup 4) (pc)) (const_int 256)))
7052 (const_int 6)
7053 (if_then_else
7054 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7055 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7056 (const_int 8)
7057 (const_int 10)))])]
7058 )
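
;; A note on the pattern above: the "sub" emitted first already leaves the
;; unsigned comparison of operands[2] with 1 in the carry flag, so the test
;; against zero is rewritten as GEU (x, 1) for NE and LTU (x, 1) for EQ and
;; no separate compare instruction is needed.  Roughly, for the NE case
;; (illustrative only):
;;
;;   t = x - 1;           /* SUB: C := (x >= 1) unsigned, i.e. x != 0 */
;;   if (C) goto label;   /* the b%d0 branch tests exactly that flag  */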
7059
7060 (define_insn "*addsi3_cbranch"
7061 [(set (pc)
7062 (if_then_else
7063 (match_operator 4 "comparison_operator"
7064 [(plus:SI
7065 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7066 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7067 (const_int 0)])
7068 (label_ref (match_operand 5 "" ""))
7069 (pc)))
7070 (set
7071 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7072 (plus:SI (match_dup 2) (match_dup 3)))
7073 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7074 "TARGET_THUMB1
7075 && (GET_CODE (operands[4]) == EQ
7076 || GET_CODE (operands[4]) == NE
7077 || GET_CODE (operands[4]) == GE
7078 || GET_CODE (operands[4]) == LT)"
7079 "*
7080 {
7081 rtx cond[3];
7082
7083
7084 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7085 cond[1] = operands[2];
7086 cond[2] = operands[3];
7087
7088 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7089 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7090 else
7091 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7092
7093 if (which_alternative >= 3
7094 && which_alternative < 4)
7095 output_asm_insn (\"mov\\t%0, %1\", operands);
7096 else if (which_alternative >= 4)
7097 output_asm_insn (\"str\\t%1, %0\", operands);
7098
7099 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7100 {
7101 case 4:
7102 return \"b%d4\\t%l5\";
7103 case 6:
7104 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7105 default:
7106 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7107 }
7108 }
7109 "
7110 [(set (attr "far_jump")
7111 (if_then_else
7112 (ior (and (lt (symbol_ref ("which_alternative"))
7113 (const_int 3))
7114 (eq_attr "length" "8"))
7115 (eq_attr "length" "10"))
7116 (const_string "yes")
7117 (const_string "no")))
7118 (set (attr "length")
7119 (if_then_else
7120 (lt (symbol_ref ("which_alternative"))
7121 (const_int 3))
7122 (if_then_else
7123 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7124 (le (minus (match_dup 5) (pc)) (const_int 256)))
7125 (const_int 4)
7126 (if_then_else
7127 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7128 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7129 (const_int 6)
7130 (const_int 8)))
7131 (if_then_else
7132 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7133 (le (minus (match_dup 5) (pc)) (const_int 256)))
7134 (const_int 6)
7135 (if_then_else
7136 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7137 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7138 (const_int 8)
7139 (const_int 10)))))]
7140 )
7141
7142 (define_insn "*addsi3_cbranch_scratch"
7143 [(set (pc)
7144 (if_then_else
7145 (match_operator 3 "comparison_operator"
7146 [(plus:SI
7147 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7148 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7149 (const_int 0)])
7150 (label_ref (match_operand 4 "" ""))
7151 (pc)))
7152 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7153 "TARGET_THUMB1
7154 && (GET_CODE (operands[3]) == EQ
7155 || GET_CODE (operands[3]) == NE
7156 || GET_CODE (operands[3]) == GE
7157 || GET_CODE (operands[3]) == LT)"
7158 "*
7159 {
7160 switch (which_alternative)
7161 {
7162 case 0:
7163 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7164 break;
7165 case 1:
7166 output_asm_insn (\"cmn\t%1, %2\", operands);
7167 break;
7168 case 2:
7169 if (INTVAL (operands[2]) < 0)
7170 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7171 else
7172 output_asm_insn (\"add\t%0, %1, %2\", operands);
7173 break;
7174 case 3:
7175 if (INTVAL (operands[2]) < 0)
7176 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7177 else
7178 output_asm_insn (\"add\t%0, %0, %2\", operands);
7179 break;
7180 }
7181
7182 switch (get_attr_length (insn))
7183 {
7184 case 4:
7185 return \"b%d3\\t%l4\";
7186 case 6:
7187 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7188 default:
7189 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7190 }
7191 }
7192 "
7193 [(set (attr "far_jump")
7194 (if_then_else
7195 (eq_attr "length" "8")
7196 (const_string "yes")
7197 (const_string "no")))
7198 (set (attr "length")
7199 (if_then_else
7200 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7201 (le (minus (match_dup 4) (pc)) (const_int 256)))
7202 (const_int 4)
7203 (if_then_else
7204 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7205 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7206 (const_int 6)
7207 (const_int 8))))]
7208 )
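
;; The first two alternatives above test x + y against zero without
;; computing the sum: a negative constant folds into "cmp\t%1, #%n2" and a
;; register operand uses "cmn\t%1, %2", which sets the flags for %1 + %2.
;; The remaining alternatives perform the add (or sub) into the scratch
;; register and branch on the flags of that result instead.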
7209
7210 (define_insn "*subsi3_cbranch"
7211 [(set (pc)
7212 (if_then_else
7213 (match_operator 4 "comparison_operator"
7214 [(minus:SI
7215 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7216 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7217 (const_int 0)])
7218 (label_ref (match_operand 5 "" ""))
7219 (pc)))
7220 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7221 (minus:SI (match_dup 2) (match_dup 3)))
7222 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7223 "TARGET_THUMB1
7224 && (GET_CODE (operands[4]) == EQ
7225 || GET_CODE (operands[4]) == NE
7226 || GET_CODE (operands[4]) == GE
7227 || GET_CODE (operands[4]) == LT)"
7228 "*
7229 {
7230 if (which_alternative == 0)
7231 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7232 else if (which_alternative == 1)
7233 {
7234 /* We must provide an alternative for a hi reg because reload
7235 cannot handle output reloads on a jump instruction, but we
7236 can't subtract into that. Fortunately a mov from lo to hi
7237 does not clobber the condition codes. */
7238 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7239 output_asm_insn (\"mov\\t%0, %1\", operands);
7240 }
7241 else
7242 {
7243 /* Similarly, but the target is memory. */
7244 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7245 output_asm_insn (\"str\\t%1, %0\", operands);
7246 }
7247
7248 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7249 {
7250 case 4:
7251 return \"b%d4\\t%l5\";
7252 case 6:
7253 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7254 default:
7255 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7256 }
7257 }
7258 "
7259 [(set (attr "far_jump")
7260 (if_then_else
7261 (ior (and (eq (symbol_ref ("which_alternative"))
7262 (const_int 0))
7263 (eq_attr "length" "8"))
7264 (eq_attr "length" "10"))
7265 (const_string "yes")
7266 (const_string "no")))
7267 (set (attr "length")
7268 (if_then_else
7269 (eq (symbol_ref ("which_alternative"))
7270 (const_int 0))
7271 (if_then_else
7272 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7273 (le (minus (match_dup 5) (pc)) (const_int 256)))
7274 (const_int 4)
7275 (if_then_else
7276 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7277 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7278 (const_int 6)
7279 (const_int 8)))
7280 (if_then_else
7281 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7282 (le (minus (match_dup 5) (pc)) (const_int 256)))
7283 (const_int 6)
7284 (if_then_else
7285 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7286 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7287 (const_int 8)
7288 (const_int 10)))))]
7289 )
7290
7291 (define_insn "*subsi3_cbranch_scratch"
7292 [(set (pc)
7293 (if_then_else
7294 (match_operator 0 "arm_comparison_operator"
7295 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7296 (match_operand:SI 2 "nonmemory_operand" "l"))
7297 (const_int 0)])
7298 (label_ref (match_operand 3 "" ""))
7299 (pc)))]
7300 "TARGET_THUMB1
7301 && (GET_CODE (operands[0]) == EQ
7302 || GET_CODE (operands[0]) == NE
7303 || GET_CODE (operands[0]) == GE
7304 || GET_CODE (operands[0]) == LT)"
7305 "*
7306 output_asm_insn (\"cmp\\t%1, %2\", operands);
7307 switch (get_attr_length (insn))
7308 {
7309 case 4: return \"b%d0\\t%l3\";
7310 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7311 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7312 }
7313 "
7314 [(set (attr "far_jump")
7315 (if_then_else
7316 (eq_attr "length" "8")
7317 (const_string "yes")
7318 (const_string "no")))
7319 (set (attr "length")
7320 (if_then_else
7321 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7322 (le (minus (match_dup 3) (pc)) (const_int 256)))
7323 (const_int 4)
7324 (if_then_else
7325 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7326 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7327 (const_int 6)
7328 (const_int 8))))]
7329 )
7330
7331 ;; Comparison and test insns
7332
7333 (define_expand "cmpsi"
7334 [(match_operand:SI 0 "s_register_operand" "")
7335 (match_operand:SI 1 "arm_add_operand" "")]
7336 "TARGET_32BIT"
7337 "{
7338 arm_compare_op0 = operands[0];
7339 arm_compare_op1 = operands[1];
7340 DONE;
7341 }"
7342 )
7343
7344 (define_expand "cmpsf"
7345 [(match_operand:SF 0 "s_register_operand" "")
7346 (match_operand:SF 1 "arm_float_compare_operand" "")]
7347 "TARGET_32BIT && TARGET_HARD_FLOAT"
7348 "
7349 arm_compare_op0 = operands[0];
7350 arm_compare_op1 = operands[1];
7351 DONE;
7352 "
7353 )
7354
7355 (define_expand "cmpdf"
7356 [(match_operand:DF 0 "s_register_operand" "")
7357 (match_operand:DF 1 "arm_float_compare_operand" "")]
7358 "TARGET_32BIT && TARGET_HARD_FLOAT"
7359 "
7360 arm_compare_op0 = operands[0];
7361 arm_compare_op1 = operands[1];
7362 DONE;
7363 "
7364 )
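
;; Note that the cmpsi/cmpsf/cmpdf expanders above emit no code themselves;
;; they only record the operands in arm_compare_op0/arm_compare_op1.  The
;; branch and store-flag expanders that follow call arm_gen_compare_reg to
;; emit the real comparison once the condition being tested is known.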
7365
7366 (define_insn "*arm_cmpsi_insn"
7367 [(set (reg:CC CC_REGNUM)
7368 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7369 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7370 "TARGET_32BIT"
7371 "@
7372 cmp%?\\t%0, %1
7373 cmn%?\\t%0, #%n1"
7374 [(set_attr "conds" "set")]
7375 )
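
;; For example (illustrative only; register allocation varies), a test such
;; as
;;
;;   if (x == -1) ...
;;
;; cannot encode #-1 as a data-processing immediate, so the second
;; alternative emits "cmn r0, #1", setting the flags for x - (-1).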
7376
7377 (define_insn "*arm_cmpsi_shiftsi"
7378 [(set (reg:CC CC_REGNUM)
7379 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7380 (match_operator:SI 3 "shift_operator"
7381 [(match_operand:SI 1 "s_register_operand" "r")
7382 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7383 "TARGET_ARM"
7384 "cmp%?\\t%0, %1%S3"
7385 [(set_attr "conds" "set")
7386 (set_attr "shift" "1")
7387 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7388 (const_string "alu_shift")
7389 (const_string "alu_shift_reg")))]
7390 )
7391
7392 (define_insn "*arm_cmpsi_shiftsi_swp"
7393 [(set (reg:CC_SWP CC_REGNUM)
7394 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7395 [(match_operand:SI 1 "s_register_operand" "r")
7396 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7397 (match_operand:SI 0 "s_register_operand" "r")))]
7398 "TARGET_ARM"
7399 "cmp%?\\t%0, %1%S3"
7400 [(set_attr "conds" "set")
7401 (set_attr "shift" "1")
7402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7403 (const_string "alu_shift")
7404 (const_string "alu_shift_reg")))]
7405 )
7406
7407 (define_insn "*arm_cmpsi_negshiftsi_si"
7408 [(set (reg:CC_Z CC_REGNUM)
7409 (compare:CC_Z
7410 (neg:SI (match_operator:SI 1 "shift_operator"
7411 [(match_operand:SI 2 "s_register_operand" "r")
7412 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7413 (match_operand:SI 0 "s_register_operand" "r")))]
7414 "TARGET_ARM"
7415 "cmn%?\\t%0, %2%S1"
7416 [(set_attr "conds" "set")
7417 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7418 (const_string "alu_shift")
7419 (const_string "alu_shift_reg")))]
7420 )
7421
7422 ;; Cirrus SF compare instruction
7423 (define_insn "*cirrus_cmpsf"
7424 [(set (reg:CCFP CC_REGNUM)
7425 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7426 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7427 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7428 "cfcmps%?\\tr15, %V0, %V1"
7429 [(set_attr "type" "mav_farith")
7430 (set_attr "cirrus" "compare")]
7431 )
7432
7433 ;; Cirrus DF compare instruction
7434 (define_insn "*cirrus_cmpdf"
7435 [(set (reg:CCFP CC_REGNUM)
7436 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7437 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7438 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7439 "cfcmpd%?\\tr15, %V0, %V1"
7440 [(set_attr "type" "mav_farith")
7441 (set_attr "cirrus" "compare")]
7442 )
7443
7444 ;; Cirrus DI compare instruction
7445 (define_expand "cmpdi"
7446 [(match_operand:DI 0 "cirrus_fp_register" "")
7447 (match_operand:DI 1 "cirrus_fp_register" "")]
7448 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7449 "{
7450 arm_compare_op0 = operands[0];
7451 arm_compare_op1 = operands[1];
7452 DONE;
7453 }")
7454
7455 (define_insn "*cirrus_cmpdi"
7456 [(set (reg:CC CC_REGNUM)
7457 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7458 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7459 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7460 "cfcmp64%?\\tr15, %V0, %V1"
7461 [(set_attr "type" "mav_farith")
7462 (set_attr "cirrus" "compare")]
7463 )
7464
7465 ; This insn allows redundant compares to be removed by cse; nothing should
7466 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7467 ; is deleted later on.  The match_dup will match the mode here, so that
7468 ; mode changes of the condition codes aren't lost by this even though we don't
7469 ; specify what they are.
7470
7471 (define_insn "*deleted_compare"
7472 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7473 "TARGET_32BIT"
7474 "\\t%@ deleted compare"
7475 [(set_attr "conds" "set")
7476 (set_attr "length" "0")]
7477 )
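
;; A rough illustration (not code from this file) of where this helps:
;;
;;   if (a < b) x++;
;;   if (a >= b) y++;   /* the second compare of a and b is redundant */
;;
;; After cse the second comparison can become (set (reg CC) (reg CC)),
;; which matches *deleted_compare and is later removed altogether.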
7478
7479 \f
7480 ;; Conditional branch insns
7481
7482 (define_expand "beq"
7483 [(set (pc)
7484 (if_then_else (eq (match_dup 1) (const_int 0))
7485 (label_ref (match_operand 0 "" ""))
7486 (pc)))]
7487 "TARGET_32BIT"
7488 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7489 )
7490
7491 (define_expand "bne"
7492 [(set (pc)
7493 (if_then_else (ne (match_dup 1) (const_int 0))
7494 (label_ref (match_operand 0 "" ""))
7495 (pc)))]
7496 "TARGET_32BIT"
7497 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7498 )
7499
7500 (define_expand "bgt"
7501 [(set (pc)
7502 (if_then_else (gt (match_dup 1) (const_int 0))
7503 (label_ref (match_operand 0 "" ""))
7504 (pc)))]
7505 "TARGET_32BIT"
7506 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7507 )
7508
7509 (define_expand "ble"
7510 [(set (pc)
7511 (if_then_else (le (match_dup 1) (const_int 0))
7512 (label_ref (match_operand 0 "" ""))
7513 (pc)))]
7514 "TARGET_32BIT"
7515 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7516 )
7517
7518 (define_expand "bge"
7519 [(set (pc)
7520 (if_then_else (ge (match_dup 1) (const_int 0))
7521 (label_ref (match_operand 0 "" ""))
7522 (pc)))]
7523 "TARGET_32BIT"
7524 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7525 )
7526
7527 (define_expand "blt"
7528 [(set (pc)
7529 (if_then_else (lt (match_dup 1) (const_int 0))
7530 (label_ref (match_operand 0 "" ""))
7531 (pc)))]
7532 "TARGET_32BIT"
7533 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7534 )
7535
7536 (define_expand "bgtu"
7537 [(set (pc)
7538 (if_then_else (gtu (match_dup 1) (const_int 0))
7539 (label_ref (match_operand 0 "" ""))
7540 (pc)))]
7541 "TARGET_32BIT"
7542 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7543 )
7544
7545 (define_expand "bleu"
7546 [(set (pc)
7547 (if_then_else (leu (match_dup 1) (const_int 0))
7548 (label_ref (match_operand 0 "" ""))
7549 (pc)))]
7550 "TARGET_32BIT"
7551 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7552 )
7553
7554 (define_expand "bgeu"
7555 [(set (pc)
7556 (if_then_else (geu (match_dup 1) (const_int 0))
7557 (label_ref (match_operand 0 "" ""))
7558 (pc)))]
7559 "TARGET_32BIT"
7560 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7561 )
7562
7563 (define_expand "bltu"
7564 [(set (pc)
7565 (if_then_else (ltu (match_dup 1) (const_int 0))
7566 (label_ref (match_operand 0 "" ""))
7567 (pc)))]
7568 "TARGET_32BIT"
7569 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7570 )
7571
7572 (define_expand "bunordered"
7573 [(set (pc)
7574 (if_then_else (unordered (match_dup 1) (const_int 0))
7575 (label_ref (match_operand 0 "" ""))
7576 (pc)))]
7577 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7578 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7579 arm_compare_op1);"
7580 )
7581
7582 (define_expand "bordered"
7583 [(set (pc)
7584 (if_then_else (ordered (match_dup 1) (const_int 0))
7585 (label_ref (match_operand 0 "" ""))
7586 (pc)))]
7587 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7588 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7589 arm_compare_op1);"
7590 )
7591
7592 (define_expand "bungt"
7593 [(set (pc)
7594 (if_then_else (ungt (match_dup 1) (const_int 0))
7595 (label_ref (match_operand 0 "" ""))
7596 (pc)))]
7597 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7598 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7599 )
7600
7601 (define_expand "bunlt"
7602 [(set (pc)
7603 (if_then_else (unlt (match_dup 1) (const_int 0))
7604 (label_ref (match_operand 0 "" ""))
7605 (pc)))]
7606 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7607 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7608 )
7609
7610 (define_expand "bunge"
7611 [(set (pc)
7612 (if_then_else (unge (match_dup 1) (const_int 0))
7613 (label_ref (match_operand 0 "" ""))
7614 (pc)))]
7615 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7616 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7617 )
7618
7619 (define_expand "bunle"
7620 [(set (pc)
7621 (if_then_else (unle (match_dup 1) (const_int 0))
7622 (label_ref (match_operand 0 "" ""))
7623 (pc)))]
7624 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7625 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7626 )
7627
7628 ;; The following two patterns need two branch instructions, since there is
7629 ;; no single instruction that will handle all cases.
7630 (define_expand "buneq"
7631 [(set (pc)
7632 (if_then_else (uneq (match_dup 1) (const_int 0))
7633 (label_ref (match_operand 0 "" ""))
7634 (pc)))]
7635 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7636 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7637 )
7638
7639 (define_expand "bltgt"
7640 [(set (pc)
7641 (if_then_else (ltgt (match_dup 1) (const_int 0))
7642 (label_ref (match_operand 0 "" ""))
7643 (pc)))]
7644 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7645 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7646 )
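
;; These expand to the *arm_buneq and *arm_bltgt patterns below: uneq is
;; emitted as "bvs" (unordered) followed by "beq" (equal), and ltgt as
;; "bmi" (less) followed by "bgt" (greater).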
7647
7648 ;;
7649 ;; Patterns to match conditional branch insns.
7650 ;;
7651
7652 ; Special pattern to match UNEQ.
7653 (define_insn "*arm_buneq"
7654 [(set (pc)
7655 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7656 (label_ref (match_operand 0 "" ""))
7657 (pc)))]
7658 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7659 "*
7660 gcc_assert (!arm_ccfsm_state);
7661
7662 return \"bvs\\t%l0\;beq\\t%l0\";
7663 "
7664 [(set_attr "conds" "jump_clob")
7665 (set_attr "length" "8")]
7666 )
7667
7668 ; Special pattern to match LTGT.
7669 (define_insn "*arm_bltgt"
7670 [(set (pc)
7671 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7672 (label_ref (match_operand 0 "" ""))
7673 (pc)))]
7674 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7675 "*
7676 gcc_assert (!arm_ccfsm_state);
7677
7678 return \"bmi\\t%l0\;bgt\\t%l0\";
7679 "
7680 [(set_attr "conds" "jump_clob")
7681 (set_attr "length" "8")]
7682 )
7683
7684 (define_insn "*arm_cond_branch"
7685 [(set (pc)
7686 (if_then_else (match_operator 1 "arm_comparison_operator"
7687 [(match_operand 2 "cc_register" "") (const_int 0)])
7688 (label_ref (match_operand 0 "" ""))
7689 (pc)))]
7690 "TARGET_32BIT"
7691 "*
7692 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7693 {
7694 arm_ccfsm_state += 2;
7695 return \"\";
7696 }
7697 return \"b%d1\\t%l0\";
7698 "
7699 [(set_attr "conds" "use")
7700 (set_attr "type" "branch")]
7701 )
7702
7703 ; Special pattern to match reversed UNEQ.
7704 (define_insn "*arm_buneq_reversed"
7705 [(set (pc)
7706 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7707 (pc)
7708 (label_ref (match_operand 0 "" ""))))]
7709 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7710 "*
7711 gcc_assert (!arm_ccfsm_state);
7712
7713 return \"bmi\\t%l0\;bgt\\t%l0\";
7714 "
7715 [(set_attr "conds" "jump_clob")
7716 (set_attr "length" "8")]
7717 )
7718
7719 ; Special pattern to match reversed LTGT.
7720 (define_insn "*arm_bltgt_reversed"
7721 [(set (pc)
7722 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7723 (pc)
7724 (label_ref (match_operand 0 "" ""))))]
7725 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7726 "*
7727 gcc_assert (!arm_ccfsm_state);
7728
7729 return \"bvs\\t%l0\;beq\\t%l0\";
7730 "
7731 [(set_attr "conds" "jump_clob")
7732 (set_attr "length" "8")]
7733 )
7734
7735 (define_insn "*arm_cond_branch_reversed"
7736 [(set (pc)
7737 (if_then_else (match_operator 1 "arm_comparison_operator"
7738 [(match_operand 2 "cc_register" "") (const_int 0)])
7739 (pc)
7740 (label_ref (match_operand 0 "" ""))))]
7741 "TARGET_32BIT"
7742 "*
7743 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7744 {
7745 arm_ccfsm_state += 2;
7746 return \"\";
7747 }
7748 return \"b%D1\\t%l0\";
7749 "
7750 [(set_attr "conds" "use")
7751 (set_attr "type" "branch")]
7752 )
7753
7754 \f
7755
7756 ; scc insns
7757
7758 (define_expand "seq"
7759 [(set (match_operand:SI 0 "s_register_operand" "")
7760 (eq:SI (match_dup 1) (const_int 0)))]
7761 "TARGET_32BIT"
7762 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7763 )
7764
7765 (define_expand "sne"
7766 [(set (match_operand:SI 0 "s_register_operand" "")
7767 (ne:SI (match_dup 1) (const_int 0)))]
7768 "TARGET_32BIT"
7769 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7770 )
7771
7772 (define_expand "sgt"
7773 [(set (match_operand:SI 0 "s_register_operand" "")
7774 (gt:SI (match_dup 1) (const_int 0)))]
7775 "TARGET_32BIT"
7776 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7777 )
7778
7779 (define_expand "sle"
7780 [(set (match_operand:SI 0 "s_register_operand" "")
7781 (le:SI (match_dup 1) (const_int 0)))]
7782 "TARGET_32BIT"
7783 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7784 )
7785
7786 (define_expand "sge"
7787 [(set (match_operand:SI 0 "s_register_operand" "")
7788 (ge:SI (match_dup 1) (const_int 0)))]
7789 "TARGET_32BIT"
7790 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7791 )
7792
7793 (define_expand "slt"
7794 [(set (match_operand:SI 0 "s_register_operand" "")
7795 (lt:SI (match_dup 1) (const_int 0)))]
7796 "TARGET_32BIT"
7797 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7798 )
7799
7800 (define_expand "sgtu"
7801 [(set (match_operand:SI 0 "s_register_operand" "")
7802 (gtu:SI (match_dup 1) (const_int 0)))]
7803 "TARGET_32BIT"
7804 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7805 )
7806
7807 (define_expand "sleu"
7808 [(set (match_operand:SI 0 "s_register_operand" "")
7809 (leu:SI (match_dup 1) (const_int 0)))]
7810 "TARGET_32BIT"
7811 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7812 )
7813
7814 (define_expand "sgeu"
7815 [(set (match_operand:SI 0 "s_register_operand" "")
7816 (geu:SI (match_dup 1) (const_int 0)))]
7817 "TARGET_32BIT"
7818 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7819 )
7820
7821 (define_expand "sltu"
7822 [(set (match_operand:SI 0 "s_register_operand" "")
7823 (ltu:SI (match_dup 1) (const_int 0)))]
7824 "TARGET_32BIT"
7825 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7826 )
7827
7828 (define_expand "sunordered"
7829 [(set (match_operand:SI 0 "s_register_operand" "")
7830 (unordered:SI (match_dup 1) (const_int 0)))]
7831 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7832 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7833 arm_compare_op1);"
7834 )
7835
7836 (define_expand "sordered"
7837 [(set (match_operand:SI 0 "s_register_operand" "")
7838 (ordered:SI (match_dup 1) (const_int 0)))]
7839 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7840 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7841 arm_compare_op1);"
7842 )
7843
7844 (define_expand "sungt"
7845 [(set (match_operand:SI 0 "s_register_operand" "")
7846 (ungt:SI (match_dup 1) (const_int 0)))]
7847 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7848 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7849 arm_compare_op1);"
7850 )
7851
7852 (define_expand "sunge"
7853 [(set (match_operand:SI 0 "s_register_operand" "")
7854 (unge:SI (match_dup 1) (const_int 0)))]
7855 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7856 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7857 arm_compare_op1);"
7858 )
7859
7860 (define_expand "sunlt"
7861 [(set (match_operand:SI 0 "s_register_operand" "")
7862 (unlt:SI (match_dup 1) (const_int 0)))]
7863 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7864 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7865 arm_compare_op1);"
7866 )
7867
7868 (define_expand "sunle"
7869 [(set (match_operand:SI 0 "s_register_operand" "")
7870 (unle:SI (match_dup 1) (const_int 0)))]
7871 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7872 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7873 arm_compare_op1);"
7874 )
7875
7876 ;;; DO NOT add patterns for SUNEQ or SLTGT; these can't be represented with
7877 ;;; simple ARM instructions.
7878 ;
7879 ; (define_expand "suneq"
7880 ; [(set (match_operand:SI 0 "s_register_operand" "")
7881 ; (uneq:SI (match_dup 1) (const_int 0)))]
7882 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7883 ; "gcc_unreachable ();"
7884 ; )
7885 ;
7886 ; (define_expand "sltgt"
7887 ; [(set (match_operand:SI 0 "s_register_operand" "")
7888 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7889 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7890 ; "gcc_unreachable ();"
7891 ; )
7892
7893 (define_insn "*mov_scc"
7894 [(set (match_operand:SI 0 "s_register_operand" "=r")
7895 (match_operator:SI 1 "arm_comparison_operator"
7896 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7897 "TARGET_ARM"
7898 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7899 [(set_attr "conds" "use")
7900 (set_attr "length" "8")]
7901 )
7902
7903 (define_insn "*mov_negscc"
7904 [(set (match_operand:SI 0 "s_register_operand" "=r")
7905 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7906 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7907 "TARGET_ARM"
7908 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7909 [(set_attr "conds" "use")
7910 (set_attr "length" "8")]
7911 )
7912
7913 (define_insn "*mov_notscc"
7914 [(set (match_operand:SI 0 "s_register_operand" "=r")
7915 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7916 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7917 "TARGET_ARM"
7918 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7919 [(set_attr "conds" "use")
7920 (set_attr "length" "8")]
7921 )
7922
7923 (define_expand "cstoresi4"
7924 [(set (match_operand:SI 0 "s_register_operand" "")
7925 (match_operator:SI 1 "arm_comparison_operator"
7926 [(match_operand:SI 2 "s_register_operand" "")
7927 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7928 "TARGET_THUMB1"
7929 "{
7930 rtx op3, scratch, scratch2;
7931
7932 if (operands[3] == const0_rtx)
7933 {
7934 switch (GET_CODE (operands[1]))
7935 {
7936 case EQ:
7937 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7938 break;
7939
7940 case NE:
7941 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7942 break;
7943
7944 case LE:
7945 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7946 NULL_RTX, 0, OPTAB_WIDEN);
7947 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7948 NULL_RTX, 0, OPTAB_WIDEN);
7949 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7950 operands[0], 1, OPTAB_WIDEN);
7951 break;
7952
7953 case GE:
7954 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7955 NULL_RTX, 1);
7956 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7957 			operands[0], 1, OPTAB_WIDEN);
7958 break;
7959
7960 case GT:
7961 scratch = expand_binop (SImode, ashr_optab, operands[2],
7962 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7963 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7964 NULL_RTX, 0, OPTAB_WIDEN);
7965 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7966 0, OPTAB_WIDEN);
7967 break;
7968
7969 /* LT is handled by generic code. No need for unsigned with 0. */
7970 default:
7971 FAIL;
7972 }
7973 DONE;
7974 }
7975
7976 switch (GET_CODE (operands[1]))
7977 {
7978 case EQ:
7979 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7980 NULL_RTX, 0, OPTAB_WIDEN);
7981 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7982 break;
7983
7984 case NE:
7985 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7986 NULL_RTX, 0, OPTAB_WIDEN);
7987 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7988 break;
7989
7990 case LE:
7991 op3 = force_reg (SImode, operands[3]);
7992
7993 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7994 NULL_RTX, 1, OPTAB_WIDEN);
7995 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7996 NULL_RTX, 0, OPTAB_WIDEN);
7997 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7998 op3, operands[2]));
7999 break;
8000
8001 case GE:
8002 op3 = operands[3];
8003 if (!thumb1_cmp_operand (op3, SImode))
8004 op3 = force_reg (SImode, op3);
8005 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8006 NULL_RTX, 0, OPTAB_WIDEN);
8007 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8008 NULL_RTX, 1, OPTAB_WIDEN);
8009 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8010 operands[2], op3));
8011 break;
8012
8013 case LEU:
8014 op3 = force_reg (SImode, operands[3]);
8015 scratch = force_reg (SImode, const0_rtx);
8016 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8017 op3, operands[2]));
8018 break;
8019
8020 case GEU:
8021 op3 = operands[3];
8022 if (!thumb1_cmp_operand (op3, SImode))
8023 op3 = force_reg (SImode, op3);
8024 scratch = force_reg (SImode, const0_rtx);
8025 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8026 operands[2], op3));
8027 break;
8028
8029 case LTU:
8030 op3 = operands[3];
8031 if (!thumb1_cmp_operand (op3, SImode))
8032 op3 = force_reg (SImode, op3);
8033 scratch = gen_reg_rtx (SImode);
8034 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8035 emit_insn (gen_negsi2 (operands[0], scratch));
8036 break;
8037
8038 case GTU:
8039 op3 = force_reg (SImode, operands[3]);
8040 scratch = gen_reg_rtx (SImode);
8041 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8042 emit_insn (gen_negsi2 (operands[0], scratch));
8043 break;
8044
8045 /* No good sequences for GT, LT. */
8046 default:
8047 FAIL;
8048 }
8049 DONE;
8050 }")
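
;; The zero-comparison cases above reduce to branchless Thumb-1 sequences.
;; A rough C model (illustrative only; GCC treats ">>" on signed operands
;; as an arithmetic shift on ARM):
;;
;;   ge0 = ~(unsigned) x >> 31;                        /* x >= 0 */
;;   le0 = ((unsigned) x | ((unsigned) x - 1)) >> 31;  /* x <= 0 */
;;   gt0 = (unsigned) ((x >> 31) - x) >> 31;           /* x >  0 */
;;
;; so no conditional branch is needed to materialize the 0/1 result.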
8051
8052 (define_expand "cstoresi_eq0_thumb1"
8053 [(parallel
8054 [(set (match_operand:SI 0 "s_register_operand" "")
8055 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8056 (const_int 0)))
8057 (clobber (match_dup:SI 2))])]
8058 "TARGET_THUMB1"
8059 "operands[2] = gen_reg_rtx (SImode);"
8060 )
8061
8062 (define_expand "cstoresi_ne0_thumb1"
8063 [(parallel
8064 [(set (match_operand:SI 0 "s_register_operand" "")
8065 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8066 (const_int 0)))
8067 (clobber (match_dup:SI 2))])]
8068 "TARGET_THUMB1"
8069 "operands[2] = gen_reg_rtx (SImode);"
8070 )
8071
8072 (define_insn "*cstoresi_eq0_thumb1_insn"
8073 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8074 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8075 (const_int 0)))
8076 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8077 "TARGET_THUMB1"
8078 "@
8079 neg\\t%0, %1\;adc\\t%0, %0, %1
8080 neg\\t%2, %1\;adc\\t%0, %1, %2"
8081 [(set_attr "length" "4")]
8082 )
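
;; The neg/adc pair above computes (x == 0) without branching.  Roughly
;; (illustrative only):
;;
;;   t = 0 - x;           /* NEG: the carry flag is set only when x == 0 */
;;   r = t + x + carry;   /* ADC: t + x wraps to 0, so r is just carry   */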
8083
8084 (define_insn "*cstoresi_ne0_thumb1_insn"
8085 [(set (match_operand:SI 0 "s_register_operand" "=l")
8086 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8087 (const_int 0)))
8088 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8089 "TARGET_THUMB1"
8090 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8091 [(set_attr "length" "4")]
8092 )
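
;; Likewise, the sub/sbc pair above computes (x != 0).  Roughly
;; (illustrative only):
;;
;;   t = x - 1;           /* SUB: carry set iff x >= 1, i.e. x != 0      */
;;   r = x - t - !carry;  /* SBC: x - (x - 1) is 1, so r equals carry    */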
8093
8094 (define_insn "cstoresi_nltu_thumb1"
8095 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8096 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8097 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8098 "TARGET_THUMB1"
8099 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8100 [(set_attr "length" "4")]
8101 )
8102
8103 ;; Used as part of the expansion of the Thumb-1 le/ge/leu/geu cstore sequences.
8104 (define_insn "thumb1_addsi3_addgeu"
8105 [(set (match_operand:SI 0 "s_register_operand" "=l")
8106 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8107 (match_operand:SI 2 "s_register_operand" "l"))
8108 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8109 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8110 "TARGET_THUMB1"
8111 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8112 [(set_attr "length" "4")]
8113 )
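
;; A rough model of the cmp/adc pair above (illustrative only):
;;
;;   carry = ((unsigned) op3 >= (unsigned) op4);  /* CMP leaves GEU in C */
;;   r = op1 + op2 + carry;                       /* ADC folds it in     */
;;
;; which is how the le/ge/leu/geu cases of cstoresi4 build their results
;; without branching.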
8114
8115 \f
8116 ;; Conditional move insns
8117
8118 (define_expand "movsicc"
8119 [(set (match_operand:SI 0 "s_register_operand" "")
8120 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8121 (match_operand:SI 2 "arm_not_operand" "")
8122 (match_operand:SI 3 "arm_not_operand" "")))]
8123 "TARGET_32BIT"
8124 "
8125 {
8126 enum rtx_code code = GET_CODE (operands[1]);
8127 rtx ccreg;
8128
8129 if (code == UNEQ || code == LTGT)
8130 FAIL;
8131
8132 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8133 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8134 }"
8135 )
8136
8137 (define_expand "movsfcc"
8138 [(set (match_operand:SF 0 "s_register_operand" "")
8139 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8140 (match_operand:SF 2 "s_register_operand" "")
8141 (match_operand:SF 3 "nonmemory_operand" "")))]
8142 "TARGET_32BIT"
8143 "
8144 {
8145 enum rtx_code code = GET_CODE (operands[1]);
8146 rtx ccreg;
8147
8148 if (code == UNEQ || code == LTGT)
8149 FAIL;
8150
8151 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8152      Otherwise, ensure it is a valid FP add operand.  */
8153 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8154 || (!arm_float_add_operand (operands[3], SFmode)))
8155 operands[3] = force_reg (SFmode, operands[3]);
8156
8157 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8158 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8159 }"
8160 )
8161
8162 (define_expand "movdfcc"
8163 [(set (match_operand:DF 0 "s_register_operand" "")
8164 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8165 (match_operand:DF 2 "s_register_operand" "")
8166 (match_operand:DF 3 "arm_float_add_operand" "")))]
8167 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8168 "
8169 {
8170 enum rtx_code code = GET_CODE (operands[1]);
8171 rtx ccreg;
8172
8173 if (code == UNEQ || code == LTGT)
8174 FAIL;
8175
8176 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8177 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8178 }"
8179 )
8180
8181 (define_insn "*movsicc_insn"
8182 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8183 (if_then_else:SI
8184 (match_operator 3 "arm_comparison_operator"
8185 [(match_operand 4 "cc_register" "") (const_int 0)])
8186 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8187 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8188 "TARGET_ARM"
8189 "@
8190 mov%D3\\t%0, %2
8191 mvn%D3\\t%0, #%B2
8192 mov%d3\\t%0, %1
8193 mvn%d3\\t%0, #%B1
8194 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8195 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8196 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8197 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8198 [(set_attr "length" "4,4,4,4,8,8,8,8")
8199 (set_attr "conds" "use")]
8200 )
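
;; For instance (illustrative only), after a compare an assignment such as
;;
;;   r = (a == b) ? 10 : -4;
;;
;; matches the "rI,K" alternative and comes out roughly as
;;
;;   moveq  r0, #10
;;   mvnne  r0, #3        @ ~3 == -4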
8201
8202 (define_insn "*movsfcc_soft_insn"
8203 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8204 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8205 [(match_operand 4 "cc_register" "") (const_int 0)])
8206 (match_operand:SF 1 "s_register_operand" "0,r")
8207 (match_operand:SF 2 "s_register_operand" "r,0")))]
8208 "TARGET_ARM && TARGET_SOFT_FLOAT"
8209 "@
8210 mov%D3\\t%0, %2
8211 mov%d3\\t%0, %1"
8212 [(set_attr "conds" "use")]
8213 )
8214
8215 \f
8216 ;; Jump and linkage insns
8217
8218 (define_expand "jump"
8219 [(set (pc)
8220 (label_ref (match_operand 0 "" "")))]
8221 "TARGET_EITHER"
8222 ""
8223 )
8224
8225 (define_insn "*arm_jump"
8226 [(set (pc)
8227 (label_ref (match_operand 0 "" "")))]
8228 "TARGET_32BIT"
8229 "*
8230 {
8231 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8232 {
8233 arm_ccfsm_state += 2;
8234 return \"\";
8235 }
8236 return \"b%?\\t%l0\";
8237 }
8238 "
8239 [(set_attr "predicable" "yes")]
8240 )
8241
8242 (define_insn "*thumb_jump"
8243 [(set (pc)
8244 (label_ref (match_operand 0 "" "")))]
8245 "TARGET_THUMB1"
8246 "*
8247 if (get_attr_length (insn) == 2)
8248 return \"b\\t%l0\";
8249 return \"bl\\t%l0\\t%@ far jump\";
8250 "
8251 [(set (attr "far_jump")
8252 (if_then_else
8253 (eq_attr "length" "4")
8254 (const_string "yes")
8255 (const_string "no")))
8256 (set (attr "length")
8257 (if_then_else
8258 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8259 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8260 (const_int 2)
8261 (const_int 4)))]
8262 )
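
;; When the label is out of reach of the 11-bit Thumb branch, the pattern
;; falls back to "bl" (a far jump), which clobbers lr; the far_jump
;; attribute is what tells the prologue code that lr must then be saved.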
8263
8264 (define_expand "call"
8265 [(parallel [(call (match_operand 0 "memory_operand" "")
8266 (match_operand 1 "general_operand" ""))
8267 (use (match_operand 2 "" ""))
8268 (clobber (reg:SI LR_REGNUM))])]
8269 "TARGET_EITHER"
8270 "
8271 {
8272 rtx callee, pat;
8273
8274 /* In an untyped call, we can get NULL for operand 2. */
8275 if (operands[2] == NULL_RTX)
8276 operands[2] = const0_rtx;
8277
8278 /* Decide if we should generate indirect calls by loading the
8279 32-bit address of the callee into a register before performing the
8280 branch and link. */
8281 callee = XEXP (operands[0], 0);
8282 if (GET_CODE (callee) == SYMBOL_REF
8283 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8284 : !REG_P (callee))
8285 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8286
8287 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8288 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8289 DONE;
8290 }"
8291 )
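
;; An illustrative example (not from this file): with -mlong-calls, or with
;;
;;   void far_func (void) __attribute__ ((long_call));
;;   ...
;;   far_func ();
;;
;; arm_is_long_call_p returns true, so the callee address is loaded into a
;; register first and the call goes out through one of the register-call
;; patterns below (e.g. "blx" on ARMv5).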
8292
8293 (define_expand "call_internal"
8294 [(parallel [(call (match_operand 0 "memory_operand" "")
8295 (match_operand 1 "general_operand" ""))
8296 (use (match_operand 2 "" ""))
8297 (clobber (reg:SI LR_REGNUM))])])
8298
8299 (define_insn "*call_reg_armv5"
8300 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8301 (match_operand 1 "" ""))
8302 (use (match_operand 2 "" ""))
8303 (clobber (reg:SI LR_REGNUM))]
8304 "TARGET_ARM && arm_arch5"
8305 "blx%?\\t%0"
8306 [(set_attr "type" "call")]
8307 )
8308
8309 (define_insn "*call_reg_arm"
8310 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8311 (match_operand 1 "" ""))
8312 (use (match_operand 2 "" ""))
8313 (clobber (reg:SI LR_REGNUM))]
8314 "TARGET_ARM && !arm_arch5"
8315 "*
8316 return output_call (operands);
8317 "
8318 ;; The length is the worst case; normally only two instructions are emitted.
8319 [(set_attr "length" "12")
8320 (set_attr "type" "call")]
8321 )
8322
8323 (define_insn "*call_mem"
8324 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8325 (match_operand 1 "" ""))
8326 (use (match_operand 2 "" ""))
8327 (clobber (reg:SI LR_REGNUM))]
8328 "TARGET_ARM"
8329 "*
8330 return output_call_mem (operands);
8331 "
8332 [(set_attr "length" "12")
8333 (set_attr "type" "call")]
8334 )
8335
8336 (define_insn "*call_reg_thumb1_v5"
8337 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8338 (match_operand 1 "" ""))
8339 (use (match_operand 2 "" ""))
8340 (clobber (reg:SI LR_REGNUM))]
8341 "TARGET_THUMB1 && arm_arch5"
8342 "blx\\t%0"
8343 [(set_attr "length" "2")
8344 (set_attr "type" "call")]
8345 )
8346
8347 (define_insn "*call_reg_thumb1"
8348 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8349 (match_operand 1 "" ""))
8350 (use (match_operand 2 "" ""))
8351 (clobber (reg:SI LR_REGNUM))]
8352 "TARGET_THUMB1 && !arm_arch5"
8353 "*
8354 {
8355 if (!TARGET_CALLER_INTERWORKING)
8356 return thumb_call_via_reg (operands[0]);
8357 else if (operands[1] == const0_rtx)
8358 return \"bl\\t%__interwork_call_via_%0\";
8359 else if (frame_pointer_needed)
8360 return \"bl\\t%__interwork_r7_call_via_%0\";
8361 else
8362 return \"bl\\t%__interwork_r11_call_via_%0\";
8363 }"
8364 [(set_attr "type" "call")]
8365 )
8366
8367 (define_expand "call_value"
8368 [(parallel [(set (match_operand 0 "" "")
8369 (call (match_operand 1 "memory_operand" "")
8370 (match_operand 2 "general_operand" "")))
8371 (use (match_operand 3 "" ""))
8372 (clobber (reg:SI LR_REGNUM))])]
8373 "TARGET_EITHER"
8374 "
8375 {
8376 rtx pat, callee;
8377
8378   /* In an untyped call, we can get NULL for operand 3.  */
8379 if (operands[3] == 0)
8380 operands[3] = const0_rtx;
8381
8382 /* Decide if we should generate indirect calls by loading the
8383 32-bit address of the callee into a register before performing the
8384 branch and link. */
8385 callee = XEXP (operands[1], 0);
8386 if (GET_CODE (callee) == SYMBOL_REF
8387 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8388 : !REG_P (callee))
8389 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8390
8391 pat = gen_call_value_internal (operands[0], operands[1],
8392 operands[2], operands[3]);
8393 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8394 DONE;
8395 }"
8396 )
8397
8398 (define_expand "call_value_internal"
8399 [(parallel [(set (match_operand 0 "" "")
8400 (call (match_operand 1 "memory_operand" "")
8401 (match_operand 2 "general_operand" "")))
8402 (use (match_operand 3 "" ""))
8403 (clobber (reg:SI LR_REGNUM))])])
8404
8405 (define_insn "*call_value_reg_armv5"
8406 [(set (match_operand 0 "" "")
8407 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8408 (match_operand 2 "" "")))
8409 (use (match_operand 3 "" ""))
8410 (clobber (reg:SI LR_REGNUM))]
8411 "TARGET_ARM && arm_arch5"
8412 "blx%?\\t%1"
8413 [(set_attr "type" "call")]
8414 )
8415
8416 (define_insn "*call_value_reg_arm"
8417 [(set (match_operand 0 "" "")
8418 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8419 (match_operand 2 "" "")))
8420 (use (match_operand 3 "" ""))
8421 (clobber (reg:SI LR_REGNUM))]
8422 "TARGET_ARM && !arm_arch5"
8423 "*
8424 return output_call (&operands[1]);
8425 "
8426 [(set_attr "length" "12")
8427 (set_attr "type" "call")]
8428 )
8429
8430 (define_insn "*call_value_mem"
8431 [(set (match_operand 0 "" "")
8432 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8433 (match_operand 2 "" "")))
8434 (use (match_operand 3 "" ""))
8435 (clobber (reg:SI LR_REGNUM))]
8436 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8437 "*
8438 return output_call_mem (&operands[1]);
8439 "
8440 [(set_attr "length" "12")
8441 (set_attr "type" "call")]
8442 )
8443
8444 (define_insn "*call_value_reg_thumb1_v5"
8445 [(set (match_operand 0 "" "")
8446 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8447 (match_operand 2 "" "")))
8448 (use (match_operand 3 "" ""))
8449 (clobber (reg:SI LR_REGNUM))]
8450 "TARGET_THUMB1 && arm_arch5"
8451 "blx\\t%1"
8452 [(set_attr "length" "2")
8453 (set_attr "type" "call")]
8454 )
8455
8456 (define_insn "*call_value_reg_thumb1"
8457 [(set (match_operand 0 "" "")
8458 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8459 (match_operand 2 "" "")))
8460 (use (match_operand 3 "" ""))
8461 (clobber (reg:SI LR_REGNUM))]
8462 "TARGET_THUMB1 && !arm_arch5"
8463 "*
8464 {
8465 if (!TARGET_CALLER_INTERWORKING)
8466 return thumb_call_via_reg (operands[1]);
8467 else if (operands[2] == const0_rtx)
8468 return \"bl\\t%__interwork_call_via_%1\";
8469 else if (frame_pointer_needed)
8470 return \"bl\\t%__interwork_r7_call_via_%1\";
8471 else
8472 return \"bl\\t%__interwork_r11_call_via_%1\";
8473 }"
8474 [(set_attr "type" "call")]
8475 )
8476
8477 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8478 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8479
8480 (define_insn "*call_symbol"
8481 [(call (mem:SI (match_operand:SI 0 "" ""))
8482 (match_operand 1 "" ""))
8483 (use (match_operand 2 "" ""))
8484 (clobber (reg:SI LR_REGNUM))]
8485 "TARGET_ARM
8486 && (GET_CODE (operands[0]) == SYMBOL_REF)
8487 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8488 "*
8489 {
8490 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8491 }"
8492 [(set_attr "type" "call")]
8493 )
8494
8495 (define_insn "*call_value_symbol"
8496 [(set (match_operand 0 "" "")
8497 (call (mem:SI (match_operand:SI 1 "" ""))
8498 (match_operand:SI 2 "" "")))
8499 (use (match_operand 3 "" ""))
8500 (clobber (reg:SI LR_REGNUM))]
8501 "TARGET_ARM
8502 && (GET_CODE (operands[1]) == SYMBOL_REF)
8503 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8504 "*
8505 {
8506 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8507 }"
8508 [(set_attr "type" "call")]
8509 )
8510
8511 (define_insn "*call_insn"
8512 [(call (mem:SI (match_operand:SI 0 "" ""))
8513 (match_operand:SI 1 "" ""))
8514 (use (match_operand 2 "" ""))
8515 (clobber (reg:SI LR_REGNUM))]
8516 "TARGET_THUMB
8517 && GET_CODE (operands[0]) == SYMBOL_REF
8518 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8519 "bl\\t%a0"
8520 [(set_attr "length" "4")
8521 (set_attr "type" "call")]
8522 )
8523
8524 (define_insn "*call_value_insn"
8525 [(set (match_operand 0 "" "")
8526 (call (mem:SI (match_operand 1 "" ""))
8527 (match_operand 2 "" "")))
8528 (use (match_operand 3 "" ""))
8529 (clobber (reg:SI LR_REGNUM))]
8530 "TARGET_THUMB
8531 && GET_CODE (operands[1]) == SYMBOL_REF
8532 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8533 "bl\\t%a1"
8534 [(set_attr "length" "4")
8535 (set_attr "type" "call")]
8536 )
8537
8538 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8539 (define_expand "sibcall"
8540 [(parallel [(call (match_operand 0 "memory_operand" "")
8541 (match_operand 1 "general_operand" ""))
8542 (return)
8543 (use (match_operand 2 "" ""))])]
8544 "TARGET_ARM"
8545 "
8546 {
8547 if (operands[2] == NULL_RTX)
8548 operands[2] = const0_rtx;
8549 }"
8550 )
8551
8552 (define_expand "sibcall_value"
8553 [(parallel [(set (match_operand 0 "" "")
8554 (call (match_operand 1 "memory_operand" "")
8555 (match_operand 2 "general_operand" "")))
8556 (return)
8557 (use (match_operand 3 "" ""))])]
8558 "TARGET_ARM"
8559 "
8560 {
8561 if (operands[3] == NULL_RTX)
8562 operands[3] = const0_rtx;
8563 }"
8564 )
8565
8566 (define_insn "*sibcall_insn"
8567 [(call (mem:SI (match_operand:SI 0 "" "X"))
8568 (match_operand 1 "" ""))
8569 (return)
8570 (use (match_operand 2 "" ""))]
8571 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8572 "*
8573 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8574 "
8575 [(set_attr "type" "call")]
8576 )
8577
8578 (define_insn "*sibcall_value_insn"
8579 [(set (match_operand 0 "" "")
8580 (call (mem:SI (match_operand:SI 1 "" "X"))
8581 (match_operand 2 "" "")))
8582 (return)
8583 (use (match_operand 3 "" ""))]
8584 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8585 "*
8586 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8587 "
8588 [(set_attr "type" "call")]
8589 )
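;; Illustration (hypothetical example; symbol names assumed): when the
;; caller's frame can be discarded, a tail call such as
;;   extern int foo (int);
;;   int bar (int x) { return foo (x); }
;; matches the sibcall patterns above and becomes a plain branch,
;;   b       foo            @ or "b foo(PLT)" when NEED_PLT_RELOC holds
;; reusing the caller's return address instead of clobbering LR again.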
8590
8591 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8592 (define_insn "return"
8593 [(return)]
8594 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8595 "*
8596 {
8597 if (arm_ccfsm_state == 2)
8598 {
8599 arm_ccfsm_state += 2;
8600 return \"\";
8601 }
8602 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8603 }"
8604 [(set_attr "type" "load1")
8605 (set_attr "length" "12")
8606 (set_attr "predicable" "yes")]
8607 )
8608
8609 (define_insn "*cond_return"
8610 [(set (pc)
8611 (if_then_else (match_operator 0 "arm_comparison_operator"
8612 [(match_operand 1 "cc_register" "") (const_int 0)])
8613 (return)
8614 (pc)))]
8615 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8616 "*
8617 {
8618 if (arm_ccfsm_state == 2)
8619 {
8620 arm_ccfsm_state += 2;
8621 return \"\";
8622 }
8623 return output_return_instruction (operands[0], TRUE, FALSE);
8624 }"
8625 [(set_attr "conds" "use")
8626 (set_attr "length" "12")
8627 (set_attr "type" "load1")]
8628 )
8629
8630 (define_insn "*cond_return_inverted"
8631 [(set (pc)
8632 (if_then_else (match_operator 0 "arm_comparison_operator"
8633 [(match_operand 1 "cc_register" "") (const_int 0)])
8634 (pc)
8635 (return)))]
8636 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8637 "*
8638 {
8639 if (arm_ccfsm_state == 2)
8640 {
8641 arm_ccfsm_state += 2;
8642 return \"\";
8643 }
8644 return output_return_instruction (operands[0], TRUE, TRUE);
8645 }"
8646 [(set_attr "conds" "use")
8647 (set_attr "length" "12")
8648 (set_attr "type" "load1")]
8649 )
8650
8651 ;; Generate a sequence of instructions to determine if the processor is
8652 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8653 ;; mask.
8654
8655 (define_expand "return_addr_mask"
8656 [(set (match_dup 1)
8657 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8658 (const_int 0)))
8659 (set (match_operand:SI 0 "s_register_operand" "")
8660 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8661 (const_int -1)
8662 (const_int 67108860)))] ; 0x03fffffc
8663 "TARGET_ARM"
8664 "
8665 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8666 ")
8667
8668 (define_insn "*check_arch2"
8669 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8670 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8671 (const_int 0)))]
8672 "TARGET_ARM"
8673 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8674 [(set_attr "length" "8")
8675 (set_attr "conds" "set")]
8676 )
8677
8678 ;; Call subroutine returning any type.
8679
8680 (define_expand "untyped_call"
8681 [(parallel [(call (match_operand 0 "" "")
8682 (const_int 0))
8683 (match_operand 1 "" "")
8684 (match_operand 2 "" "")])]
8685 "TARGET_EITHER"
8686 "
8687 {
8688 int i;
8689 rtx par = gen_rtx_PARALLEL (VOIDmode,
8690 rtvec_alloc (XVECLEN (operands[2], 0)));
8691 rtx addr = gen_reg_rtx (Pmode);
8692 rtx mem;
8693 int size = 0;
8694
8695 emit_move_insn (addr, XEXP (operands[1], 0));
8696 mem = change_address (operands[1], BLKmode, addr);
8697
8698 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8699 {
8700 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8701
8702 /* Default code only uses r0 as a return value, but we could
8703 be using anything up to 4 registers. */
8704 if (REGNO (src) == R0_REGNUM)
8705 src = gen_rtx_REG (TImode, R0_REGNUM);
8706
8707 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8708 GEN_INT (size));
8709 size += GET_MODE_SIZE (GET_MODE (src));
8710 }
8711
8712 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8713 const0_rtx));
8714
8715 size = 0;
8716
8717 for (i = 0; i < XVECLEN (par, 0); i++)
8718 {
8719 HOST_WIDE_INT offset = 0;
8720 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8721
8722 if (size != 0)
8723 emit_move_insn (addr, plus_constant (addr, size));
8724
8725 mem = change_address (mem, GET_MODE (reg), NULL);
8726 if (REGNO (reg) == R0_REGNUM)
8727 {
8728 /* On Thumb we have to use a write-back instruction.  */
8729 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8730 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8731 size = TARGET_ARM ? 16 : 0;
8732 }
8733 else
8734 {
8735 emit_move_insn (mem, reg);
8736 size = GET_MODE_SIZE (GET_MODE (reg));
8737 }
8738 }
8739
8740 /* The optimizer does not know that the call sets the function value
8741 registers we stored in the result block. We avoid problems by
8742 claiming that all hard registers are used and clobbered at this
8743 point. */
8744 emit_insn (gen_blockage ());
8745
8746 DONE;
8747 }"
8748 )
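;; Illustration (hypothetical example; "target" is an assumed function
;; pointer): untyped_call and untyped_return back the __builtin_apply
;; machinery, e.g.
;;   void *args = __builtin_apply_args ();
;;   void *ret  = __builtin_apply ((void (*) ()) target, args, 16);
;;   __builtin_return (ret);
;; The expander above saves every possible value-return register into the
;; result block so any return type is preserved.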
8749
8750 (define_expand "untyped_return"
8751 [(match_operand:BLK 0 "memory_operand" "")
8752 (match_operand 1 "" "")]
8753 "TARGET_EITHER"
8754 "
8755 {
8756 int i;
8757 rtx addr = gen_reg_rtx (Pmode);
8758 rtx mem;
8759 int size = 0;
8760
8761 emit_move_insn (addr, XEXP (operands[0], 0));
8762 mem = change_address (operands[0], BLKmode, addr);
8763
8764 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8765 {
8766 HOST_WIDE_INT offset = 0;
8767 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8768
8769 if (size != 0)
8770 emit_move_insn (addr, plus_constant (addr, size));
8771
8772 mem = change_address (mem, GET_MODE (reg), NULL);
8773 if (REGNO (reg) == R0_REGNUM)
8774 {
8775 /* On Thumb we have to use a write-back instruction.  */
8776 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8777 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8778 size = TARGET_ARM ? 16 : 0;
8779 }
8780 else
8781 {
8782 emit_move_insn (reg, mem);
8783 size = GET_MODE_SIZE (GET_MODE (reg));
8784 }
8785 }
8786
8787 /* Emit USE insns before the return. */
8788 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8789 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8790
8791 /* Construct the return. */
8792 expand_naked_return ();
8793
8794 DONE;
8795 }"
8796 )
8797
8798 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8799 ;; all of memory. This blocks insns from being moved across this point.
8800
8801 (define_insn "blockage"
8802 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8803 "TARGET_EITHER"
8804 ""
8805 [(set_attr "length" "0")
8806 (set_attr "type" "block")]
8807 )
8808
8809 (define_expand "casesi"
8810 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8811 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8812 (match_operand:SI 2 "const_int_operand" "") ; total range
8813 (match_operand:SI 3 "" "") ; table label
8814 (match_operand:SI 4 "" "")] ; Out of range label
8815 "TARGET_32BIT"
8816 "
8817 {
8818 rtx reg;
8819 if (operands[1] != const0_rtx)
8820 {
8821 reg = gen_reg_rtx (SImode);
8822
8823 emit_insn (gen_addsi3 (reg, operands[0],
8824 GEN_INT (-INTVAL (operands[1]))));
8825 operands[0] = reg;
8826 }
8827
8828 if (!const_ok_for_arm (INTVAL (operands[2])))
8829 operands[2] = force_reg (SImode, operands[2]);
8830
8831 if (TARGET_ARM)
8832 {
8833 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8834 operands[3], operands[4]));
8835 }
8836 else if (flag_pic)
8837 {
8838 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8839 operands[2], operands[3], operands[4]));
8840 }
8841 else
8842 {
8843 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8844 operands[3], operands[4]));
8845 }
8846 DONE;
8847 }"
8848 )
8849
8850 ;; The USE in this pattern is needed to tell flow analysis that this is
8851 ;; a CASESI insn. It has no other purpose.
8852 (define_insn "arm_casesi_internal"
8853 [(parallel [(set (pc)
8854 (if_then_else
8855 (leu (match_operand:SI 0 "s_register_operand" "r")
8856 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8857 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8858 (label_ref (match_operand 2 "" ""))))
8859 (label_ref (match_operand 3 "" ""))))
8860 (clobber (reg:CC CC_REGNUM))
8861 (use (label_ref (match_dup 2)))])]
8862 "TARGET_ARM"
8863 "*
8864 if (flag_pic)
8865 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8866 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8867 "
8868 [(set_attr "conds" "clob")
8869 (set_attr "length" "12")]
8870 )
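;; Illustration (hypothetical labels and registers) of the non-PIC sequence
;; above for a dense C switch, e.g. switch (i) with cases 0..3:
;;   cmp     r0, #3
;;   ldrls   pc, [pc, r0, asl #2]   @ pc reads as this insn + 8, i.e. the
;;   b       .Ldefault              @ start of the table that follows
;;   .word   .Lcase0
;;   .word   .Lcase1
;;   ...
;; which is why the jump table must be emitted immediately after this insn.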
8871
8872 (define_expand "indirect_jump"
8873 [(set (pc)
8874 (match_operand:SI 0 "s_register_operand" ""))]
8875 "TARGET_EITHER"
8876 "
8877 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8878 address and use bx. */
8879 if (TARGET_THUMB2)
8880 {
8881 rtx tmp;
8882 tmp = gen_reg_rtx (SImode);
8883 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT (1)));
8884 operands[0] = tmp;
8885 }
8886 "
8887 )
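;; Illustration (hypothetical example): a computed goto such as
;;   goto *p;   /* p previously set via the GNU "&&label" extension */
;; expands through indirect_jump and, in ARM state, ends up as the
;; "mov pc, rN" form emitted by *arm_indirect_jump below.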
8888
8889 ;; NB Never uses BX.
8890 (define_insn "*arm_indirect_jump"
8891 [(set (pc)
8892 (match_operand:SI 0 "s_register_operand" "r"))]
8893 "TARGET_ARM"
8894 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8895 [(set_attr "predicable" "yes")]
8896 )
8897
8898 (define_insn "*load_indirect_jump"
8899 [(set (pc)
8900 (match_operand:SI 0 "memory_operand" "m"))]
8901 "TARGET_ARM"
8902 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8903 [(set_attr "type" "load1")
8904 (set_attr "pool_range" "4096")
8905 (set_attr "neg_pool_range" "4084")
8906 (set_attr "predicable" "yes")]
8907 )
8908
8909 ;; NB Never uses BX.
8910 (define_insn "*thumb1_indirect_jump"
8911 [(set (pc)
8912 (match_operand:SI 0 "register_operand" "l*r"))]
8913 "TARGET_THUMB1"
8914 "mov\\tpc, %0"
8915 [(set_attr "conds" "clob")
8916 (set_attr "length" "2")]
8917 )
8918
8919 \f
8920 ;; Misc insns
8921
8922 (define_insn "nop"
8923 [(const_int 0)]
8924 "TARGET_EITHER"
8925 "*
8926 if (TARGET_UNIFIED_ASM)
8927 return \"nop\";
8928 if (TARGET_ARM)
8929 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8930 return \"mov\\tr8, r8\";
8931 "
8932 [(set (attr "length")
8933 (if_then_else (eq_attr "is_thumb" "yes")
8934 (const_int 2)
8935 (const_int 4)))]
8936 )
8937
8938 \f
8939 ;; Patterns to allow combination of arithmetic, cond code and shifts
8940
8941 (define_insn "*arith_shiftsi"
8942 [(set (match_operand:SI 0 "s_register_operand" "=r")
8943 (match_operator:SI 1 "shiftable_operator"
8944 [(match_operator:SI 3 "shift_operator"
8945 [(match_operand:SI 4 "s_register_operand" "r")
8946 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8947 (match_operand:SI 2 "s_register_operand" "r")]))]
8948 "TARGET_ARM"
8949 "%i1%?\\t%0, %2, %4%S3"
8950 [(set_attr "predicable" "yes")
8951 (set_attr "shift" "4")
8952 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8953 (const_string "alu_shift")
8954 (const_string "alu_shift_reg")))]
8955 )
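;; Illustration (hypothetical register assignment): a shift feeding an ALU
;; operation, e.g.
;;   int f (int a, int b) { return a + (b << 2); }
;; matches *arith_shiftsi and folds into one instruction,
;;   add     r0, r0, r1, lsl #2
;; instead of a separate shift followed by an add.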
8956
8957 (define_split
8958 [(set (match_operand:SI 0 "s_register_operand" "")
8959 (match_operator:SI 1 "shiftable_operator"
8960 [(match_operator:SI 2 "shiftable_operator"
8961 [(match_operator:SI 3 "shift_operator"
8962 [(match_operand:SI 4 "s_register_operand" "")
8963 (match_operand:SI 5 "reg_or_int_operand" "")])
8964 (match_operand:SI 6 "s_register_operand" "")])
8965 (match_operand:SI 7 "arm_rhs_operand" "")]))
8966 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8967 "TARGET_ARM"
8968 [(set (match_dup 8)
8969 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8970 (match_dup 6)]))
8971 (set (match_dup 0)
8972 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8973 "")
8974
8975 (define_insn "*arith_shiftsi_compare0"
8976 [(set (reg:CC_NOOV CC_REGNUM)
8977 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8978 [(match_operator:SI 3 "shift_operator"
8979 [(match_operand:SI 4 "s_register_operand" "r")
8980 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8981 (match_operand:SI 2 "s_register_operand" "r")])
8982 (const_int 0)))
8983 (set (match_operand:SI 0 "s_register_operand" "=r")
8984 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8985 (match_dup 2)]))]
8986 "TARGET_ARM"
8987 "%i1%.\\t%0, %2, %4%S3"
8988 [(set_attr "conds" "set")
8989 (set_attr "shift" "4")
8990 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8991 (const_string "alu_shift")
8992 (const_string "alu_shift_reg")))]
8993 )
8994
8995 (define_insn "*arith_shiftsi_compare0_scratch"
8996 [(set (reg:CC_NOOV CC_REGNUM)
8997 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8998 [(match_operator:SI 3 "shift_operator"
8999 [(match_operand:SI 4 "s_register_operand" "r")
9000 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9001 (match_operand:SI 2 "s_register_operand" "r")])
9002 (const_int 0)))
9003 (clobber (match_scratch:SI 0 "=r"))]
9004 "TARGET_ARM"
9005 "%i1%.\\t%0, %2, %4%S3"
9006 [(set_attr "conds" "set")
9007 (set_attr "shift" "4")
9008 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9009 (const_string "alu_shift")
9010 (const_string "alu_shift_reg")))]
9011 )
9012
9013 (define_insn "*sub_shiftsi"
9014 [(set (match_operand:SI 0 "s_register_operand" "=r")
9015 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9016 (match_operator:SI 2 "shift_operator"
9017 [(match_operand:SI 3 "s_register_operand" "r")
9018 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9019 "TARGET_ARM"
9020 "sub%?\\t%0, %1, %3%S2"
9021 [(set_attr "predicable" "yes")
9022 (set_attr "shift" "3")
9023 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9024 (const_string "alu_shift")
9025 (const_string "alu_shift_reg")))]
9026 )
9027
9028 (define_insn "*sub_shiftsi_compare0"
9029 [(set (reg:CC_NOOV CC_REGNUM)
9030 (compare:CC_NOOV
9031 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9032 (match_operator:SI 2 "shift_operator"
9033 [(match_operand:SI 3 "s_register_operand" "r")
9034 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9035 (const_int 0)))
9036 (set (match_operand:SI 0 "s_register_operand" "=r")
9037 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9038 (match_dup 4)])))]
9039 "TARGET_ARM"
9040 "sub%.\\t%0, %1, %3%S2"
9041 [(set_attr "conds" "set")
9042 (set_attr "shift" "3")
9043 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9044 (const_string "alu_shift")
9045 (const_string "alu_shift_reg")))]
9046 )
9047
9048 (define_insn "*sub_shiftsi_compare0_scratch"
9049 [(set (reg:CC_NOOV CC_REGNUM)
9050 (compare:CC_NOOV
9051 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9052 (match_operator:SI 2 "shift_operator"
9053 [(match_operand:SI 3 "s_register_operand" "r")
9054 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9055 (const_int 0)))
9056 (clobber (match_scratch:SI 0 "=r"))]
9057 "TARGET_ARM"
9058 "sub%.\\t%0, %1, %3%S2"
9059 [(set_attr "conds" "set")
9060 (set_attr "shift" "3")
9061 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9062 (const_string "alu_shift")
9063 (const_string "alu_shift_reg")))]
9064 )
9065
9066 \f
9067
9068 (define_insn "*and_scc"
9069 [(set (match_operand:SI 0 "s_register_operand" "=r")
9070 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9071 [(match_operand 3 "cc_register" "") (const_int 0)])
9072 (match_operand:SI 2 "s_register_operand" "r")))]
9073 "TARGET_ARM"
9074 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9075 [(set_attr "conds" "use")
9076 (set_attr "length" "8")]
9077 )
9078
9079 (define_insn "*ior_scc"
9080 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9081 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9082 [(match_operand 3 "cc_register" "") (const_int 0)])
9083 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9084 "TARGET_ARM"
9085 "@
9086 orr%d2\\t%0, %1, #1
9087 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9088 [(set_attr "conds" "use")
9089 (set_attr "length" "4,8")]
9090 )
9091
9092 (define_insn "*compare_scc"
9093 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9094 (match_operator:SI 1 "arm_comparison_operator"
9095 [(match_operand:SI 2 "s_register_operand" "r,r")
9096 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9097 (clobber (reg:CC CC_REGNUM))]
9098 "TARGET_ARM"
9099 "*
9100 if (operands[3] == const0_rtx)
9101 {
9102 if (GET_CODE (operands[1]) == LT)
9103 return \"mov\\t%0, %2, lsr #31\";
9104
9105 if (GET_CODE (operands[1]) == GE)
9106 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9107
9108 if (GET_CODE (operands[1]) == EQ)
9109 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9110 }
9111
9112 if (GET_CODE (operands[1]) == NE)
9113 {
9114 if (which_alternative == 1)
9115 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9116 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9117 }
9118 if (which_alternative == 1)
9119 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9120 else
9121 output_asm_insn (\"cmp\\t%2, %3\", operands);
9122 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9123 "
9124 [(set_attr "conds" "clob")
9125 (set_attr "length" "12")]
9126 )
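;; Illustration (hypothetical registers): the special cases above let
;;   int f (int x) { return x < 0; }
;; compile to a single
;;   mov     r0, r0, lsr #31
;; while the general case falls back to a compare plus two conditional moves.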
9127
9128 (define_insn "*cond_move"
9129 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9130 (if_then_else:SI (match_operator 3 "equality_operator"
9131 [(match_operator 4 "arm_comparison_operator"
9132 [(match_operand 5 "cc_register" "") (const_int 0)])
9133 (const_int 0)])
9134 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9135 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9136 "TARGET_ARM"
9137 "*
9138 if (GET_CODE (operands[3]) == NE)
9139 {
9140 if (which_alternative != 1)
9141 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9142 if (which_alternative != 0)
9143 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9144 return \"\";
9145 }
9146 if (which_alternative != 0)
9147 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9148 if (which_alternative != 1)
9149 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9150 return \"\";
9151 "
9152 [(set_attr "conds" "use")
9153 (set_attr "length" "4,4,8")]
9154 )
9155
9156 (define_insn "*cond_arith"
9157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9158 (match_operator:SI 5 "shiftable_operator"
9159 [(match_operator:SI 4 "arm_comparison_operator"
9160 [(match_operand:SI 2 "s_register_operand" "r,r")
9161 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9162 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9163 (clobber (reg:CC CC_REGNUM))]
9164 "TARGET_ARM"
9165 "*
9166 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9167 return \"%i5\\t%0, %1, %2, lsr #31\";
9168
9169 output_asm_insn (\"cmp\\t%2, %3\", operands);
9170 if (GET_CODE (operands[5]) == AND)
9171 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9172 else if (GET_CODE (operands[5]) == MINUS)
9173 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9174 else if (which_alternative != 0)
9175 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9176 return \"%i5%d4\\t%0, %1, #1\";
9177 "
9178 [(set_attr "conds" "clob")
9179 (set_attr "length" "12")]
9180 )
9181
9182 (define_insn "*cond_sub"
9183 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9184 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9185 (match_operator:SI 4 "arm_comparison_operator"
9186 [(match_operand:SI 2 "s_register_operand" "r,r")
9187 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9188 (clobber (reg:CC CC_REGNUM))]
9189 "TARGET_ARM"
9190 "*
9191 output_asm_insn (\"cmp\\t%2, %3\", operands);
9192 if (which_alternative != 0)
9193 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9194 return \"sub%d4\\t%0, %1, #1\";
9195 "
9196 [(set_attr "conds" "clob")
9197 (set_attr "length" "8,12")]
9198 )
9199
9200 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9201 (define_insn "*cmp_ite0"
9202 [(set (match_operand 6 "dominant_cc_register" "")
9203 (compare
9204 (if_then_else:SI
9205 (match_operator 4 "arm_comparison_operator"
9206 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9207 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9208 (match_operator:SI 5 "arm_comparison_operator"
9209 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9210 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9211 (const_int 0))
9212 (const_int 0)))]
9213 "TARGET_ARM"
9214 "*
9215 {
9216 static const char * const opcodes[4][2] =
9217 {
9218 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9219 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9220 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9221 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9222 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9223 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9224 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9225 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9226 };
9227 int swap =
9228 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9229
9230 return opcodes[which_alternative][swap];
9231 }"
9232 [(set_attr "conds" "set")
9233 (set_attr "length" "8")]
9234 )
9235
9236 (define_insn "*cmp_ite1"
9237 [(set (match_operand 6 "dominant_cc_register" "")
9238 (compare
9239 (if_then_else:SI
9240 (match_operator 4 "arm_comparison_operator"
9241 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9242 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9243 (match_operator:SI 5 "arm_comparison_operator"
9244 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9245 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9246 (const_int 1))
9247 (const_int 0)))]
9248 "TARGET_ARM"
9249 "*
9250 {
9251 static const char * const opcodes[4][2] =
9252 {
9253 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9254 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9255 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9256 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9257 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9258 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9259 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9260 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9261 };
9262 int swap =
9263 comparison_dominates_p (GET_CODE (operands[5]),
9264 reverse_condition (GET_CODE (operands[4])));
9265
9266 return opcodes[which_alternative][swap];
9267 }"
9268 [(set_attr "conds" "set")
9269 (set_attr "length" "8")]
9270 )
9271
9272 (define_insn "*cmp_and"
9273 [(set (match_operand 6 "dominant_cc_register" "")
9274 (compare
9275 (and:SI
9276 (match_operator 4 "arm_comparison_operator"
9277 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9278 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9279 (match_operator:SI 5 "arm_comparison_operator"
9280 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9281 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9282 (const_int 0)))]
9283 "TARGET_ARM"
9284 "*
9285 {
9286 static const char *const opcodes[4][2] =
9287 {
9288 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9289 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9290 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9291 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9292 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9293 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9294 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9295 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9296 };
9297 int swap =
9298 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9299
9300 return opcodes[which_alternative][swap];
9301 }"
9302 [(set_attr "conds" "set")
9303 (set_attr "predicable" "no")
9304 (set_attr "length" "8")]
9305 )
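;; Illustration (hypothetical registers) of comparison dominance: for
;;   if (a == 1 && b == 2) ...
;; *cmp_and can emit roughly
;;   cmp     r0, #1
;;   cmpeq   r1, #2
;; after which a single conditional branch on EQ tests both conditions.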
9306
9307 (define_insn "*cmp_ior"
9308 [(set (match_operand 6 "dominant_cc_register" "")
9309 (compare
9310 (ior:SI
9311 (match_operator 4 "arm_comparison_operator"
9312 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9313 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9314 (match_operator:SI 5 "arm_comparison_operator"
9315 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9316 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9317 (const_int 0)))]
9318 "TARGET_ARM"
9319 "*
9320 {
9321 static const char *const opcodes[4][2] =
9322 {
9323 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9324 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9325 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9326 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9327 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9328 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9329 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9330 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9331 };
9332 int swap =
9333 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9334
9335 return opcodes[which_alternative][swap];
9336 }
9337 "
9338 [(set_attr "conds" "set")
9339 (set_attr "length" "8")]
9340 )
9341
9342 (define_insn_and_split "*ior_scc_scc"
9343 [(set (match_operand:SI 0 "s_register_operand" "=r")
9344 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9345 [(match_operand:SI 1 "s_register_operand" "r")
9346 (match_operand:SI 2 "arm_add_operand" "rIL")])
9347 (match_operator:SI 6 "arm_comparison_operator"
9348 [(match_operand:SI 4 "s_register_operand" "r")
9349 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9350 (clobber (reg:CC CC_REGNUM))]
9351 "TARGET_ARM
9352 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9353 != CCmode)"
9354 "#"
9355 "TARGET_ARM && reload_completed"
9356 [(set (match_dup 7)
9357 (compare
9358 (ior:SI
9359 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9360 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9361 (const_int 0)))
9362 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9363 "operands[7]
9364 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9365 DOM_CC_X_OR_Y),
9366 CC_REGNUM);"
9367 [(set_attr "conds" "clob")
9368 (set_attr "length" "16")])
9369
9370 ; If the above pattern is followed by a CMP insn, then the compare is
9371 ; redundant, since we can rework the conditional instruction that follows.
9372 (define_insn_and_split "*ior_scc_scc_cmp"
9373 [(set (match_operand 0 "dominant_cc_register" "")
9374 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9375 [(match_operand:SI 1 "s_register_operand" "r")
9376 (match_operand:SI 2 "arm_add_operand" "rIL")])
9377 (match_operator:SI 6 "arm_comparison_operator"
9378 [(match_operand:SI 4 "s_register_operand" "r")
9379 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9380 (const_int 0)))
9381 (set (match_operand:SI 7 "s_register_operand" "=r")
9382 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9383 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9384 "TARGET_ARM"
9385 "#"
9386 "TARGET_ARM && reload_completed"
9387 [(set (match_dup 0)
9388 (compare
9389 (ior:SI
9390 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9391 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9392 (const_int 0)))
9393 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9394 ""
9395 [(set_attr "conds" "set")
9396 (set_attr "length" "16")])
9397
9398 (define_insn_and_split "*and_scc_scc"
9399 [(set (match_operand:SI 0 "s_register_operand" "=r")
9400 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9401 [(match_operand:SI 1 "s_register_operand" "r")
9402 (match_operand:SI 2 "arm_add_operand" "rIL")])
9403 (match_operator:SI 6 "arm_comparison_operator"
9404 [(match_operand:SI 4 "s_register_operand" "r")
9405 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9406 (clobber (reg:CC CC_REGNUM))]
9407 "TARGET_ARM
9408 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9409 != CCmode)"
9410 "#"
9411 "TARGET_ARM && reload_completed
9412 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9413 != CCmode)"
9414 [(set (match_dup 7)
9415 (compare
9416 (and:SI
9417 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9418 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9419 (const_int 0)))
9420 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9421 "operands[7]
9422 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9423 DOM_CC_X_AND_Y),
9424 CC_REGNUM);"
9425 [(set_attr "conds" "clob")
9426 (set_attr "length" "16")])
9427
9428 ; If the above pattern is followed by a CMP insn, then the compare is
9429 ; redundant, since we can rework the conditional instruction that follows.
9430 (define_insn_and_split "*and_scc_scc_cmp"
9431 [(set (match_operand 0 "dominant_cc_register" "")
9432 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9433 [(match_operand:SI 1 "s_register_operand" "r")
9434 (match_operand:SI 2 "arm_add_operand" "rIL")])
9435 (match_operator:SI 6 "arm_comparison_operator"
9436 [(match_operand:SI 4 "s_register_operand" "r")
9437 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9438 (const_int 0)))
9439 (set (match_operand:SI 7 "s_register_operand" "=r")
9440 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9441 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9442 "TARGET_ARM"
9443 "#"
9444 "TARGET_ARM && reload_completed"
9445 [(set (match_dup 0)
9446 (compare
9447 (and:SI
9448 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9449 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9450 (const_int 0)))
9451 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9452 ""
9453 [(set_attr "conds" "set")
9454 (set_attr "length" "16")])
9455
9456 ;; If there is no dominance in the comparison, then we can still save an
9457 ;; instruction in the AND case, since we know that the second compare
9458 ;; need only zero the value if false (if true, then the value is already
9459 ;; correct).
9460 (define_insn_and_split "*and_scc_scc_nodom"
9461 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9462 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9463 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9464 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9465 (match_operator:SI 6 "arm_comparison_operator"
9466 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9467 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9468 (clobber (reg:CC CC_REGNUM))]
9469 "TARGET_ARM
9470 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9471 == CCmode)"
9472 "#"
9473 "TARGET_ARM && reload_completed"
9474 [(parallel [(set (match_dup 0)
9475 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9476 (clobber (reg:CC CC_REGNUM))])
9477 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9478 (set (match_dup 0)
9479 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9480 (match_dup 0)
9481 (const_int 0)))]
9482 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9483 operands[4], operands[5]),
9484 CC_REGNUM);
9485 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9486 operands[5]);"
9487 [(set_attr "conds" "clob")
9488 (set_attr "length" "20")])
9489
9490 (define_split
9491 [(set (reg:CC_NOOV CC_REGNUM)
9492 (compare:CC_NOOV (ior:SI
9493 (and:SI (match_operand:SI 0 "s_register_operand" "")
9494 (const_int 1))
9495 (match_operator:SI 1 "comparison_operator"
9496 [(match_operand:SI 2 "s_register_operand" "")
9497 (match_operand:SI 3 "arm_add_operand" "")]))
9498 (const_int 0)))
9499 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9500 "TARGET_ARM"
9501 [(set (match_dup 4)
9502 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9503 (match_dup 0)))
9504 (set (reg:CC_NOOV CC_REGNUM)
9505 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9506 (const_int 0)))]
9507 "")
9508
9509 (define_split
9510 [(set (reg:CC_NOOV CC_REGNUM)
9511 (compare:CC_NOOV (ior:SI
9512 (match_operator:SI 1 "comparison_operator"
9513 [(match_operand:SI 2 "s_register_operand" "")
9514 (match_operand:SI 3 "arm_add_operand" "")])
9515 (and:SI (match_operand:SI 0 "s_register_operand" "")
9516 (const_int 1)))
9517 (const_int 0)))
9518 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9519 "TARGET_ARM"
9520 [(set (match_dup 4)
9521 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9522 (match_dup 0)))
9523 (set (reg:CC_NOOV CC_REGNUM)
9524 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9525 (const_int 0)))]
9526 "")
9527 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9528
9529 (define_insn "*negscc"
9530 [(set (match_operand:SI 0 "s_register_operand" "=r")
9531 (neg:SI (match_operator 3 "arm_comparison_operator"
9532 [(match_operand:SI 1 "s_register_operand" "r")
9533 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9534 (clobber (reg:CC CC_REGNUM))]
9535 "TARGET_ARM"
9536 "*
9537 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9538 return \"mov\\t%0, %1, asr #31\";
9539
9540 if (GET_CODE (operands[3]) == NE)
9541 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9542
9543 output_asm_insn (\"cmp\\t%1, %2\", operands);
9544 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9545 return \"mvn%d3\\t%0, #0\";
9546 "
9547 [(set_attr "conds" "clob")
9548 (set_attr "length" "12")]
9549 )
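;; Illustration (hypothetical registers): the LT-against-zero shortcut above
;; turns
;;   int f (int x) { return x < 0 ? -1 : 0; }
;; into a single arithmetic shift,
;;   mov     r0, r0, asr #31
;; since the replicated sign bit is exactly -(x < 0).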
9550
9551 (define_insn "movcond"
9552 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9553 (if_then_else:SI
9554 (match_operator 5 "arm_comparison_operator"
9555 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9556 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9557 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9558 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9559 (clobber (reg:CC CC_REGNUM))]
9560 "TARGET_ARM"
9561 "*
9562 if (GET_CODE (operands[5]) == LT
9563 && (operands[4] == const0_rtx))
9564 {
9565 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9566 {
9567 if (operands[2] == const0_rtx)
9568 return \"and\\t%0, %1, %3, asr #31\";
9569 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9570 }
9571 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9572 {
9573 if (operands[1] == const0_rtx)
9574 return \"bic\\t%0, %2, %3, asr #31\";
9575 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9576 }
9577 /* The only case that falls through to here is when both ops 1 & 2
9578 are constants. */
9579 }
9580
9581 if (GET_CODE (operands[5]) == GE
9582 && (operands[4] == const0_rtx))
9583 {
9584 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9585 {
9586 if (operands[2] == const0_rtx)
9587 return \"bic\\t%0, %1, %3, asr #31\";
9588 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9589 }
9590 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9591 {
9592 if (operands[1] == const0_rtx)
9593 return \"and\\t%0, %2, %3, asr #31\";
9594 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9595 }
9596 /* The only case that falls through to here is when both ops 1 & 2
9597 are constants. */
9598 }
9599 if (GET_CODE (operands[4]) == CONST_INT
9600 && !const_ok_for_arm (INTVAL (operands[4])))
9601 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9602 else
9603 output_asm_insn (\"cmp\\t%3, %4\", operands);
9604 if (which_alternative != 0)
9605 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9606 if (which_alternative != 1)
9607 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9608 return \"\";
9609 "
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,8,12")]
9612 )
9613
9614 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9615
9616 (define_insn "*ifcompare_plus_move"
9617 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9618 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9619 [(match_operand:SI 4 "s_register_operand" "r,r")
9620 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9621 (plus:SI
9622 (match_operand:SI 2 "s_register_operand" "r,r")
9623 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9624 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9625 (clobber (reg:CC CC_REGNUM))]
9626 "TARGET_ARM"
9627 "#"
9628 [(set_attr "conds" "clob")
9629 (set_attr "length" "8,12")]
9630 )
9631
9632 (define_insn "*if_plus_move"
9633 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9634 (if_then_else:SI
9635 (match_operator 4 "arm_comparison_operator"
9636 [(match_operand 5 "cc_register" "") (const_int 0)])
9637 (plus:SI
9638 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9639 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9640 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9641 "TARGET_ARM"
9642 "@
9643 add%d4\\t%0, %2, %3
9644 sub%d4\\t%0, %2, #%n3
9645 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9646 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9647 [(set_attr "conds" "use")
9648 (set_attr "length" "4,4,8,8")
9649 (set_attr "type" "*,*,*,*")]
9650 )
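;; Illustration (hypothetical registers and condition): once if-conversion has
;; the comparison result in the flags, C source such as
;;   if (c) x = a + b;
;; can match *if_plus_move and become a single predicated instruction,
;;   addne   r0, r1, r2
;; (or a "subne" with a negated immediate), avoiding a branch entirely.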
9651
9652 (define_insn "*ifcompare_move_plus"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9654 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9655 [(match_operand:SI 4 "s_register_operand" "r,r")
9656 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9657 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9658 (plus:SI
9659 (match_operand:SI 2 "s_register_operand" "r,r")
9660 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9661 (clobber (reg:CC CC_REGNUM))]
9662 "TARGET_ARM"
9663 "#"
9664 [(set_attr "conds" "clob")
9665 (set_attr "length" "8,12")]
9666 )
9667
9668 (define_insn "*if_move_plus"
9669 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9670 (if_then_else:SI
9671 (match_operator 4 "arm_comparison_operator"
9672 [(match_operand 5 "cc_register" "") (const_int 0)])
9673 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9674 (plus:SI
9675 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9676 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9677 "TARGET_ARM"
9678 "@
9679 add%D4\\t%0, %2, %3
9680 sub%D4\\t%0, %2, #%n3
9681 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9682 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9683 [(set_attr "conds" "use")
9684 (set_attr "length" "4,4,8,8")
9685 (set_attr "type" "*,*,*,*")]
9686 )
9687
9688 (define_insn "*ifcompare_arith_arith"
9689 [(set (match_operand:SI 0 "s_register_operand" "=r")
9690 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9691 [(match_operand:SI 5 "s_register_operand" "r")
9692 (match_operand:SI 6 "arm_add_operand" "rIL")])
9693 (match_operator:SI 8 "shiftable_operator"
9694 [(match_operand:SI 1 "s_register_operand" "r")
9695 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9696 (match_operator:SI 7 "shiftable_operator"
9697 [(match_operand:SI 3 "s_register_operand" "r")
9698 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9699 (clobber (reg:CC CC_REGNUM))]
9700 "TARGET_ARM"
9701 "#"
9702 [(set_attr "conds" "clob")
9703 (set_attr "length" "12")]
9704 )
9705
9706 (define_insn "*if_arith_arith"
9707 [(set (match_operand:SI 0 "s_register_operand" "=r")
9708 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9709 [(match_operand 8 "cc_register" "") (const_int 0)])
9710 (match_operator:SI 6 "shiftable_operator"
9711 [(match_operand:SI 1 "s_register_operand" "r")
9712 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9713 (match_operator:SI 7 "shiftable_operator"
9714 [(match_operand:SI 3 "s_register_operand" "r")
9715 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9716 "TARGET_ARM"
9717 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9718 [(set_attr "conds" "use")
9719 (set_attr "length" "8")]
9720 )
9721
9722 (define_insn "*ifcompare_arith_move"
9723 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9724 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9725 [(match_operand:SI 2 "s_register_operand" "r,r")
9726 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9727 (match_operator:SI 7 "shiftable_operator"
9728 [(match_operand:SI 4 "s_register_operand" "r,r")
9729 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9730 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9731 (clobber (reg:CC CC_REGNUM))]
9732 "TARGET_ARM"
9733 "*
9734 /* If we have an operation where (op x 0) is the identity operation, the
9735 conditional operator is LT or GE, we are comparing against zero, and
9736 everything is in registers, then we can do this in two instructions.  */
9737 if (operands[3] == const0_rtx
9738 && GET_CODE (operands[7]) != AND
9739 && GET_CODE (operands[5]) == REG
9740 && GET_CODE (operands[1]) == REG
9741 && REGNO (operands[1]) == REGNO (operands[4])
9742 && REGNO (operands[4]) != REGNO (operands[0]))
9743 {
9744 if (GET_CODE (operands[6]) == LT)
9745 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9746 else if (GET_CODE (operands[6]) == GE)
9747 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9748 }
9749 if (GET_CODE (operands[3]) == CONST_INT
9750 && !const_ok_for_arm (INTVAL (operands[3])))
9751 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9752 else
9753 output_asm_insn (\"cmp\\t%2, %3\", operands);
9754 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9755 if (which_alternative != 0)
9756 return \"mov%D6\\t%0, %1\";
9757 return \"\";
9758 "
9759 [(set_attr "conds" "clob")
9760 (set_attr "length" "8,12")]
9761 )
9762
9763 (define_insn "*if_arith_move"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9765 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9766 [(match_operand 6 "cc_register" "") (const_int 0)])
9767 (match_operator:SI 5 "shiftable_operator"
9768 [(match_operand:SI 2 "s_register_operand" "r,r")
9769 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9770 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9771 "TARGET_ARM"
9772 "@
9773 %I5%d4\\t%0, %2, %3
9774 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9775 [(set_attr "conds" "use")
9776 (set_attr "length" "4,8")
9777 (set_attr "type" "*,*")]
9778 )
9779
9780 (define_insn "*ifcompare_move_arith"
9781 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9782 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9783 [(match_operand:SI 4 "s_register_operand" "r,r")
9784 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9785 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9786 (match_operator:SI 7 "shiftable_operator"
9787 [(match_operand:SI 2 "s_register_operand" "r,r")
9788 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9789 (clobber (reg:CC CC_REGNUM))]
9790 "TARGET_ARM"
9791 "*
9792 /* If we have an operation where (op x 0) is the identity operation, the
9793 conditional operator is LT or GE, we are comparing against zero, and
9794 everything is in registers, then we can do this in two instructions.  */
9795 if (operands[5] == const0_rtx
9796 && GET_CODE (operands[7]) != AND
9797 && GET_CODE (operands[3]) == REG
9798 && GET_CODE (operands[1]) == REG
9799 && REGNO (operands[1]) == REGNO (operands[2])
9800 && REGNO (operands[2]) != REGNO (operands[0]))
9801 {
9802 if (GET_CODE (operands[6]) == GE)
9803 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9804 else if (GET_CODE (operands[6]) == LT)
9805 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9806 }
9807
9808 if (GET_CODE (operands[5]) == CONST_INT
9809 && !const_ok_for_arm (INTVAL (operands[5])))
9810 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9811 else
9812 output_asm_insn (\"cmp\\t%4, %5\", operands);
9813
9814 if (which_alternative != 0)
9815 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9816 return \"%I7%D6\\t%0, %2, %3\";
9817 "
9818 [(set_attr "conds" "clob")
9819 (set_attr "length" "8,12")]
9820 )
9821
9822 (define_insn "*if_move_arith"
9823 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9824 (if_then_else:SI
9825 (match_operator 4 "arm_comparison_operator"
9826 [(match_operand 6 "cc_register" "") (const_int 0)])
9827 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9828 (match_operator:SI 5 "shiftable_operator"
9829 [(match_operand:SI 2 "s_register_operand" "r,r")
9830 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9831 "TARGET_ARM"
9832 "@
9833 %I5%D4\\t%0, %2, %3
9834 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9835 [(set_attr "conds" "use")
9836 (set_attr "length" "4,8")
9837 (set_attr "type" "*,*")]
9838 )
9839
9840 (define_insn "*ifcompare_move_not"
9841 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9842 (if_then_else:SI
9843 (match_operator 5 "arm_comparison_operator"
9844 [(match_operand:SI 3 "s_register_operand" "r,r")
9845 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9846 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9847 (not:SI
9848 (match_operand:SI 2 "s_register_operand" "r,r"))))
9849 (clobber (reg:CC CC_REGNUM))]
9850 "TARGET_ARM"
9851 "#"
9852 [(set_attr "conds" "clob")
9853 (set_attr "length" "8,12")]
9854 )
9855
9856 (define_insn "*if_move_not"
9857 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9858 (if_then_else:SI
9859 (match_operator 4 "arm_comparison_operator"
9860 [(match_operand 3 "cc_register" "") (const_int 0)])
9861 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9862 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9863 "TARGET_ARM"
9864 "@
9865 mvn%D4\\t%0, %2
9866 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9867 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9868 [(set_attr "conds" "use")
9869 (set_attr "length" "4,8,8")]
9870 )
9871
9872 (define_insn "*ifcompare_not_move"
9873 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9874 (if_then_else:SI
9875 (match_operator 5 "arm_comparison_operator"
9876 [(match_operand:SI 3 "s_register_operand" "r,r")
9877 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9878 (not:SI
9879 (match_operand:SI 2 "s_register_operand" "r,r"))
9880 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9881 (clobber (reg:CC CC_REGNUM))]
9882 "TARGET_ARM"
9883 "#"
9884 [(set_attr "conds" "clob")
9885 (set_attr "length" "8,12")]
9886 )
9887
9888 (define_insn "*if_not_move"
9889 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9890 (if_then_else:SI
9891 (match_operator 4 "arm_comparison_operator"
9892 [(match_operand 3 "cc_register" "") (const_int 0)])
9893 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9894 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9895 "TARGET_ARM"
9896 "@
9897 mvn%d4\\t%0, %2
9898 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9899 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9900 [(set_attr "conds" "use")
9901 (set_attr "length" "4,8,8")]
9902 )
9903
9904 (define_insn "*ifcompare_shift_move"
9905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9906 (if_then_else:SI
9907 (match_operator 6 "arm_comparison_operator"
9908 [(match_operand:SI 4 "s_register_operand" "r,r")
9909 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9910 (match_operator:SI 7 "shift_operator"
9911 [(match_operand:SI 2 "s_register_operand" "r,r")
9912 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9913 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9914 (clobber (reg:CC CC_REGNUM))]
9915 "TARGET_ARM"
9916 "#"
9917 [(set_attr "conds" "clob")
9918 (set_attr "length" "8,12")]
9919 )
9920
9921 (define_insn "*if_shift_move"
9922 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9923 (if_then_else:SI
9924 (match_operator 5 "arm_comparison_operator"
9925 [(match_operand 6 "cc_register" "") (const_int 0)])
9926 (match_operator:SI 4 "shift_operator"
9927 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9928 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9929 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9930 "TARGET_ARM"
9931 "@
9932 mov%d5\\t%0, %2%S4
9933 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9934 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9935 [(set_attr "conds" "use")
9936 (set_attr "shift" "2")
9937 (set_attr "length" "4,8,8")
9938 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9939 (const_string "alu_shift")
9940 (const_string "alu_shift_reg")))]
9941 )
9942
9943 (define_insn "*ifcompare_move_shift"
9944 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9945 (if_then_else:SI
9946 (match_operator 6 "arm_comparison_operator"
9947 [(match_operand:SI 4 "s_register_operand" "r,r")
9948 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9949 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9950 (match_operator:SI 7 "shift_operator"
9951 [(match_operand:SI 2 "s_register_operand" "r,r")
9952 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9953 (clobber (reg:CC CC_REGNUM))]
9954 "TARGET_ARM"
9955 "#"
9956 [(set_attr "conds" "clob")
9957 (set_attr "length" "8,12")]
9958 )
9959
9960 (define_insn "*if_move_shift"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9962 (if_then_else:SI
9963 (match_operator 5 "arm_comparison_operator"
9964 [(match_operand 6 "cc_register" "") (const_int 0)])
9965 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9966 (match_operator:SI 4 "shift_operator"
9967 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9968 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9969 "TARGET_ARM"
9970 "@
9971 mov%D5\\t%0, %2%S4
9972 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9973 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9974 [(set_attr "conds" "use")
9975 (set_attr "shift" "2")
9976 (set_attr "length" "4,8,8")
9977 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9978 (const_string "alu_shift")
9979 (const_string "alu_shift_reg")))]
9980 )
9981
9982 (define_insn "*ifcompare_shift_shift"
9983 [(set (match_operand:SI 0 "s_register_operand" "=r")
9984 (if_then_else:SI
9985 (match_operator 7 "arm_comparison_operator"
9986 [(match_operand:SI 5 "s_register_operand" "r")
9987 (match_operand:SI 6 "arm_add_operand" "rIL")])
9988 (match_operator:SI 8 "shift_operator"
9989 [(match_operand:SI 1 "s_register_operand" "r")
9990 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9991 (match_operator:SI 9 "shift_operator"
9992 [(match_operand:SI 3 "s_register_operand" "r")
9993 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9994 (clobber (reg:CC CC_REGNUM))]
9995 "TARGET_ARM"
9996 "#"
9997 [(set_attr "conds" "clob")
9998 (set_attr "length" "12")]
9999 )
10000
10001 (define_insn "*if_shift_shift"
10002 [(set (match_operand:SI 0 "s_register_operand" "=r")
10003 (if_then_else:SI
10004 (match_operator 5 "arm_comparison_operator"
10005 [(match_operand 8 "cc_register" "") (const_int 0)])
10006 (match_operator:SI 6 "shift_operator"
10007 [(match_operand:SI 1 "s_register_operand" "r")
10008 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10009 (match_operator:SI 7 "shift_operator"
10010 [(match_operand:SI 3 "s_register_operand" "r")
10011 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10012 "TARGET_ARM"
10013 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10014 [(set_attr "conds" "use")
10015 (set_attr "shift" "1")
10016 (set_attr "length" "8")
10017 (set (attr "type") (if_then_else
10018 (and (match_operand 2 "const_int_operand" "")
10019 (match_operand 4 "const_int_operand" ""))
10020 (const_string "alu_shift")
10021 (const_string "alu_shift_reg")))]
10022 )
10023
10024 (define_insn "*ifcompare_not_arith"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r")
10026 (if_then_else:SI
10027 (match_operator 6 "arm_comparison_operator"
10028 [(match_operand:SI 4 "s_register_operand" "r")
10029 (match_operand:SI 5 "arm_add_operand" "rIL")])
10030 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10031 (match_operator:SI 7 "shiftable_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10034 (clobber (reg:CC CC_REGNUM))]
10035 "TARGET_ARM"
10036 "#"
10037 [(set_attr "conds" "clob")
10038 (set_attr "length" "12")]
10039 )
10040
10041 (define_insn "*if_not_arith"
10042 [(set (match_operand:SI 0 "s_register_operand" "=r")
10043 (if_then_else:SI
10044 (match_operator 5 "arm_comparison_operator"
10045 [(match_operand 4 "cc_register" "") (const_int 0)])
10046 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10047 (match_operator:SI 6 "shiftable_operator"
10048 [(match_operand:SI 2 "s_register_operand" "r")
10049 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10050 "TARGET_ARM"
10051 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10052 [(set_attr "conds" "use")
10053 (set_attr "length" "8")]
10054 )
10055
10056 (define_insn "*ifcompare_arith_not"
10057 [(set (match_operand:SI 0 "s_register_operand" "=r")
10058 (if_then_else:SI
10059 (match_operator 6 "arm_comparison_operator"
10060 [(match_operand:SI 4 "s_register_operand" "r")
10061 (match_operand:SI 5 "arm_add_operand" "rIL")])
10062 (match_operator:SI 7 "shiftable_operator"
10063 [(match_operand:SI 2 "s_register_operand" "r")
10064 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10065 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10066 (clobber (reg:CC CC_REGNUM))]
10067 "TARGET_ARM"
10068 "#"
10069 [(set_attr "conds" "clob")
10070 (set_attr "length" "12")]
10071 )
10072
10073 (define_insn "*if_arith_not"
10074 [(set (match_operand:SI 0 "s_register_operand" "=r")
10075 (if_then_else:SI
10076 (match_operator 5 "arm_comparison_operator"
10077 [(match_operand 4 "cc_register" "") (const_int 0)])
10078 (match_operator:SI 6 "shiftable_operator"
10079 [(match_operand:SI 2 "s_register_operand" "r")
10080 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10081 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10082 "TARGET_ARM"
10083 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10084 [(set_attr "conds" "use")
10085 (set_attr "length" "8")]
10086 )
10087
10088 (define_insn "*ifcompare_neg_move"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (if_then_else:SI
10091 (match_operator 5 "arm_comparison_operator"
10092 [(match_operand:SI 3 "s_register_operand" "r,r")
10093 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10094 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10095 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10096 (clobber (reg:CC CC_REGNUM))]
10097 "TARGET_ARM"
10098 "#"
10099 [(set_attr "conds" "clob")
10100 (set_attr "length" "8,12")]
10101 )
10102
10103 (define_insn "*if_neg_move"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10105 (if_then_else:SI
10106 (match_operator 4 "arm_comparison_operator"
10107 [(match_operand 3 "cc_register" "") (const_int 0)])
10108 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10110 "TARGET_ARM"
10111 "@
10112 rsb%d4\\t%0, %2, #0
10113 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10114 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10115 [(set_attr "conds" "use")
10116 (set_attr "length" "4,8,8")]
10117 )
10118
10119 (define_insn "*ifcompare_move_neg"
10120 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10121 (if_then_else:SI
10122 (match_operator 5 "arm_comparison_operator"
10123 [(match_operand:SI 3 "s_register_operand" "r,r")
10124 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10125 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10126 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10127 (clobber (reg:CC CC_REGNUM))]
10128 "TARGET_ARM"
10129 "#"
10130 [(set_attr "conds" "clob")
10131 (set_attr "length" "8,12")]
10132 )
10133
10134 (define_insn "*if_move_neg"
10135 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10136 (if_then_else:SI
10137 (match_operator 4 "arm_comparison_operator"
10138 [(match_operand 3 "cc_register" "") (const_int 0)])
10139 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10140 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10141 "TARGET_ARM"
10142 "@
10143 rsb%D4\\t%0, %2, #0
10144 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10145 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10146 [(set_attr "conds" "use")
10147 (set_attr "length" "4,8,8")]
10148 )
10149
10150 (define_insn "*arith_adjacentmem"
10151 [(set (match_operand:SI 0 "s_register_operand" "=r")
10152 (match_operator:SI 1 "shiftable_operator"
10153 [(match_operand:SI 2 "memory_operand" "m")
10154 (match_operand:SI 3 "memory_operand" "m")]))
10155 (clobber (match_scratch:SI 4 "=r"))]
10156 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10157 "*
10158 {
10159 rtx ldm[3];
10160 rtx arith[4];
10161 rtx base_reg;
10162 HOST_WIDE_INT val1 = 0, val2 = 0;
10163
10164 if (REGNO (operands[0]) > REGNO (operands[4]))
10165 {
10166 ldm[1] = operands[4];
10167 ldm[2] = operands[0];
10168 }
10169 else
10170 {
10171 ldm[1] = operands[0];
10172 ldm[2] = operands[4];
10173 }
10174
10175 base_reg = XEXP (operands[2], 0);
10176
10177 if (!REG_P (base_reg))
10178 {
10179 val1 = INTVAL (XEXP (base_reg, 1));
10180 base_reg = XEXP (base_reg, 0);
10181 }
10182
10183 if (!REG_P (XEXP (operands[3], 0)))
10184 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10185
10186 arith[0] = operands[0];
10187 arith[3] = operands[1];
10188
10189 if (val1 < val2)
10190 {
10191 arith[1] = ldm[1];
10192 arith[2] = ldm[2];
10193 }
10194 else
10195 {
10196 arith[1] = ldm[2];
10197 arith[2] = ldm[1];
10198 }
10199
10200 ldm[0] = base_reg;
10201 if (val1 != 0 && val2 != 0)
10202 {
10203 rtx ops[3];
10204
10205 if (val1 == 4 || val2 == 4)
10206 /* Other val must be 8, since we know they are adjacent and neither
10207 is zero. */
10208 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10209 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10210 {
10211 ldm[0] = ops[0] = operands[4];
10212 ops[1] = base_reg;
10213 ops[2] = GEN_INT (val1);
10214 output_add_immediate (ops);
10215 if (val1 < val2)
10216 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10217 else
10218 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10219 }
10220 else
10221 {
10222 	  /* Offset is out of range for a single add, so use two ldr instructions.  */
10223 ops[0] = ldm[1];
10224 ops[1] = base_reg;
10225 ops[2] = GEN_INT (val1);
10226 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10227 ops[0] = ldm[2];
10228 ops[2] = GEN_INT (val2);
10229 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10230 }
10231 }
10232 else if (val1 != 0)
10233 {
10234 if (val1 < val2)
10235 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10236 else
10237 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10238 }
10239 else
10240 {
10241 if (val1 < val2)
10242 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10243 else
10244 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10245 }
10246 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10247 return \"\";
10248 }"
10249 [(set_attr "length" "12")
10250 (set_attr "predicable" "yes")
10251 (set_attr "type" "load1")]
10252 )
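
;; An illustrative sketch of what *arith_adjacentmem can emit (register
;; names and offsets chosen purely for the example): for
;;	(set r0 (plus:SI (mem:SI r4) (mem:SI (plus:SI r4 (const_int 4)))))
;; with ip allocated as the scratch register, the offsets are 0 and 4, so
;; the two loads collapse into one load-multiple followed by the operation:
;;	ldmia	r4, {r0, ip}
;;	add	r0, r0, ip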
10253
10254 ; This pattern is never tried by combine, so do it as a peephole
10255
10256 (define_peephole2
10257 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10258 (match_operand:SI 1 "arm_general_register_operand" ""))
10259 (set (reg:CC CC_REGNUM)
10260 (compare:CC (match_dup 1) (const_int 0)))]
10261 "TARGET_ARM"
10262 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10263 (set (match_dup 0) (match_dup 1))])]
10264 ""
10265 )
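
;; As an illustrative sketch (registers invented for the example), a
;; separate move and compare such as
;;	mov	r0, r1
;;	cmp	r1, #0
;; is re-expressed as a parallel compare-and-set, which can then match a
;; flag-setting move pattern elsewhere in this file and be emitted as a
;; single instruction (e.g. subs r0, r1, #0).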
10266
10267 ; Peepholes to spot possible load- and store-multiples.  If the ordering is
10268 ; reversed, check that the memory references aren't volatile.
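; For example, assuming four loads from consecutive, ascending word
; addresses off the same base register:
;	ldr	r0, [r4]
;	ldr	r1, [r4, #4]
;	ldr	r2, [r4, #8]
;	ldr	r3, [r4, #12]
; emit_ldm_seq can replace the sequence with a single
;	ldmia	r4, {r0, r1, r2, r3}
; and emit_stm_seq performs the analogous rewrite for stores.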
10269
10270 (define_peephole
10271 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10272 (match_operand:SI 4 "memory_operand" "m"))
10273 (set (match_operand:SI 1 "s_register_operand" "=rk")
10274 (match_operand:SI 5 "memory_operand" "m"))
10275 (set (match_operand:SI 2 "s_register_operand" "=rk")
10276 (match_operand:SI 6 "memory_operand" "m"))
10277 (set (match_operand:SI 3 "s_register_operand" "=rk")
10278 (match_operand:SI 7 "memory_operand" "m"))]
10279 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10280 "*
10281 return emit_ldm_seq (operands, 4);
10282 "
10283 )
10284
10285 (define_peephole
10286 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10287 (match_operand:SI 3 "memory_operand" "m"))
10288 (set (match_operand:SI 1 "s_register_operand" "=rk")
10289 (match_operand:SI 4 "memory_operand" "m"))
10290 (set (match_operand:SI 2 "s_register_operand" "=rk")
10291 (match_operand:SI 5 "memory_operand" "m"))]
10292 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10293 "*
10294 return emit_ldm_seq (operands, 3);
10295 "
10296 )
10297
10298 (define_peephole
10299 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10300 (match_operand:SI 2 "memory_operand" "m"))
10301 (set (match_operand:SI 1 "s_register_operand" "=rk")
10302 (match_operand:SI 3 "memory_operand" "m"))]
10303 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10304 "*
10305 return emit_ldm_seq (operands, 2);
10306 "
10307 )
10308
10309 (define_peephole
10310 [(set (match_operand:SI 4 "memory_operand" "=m")
10311 (match_operand:SI 0 "s_register_operand" "rk"))
10312 (set (match_operand:SI 5 "memory_operand" "=m")
10313 (match_operand:SI 1 "s_register_operand" "rk"))
10314 (set (match_operand:SI 6 "memory_operand" "=m")
10315 (match_operand:SI 2 "s_register_operand" "rk"))
10316 (set (match_operand:SI 7 "memory_operand" "=m")
10317 (match_operand:SI 3 "s_register_operand" "rk"))]
10318 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10319 "*
10320 return emit_stm_seq (operands, 4);
10321 "
10322 )
10323
10324 (define_peephole
10325 [(set (match_operand:SI 3 "memory_operand" "=m")
10326 (match_operand:SI 0 "s_register_operand" "rk"))
10327 (set (match_operand:SI 4 "memory_operand" "=m")
10328 (match_operand:SI 1 "s_register_operand" "rk"))
10329 (set (match_operand:SI 5 "memory_operand" "=m")
10330 (match_operand:SI 2 "s_register_operand" "rk"))]
10331 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10332 "*
10333 return emit_stm_seq (operands, 3);
10334 "
10335 )
10336
10337 (define_peephole
10338 [(set (match_operand:SI 2 "memory_operand" "=m")
10339 (match_operand:SI 0 "s_register_operand" "rk"))
10340 (set (match_operand:SI 3 "memory_operand" "=m")
10341 (match_operand:SI 1 "s_register_operand" "rk"))]
10342 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10343 "*
10344 return emit_stm_seq (operands, 2);
10345 "
10346 )
10347
10348 (define_split
10349 [(set (match_operand:SI 0 "s_register_operand" "")
10350 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10351 (const_int 0))
10352 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10353 [(match_operand:SI 3 "s_register_operand" "")
10354 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10355 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10356 "TARGET_ARM"
10357 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10358 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10359 (match_dup 5)))]
10360 ""
10361 )
10362
10363 ;; This split can be used because CC_Z mode implies that the following
10364 ;; branch will be an equality, or an unsigned inequality, so the sign
10365 ;; extension is not needed.
10366
10367 (define_split
10368 [(set (reg:CC_Z CC_REGNUM)
10369 (compare:CC_Z
10370 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10371 (const_int 24))
10372 (match_operand 1 "const_int_operand" "")))
10373 (clobber (match_scratch:SI 2 ""))]
10374 "TARGET_ARM
10375 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10376 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10377 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10378 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10379 "
10380 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10381 "
10382 )
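
;; An illustrative example (constant and registers chosen arbitrarily):
;; comparing (byte << 24) against 0x2a000000 only examines the top byte,
;; so after this split the test becomes a zero-extended byte load into the
;; scratch register compared against 0x2a:
;;	ldrb	r3, [r0]
;;	cmp	r3, #42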
10383 ;; ??? Check the patterns above for Thumb-2 usefulness
10384
10385 (define_expand "prologue"
10386 [(clobber (const_int 0))]
10387 "TARGET_EITHER"
10388 "if (TARGET_32BIT)
10389 arm_expand_prologue ();
10390 else
10391 thumb1_expand_prologue ();
10392 DONE;
10393 "
10394 )
10395
10396 (define_expand "epilogue"
10397 [(clobber (const_int 0))]
10398 "TARGET_EITHER"
10399 "
10400 if (crtl->calls_eh_return)
10401 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10402 if (TARGET_THUMB1)
10403 thumb1_expand_epilogue ();
10404 else if (USE_RETURN_INSN (FALSE))
10405 {
10406 emit_jump_insn (gen_return ());
10407 DONE;
10408 }
10409 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10410 gen_rtvec (1,
10411 gen_rtx_RETURN (VOIDmode)),
10412 VUNSPEC_EPILOGUE));
10413 DONE;
10414 "
10415 )
10416
10417 ;; Note - although unspec_volatiles USE all hard registers,
10418 ;; USEs are ignored after reload has completed.  Thus we need
10419 ;; to add an unspec of the link register to ensure that flow
10420 ;; does not think that it is unused by the sibcall branch that
10421 ;; will replace the standard function epilogue.
10422 (define_insn "sibcall_epilogue"
10423 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10424 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10425 "TARGET_32BIT"
10426 "*
10427 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10428 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10429 return arm_output_epilogue (next_nonnote_insn (insn));
10430 "
10431   ;; Length is the absolute worst case.
10432 [(set_attr "length" "44")
10433 (set_attr "type" "block")
10434 ;; We don't clobber the conditions, but the potential length of this
10435 ;; operation is sufficient to make conditionalizing the sequence
10436 ;; unlikely to be profitable.
10437 (set_attr "conds" "clob")]
10438 )
10439
10440 (define_insn "*epilogue_insns"
10441 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10442 "TARGET_EITHER"
10443 "*
10444 if (TARGET_32BIT)
10445 return arm_output_epilogue (NULL);
10446 else /* TARGET_THUMB1 */
10447 return thumb_unexpanded_epilogue ();
10448 "
10449   ; Length is the absolute worst case.
10450 [(set_attr "length" "44")
10451 (set_attr "type" "block")
10452 ;; We don't clobber the conditions, but the potential length of this
10453 ;; operation is sufficient to make conditionalizing the sequence
10454 ;; unlikely to be profitable.
10455 (set_attr "conds" "clob")]
10456 )
10457
10458 (define_expand "eh_epilogue"
10459 [(use (match_operand:SI 0 "register_operand" ""))
10460 (use (match_operand:SI 1 "register_operand" ""))
10461 (use (match_operand:SI 2 "register_operand" ""))]
10462 "TARGET_EITHER"
10463 "
10464 {
10465 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10466 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10467 {
10468 rtx ra = gen_rtx_REG (Pmode, 2);
10469
10470 emit_move_insn (ra, operands[2]);
10471 operands[2] = ra;
10472 }
10473     /* This is a hack -- we may have crystallized the function type too
10474 early. */
10475 cfun->machine->func_type = 0;
10476 }"
10477 )
10478
10479 ;; This split is only used during output to reduce the number of patterns
10480 ;; that need assembler instructions added to them.  We allowed the setting
10481 ;; of the conditions to be implicit during rtl generation so that
10482 ;; the conditional compare patterns would work.  However, this conflicts to
10483 ;; some extent with the conditional data operations, so we have to split them
10484 ;; up again here.
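;;
;; As an illustrative sketch (condition and registers invented for the
;; example), a conditional move that keeps operand 0 on the true arm, e.g.
;;	(set r0 (if_then_else (lt r1 r2) r0 r3))
;; is split into an explicit compare plus a conditionally executed move
;; under the reversed condition:
;;	cmp	r1, r2
;;	movge	r0, r3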
10485
10486 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10487 ;; conditional execution sufficient?
10488
10489 (define_split
10490 [(set (match_operand:SI 0 "s_register_operand" "")
10491 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10492 [(match_operand 2 "" "") (match_operand 3 "" "")])
10493 (match_dup 0)
10494 (match_operand 4 "" "")))
10495 (clobber (reg:CC CC_REGNUM))]
10496 "TARGET_ARM && reload_completed"
10497 [(set (match_dup 5) (match_dup 6))
10498 (cond_exec (match_dup 7)
10499 (set (match_dup 0) (match_dup 4)))]
10500 "
10501 {
10502 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10503 operands[2], operands[3]);
10504 enum rtx_code rc = GET_CODE (operands[1]);
10505
10506 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10507 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10508 if (mode == CCFPmode || mode == CCFPEmode)
10509 rc = reverse_condition_maybe_unordered (rc);
10510 else
10511 rc = reverse_condition (rc);
10512
10513 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10514 }"
10515 )
10516
10517 (define_split
10518 [(set (match_operand:SI 0 "s_register_operand" "")
10519 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10520 [(match_operand 2 "" "") (match_operand 3 "" "")])
10521 (match_operand 4 "" "")
10522 (match_dup 0)))
10523 (clobber (reg:CC CC_REGNUM))]
10524 "TARGET_ARM && reload_completed"
10525 [(set (match_dup 5) (match_dup 6))
10526 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10527 (set (match_dup 0) (match_dup 4)))]
10528 "
10529 {
10530 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10531 operands[2], operands[3]);
10532
10533 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10534 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10535 }"
10536 )
10537
10538 (define_split
10539 [(set (match_operand:SI 0 "s_register_operand" "")
10540 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10541 [(match_operand 2 "" "") (match_operand 3 "" "")])
10542 (match_operand 4 "" "")
10543 (match_operand 5 "" "")))
10544 (clobber (reg:CC CC_REGNUM))]
10545 "TARGET_ARM && reload_completed"
10546 [(set (match_dup 6) (match_dup 7))
10547 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10548 (set (match_dup 0) (match_dup 4)))
10549 (cond_exec (match_dup 8)
10550 (set (match_dup 0) (match_dup 5)))]
10551 "
10552 {
10553 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10554 operands[2], operands[3]);
10555 enum rtx_code rc = GET_CODE (operands[1]);
10556
10557 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10558 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10559 if (mode == CCFPmode || mode == CCFPEmode)
10560 rc = reverse_condition_maybe_unordered (rc);
10561 else
10562 rc = reverse_condition (rc);
10563
10564 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10565 }"
10566 )
10567
10568 (define_split
10569 [(set (match_operand:SI 0 "s_register_operand" "")
10570 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10571 [(match_operand:SI 2 "s_register_operand" "")
10572 (match_operand:SI 3 "arm_add_operand" "")])
10573 (match_operand:SI 4 "arm_rhs_operand" "")
10574 (not:SI
10575 (match_operand:SI 5 "s_register_operand" ""))))
10576 (clobber (reg:CC CC_REGNUM))]
10577 "TARGET_ARM && reload_completed"
10578 [(set (match_dup 6) (match_dup 7))
10579 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10580 (set (match_dup 0) (match_dup 4)))
10581 (cond_exec (match_dup 8)
10582 (set (match_dup 0) (not:SI (match_dup 5))))]
10583 "
10584 {
10585 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10586 operands[2], operands[3]);
10587 enum rtx_code rc = GET_CODE (operands[1]);
10588
10589 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10590 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10591 if (mode == CCFPmode || mode == CCFPEmode)
10592 rc = reverse_condition_maybe_unordered (rc);
10593 else
10594 rc = reverse_condition (rc);
10595
10596 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10597 }"
10598 )
10599
10600 (define_insn "*cond_move_not"
10601 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10602 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10603 [(match_operand 3 "cc_register" "") (const_int 0)])
10604 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10605 (not:SI
10606 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10607 "TARGET_ARM"
10608 "@
10609 mvn%D4\\t%0, %2
10610 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10611 [(set_attr "conds" "use")
10612 (set_attr "length" "4,8")]
10613 )
10614
10615 ;; The next two patterns occur when an AND operation is followed by a
10616 ;; scc insn sequence
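;; For example, sign-extracting a single bit N of %1 yields 0 or -1; the
;; first pattern below produces that directly as
;;	ands	%0, %1, #(1 << N)
;;	mvnne	%0, #0
;; and the second emits the complemented form using tst/mvneq/movne.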
10617
10618 (define_insn "*sign_extract_onebit"
10619 [(set (match_operand:SI 0 "s_register_operand" "=r")
10620 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10621 (const_int 1)
10622 (match_operand:SI 2 "const_int_operand" "n")))
10623 (clobber (reg:CC CC_REGNUM))]
10624 "TARGET_ARM"
10625 "*
10626 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10627 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10628 return \"mvnne\\t%0, #0\";
10629 "
10630 [(set_attr "conds" "clob")
10631 (set_attr "length" "8")]
10632 )
10633
10634 (define_insn "*not_signextract_onebit"
10635 [(set (match_operand:SI 0 "s_register_operand" "=r")
10636 (not:SI
10637 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10638 (const_int 1)
10639 (match_operand:SI 2 "const_int_operand" "n"))))
10640 (clobber (reg:CC CC_REGNUM))]
10641 "TARGET_ARM"
10642 "*
10643 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10644 output_asm_insn (\"tst\\t%1, %2\", operands);
10645 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10646 return \"movne\\t%0, #0\";
10647 "
10648 [(set_attr "conds" "clob")
10649 (set_attr "length" "12")]
10650 )
10651 ;; ??? The above patterns need auditing for Thumb-2
10652
10653 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10654 ;; expressions. For simplicity, the first register is also in the unspec
10655 ;; part.
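;;
;; For example, pushing {r4, r5, lr} in ARM state emits
;;	stmfd	sp!, {r4, r5, lr}
;; while Thumb state uses the equivalent
;;	push	{r4, r5, lr}
;; and a single-register push in ARM state is emitted as a store with
;; writeback (str rN, [sp, #-4]!), which is faster on at least the
;; StrongARM.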
10656 (define_insn "*push_multi"
10657 [(match_parallel 2 "multi_register_push"
10658 [(set (match_operand:BLK 0 "memory_operand" "=m")
10659 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10660 UNSPEC_PUSH_MULT))])]
10661 "TARGET_32BIT"
10662 "*
10663 {
10664 int num_saves = XVECLEN (operands[2], 0);
10665
10666 /* For the StrongARM at least it is faster to
10667 use STR to store only a single register.
10668 In Thumb mode always use push, and the assembler will pick
10669 something appropriate. */
10670 if (num_saves == 1 && TARGET_ARM)
10671 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10672 else
10673 {
10674 int i;
10675 char pattern[100];
10676
10677 if (TARGET_ARM)
10678 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10679 else
10680 strcpy (pattern, \"push\\t{%1\");
10681
10682 for (i = 1; i < num_saves; i++)
10683 {
10684 strcat (pattern, \", %|\");
10685 strcat (pattern,
10686 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10687 }
10688
10689 strcat (pattern, \"}\");
10690 output_asm_insn (pattern, operands);
10691 }
10692
10693 return \"\";
10694 }"
10695 [(set_attr "type" "store4")]
10696 )
10697
10698 (define_insn "stack_tie"
10699 [(set (mem:BLK (scratch))
10700 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10701 (match_operand:SI 1 "s_register_operand" "rk")]
10702 UNSPEC_PRLG_STK))]
10703 ""
10704 ""
10705 [(set_attr "length" "0")]
10706 )
10707
10708 ;; Similarly for the floating point registers
10709 (define_insn "*push_fp_multi"
10710 [(match_parallel 2 "multi_register_push"
10711 [(set (match_operand:BLK 0 "memory_operand" "=m")
10712 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10713 UNSPEC_PUSH_MULT))])]
10714 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10715 "*
10716 {
10717 char pattern[100];
10718
10719 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10720 output_asm_insn (pattern, operands);
10721 return \"\";
10722 }"
10723 [(set_attr "type" "f_store")]
10724 )
10725
10726 ;; Special patterns for dealing with the constant pool
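;;
;; As an illustrative sketch: align_4/align_8 simply emit 32-bit/64-bit
;; alignment, and a consttable_4 entry for an integer constant such as
;; 0x12345678 is assembled as a four-byte datum, typically
;;	.word	0x12345678
;; while floating-point constants are routed through assemble_real instead.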
10727
10728 (define_insn "align_4"
10729 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10730 "TARGET_EITHER"
10731 "*
10732 assemble_align (32);
10733 return \"\";
10734 "
10735 )
10736
10737 (define_insn "align_8"
10738 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10739 "TARGET_EITHER"
10740 "*
10741 assemble_align (64);
10742 return \"\";
10743 "
10744 )
10745
10746 (define_insn "consttable_end"
10747 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10748 "TARGET_EITHER"
10749 "*
10750 making_const_table = FALSE;
10751 return \"\";
10752 "
10753 )
10754
10755 (define_insn "consttable_1"
10756 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10757 "TARGET_THUMB1"
10758 "*
10759 making_const_table = TRUE;
10760 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10761 assemble_zeros (3);
10762 return \"\";
10763 "
10764 [(set_attr "length" "4")]
10765 )
10766
10767 (define_insn "consttable_2"
10768 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10769 "TARGET_THUMB1"
10770 "*
10771 making_const_table = TRUE;
10772 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10773 assemble_zeros (2);
10774 return \"\";
10775 "
10776 [(set_attr "length" "4")]
10777 )
10778
10779 (define_insn "consttable_4"
10780 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10781 "TARGET_EITHER"
10782 "*
10783 {
10784 making_const_table = TRUE;
10785 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10786 {
10787 case MODE_FLOAT:
10788 {
10789 REAL_VALUE_TYPE r;
10790 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10791 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10792 break;
10793 }
10794 default:
10795 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10796 break;
10797 }
10798 return \"\";
10799 }"
10800 [(set_attr "length" "4")]
10801 )
10802
10803 (define_insn "consttable_8"
10804 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10805 "TARGET_EITHER"
10806 "*
10807 {
10808 making_const_table = TRUE;
10809 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10810 {
10811 case MODE_FLOAT:
10812 {
10813 REAL_VALUE_TYPE r;
10814 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10815 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10816 break;
10817 }
10818 default:
10819 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10820 break;
10821 }
10822 return \"\";
10823 }"
10824 [(set_attr "length" "8")]
10825 )
10826
10827 (define_insn "consttable_16"
10828 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10829 "TARGET_EITHER"
10830 "*
10831 {
10832 making_const_table = TRUE;
10833 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10834 {
10835 case MODE_FLOAT:
10836 {
10837 REAL_VALUE_TYPE r;
10838 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10839 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10840 break;
10841 }
10842 default:
10843 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10844 break;
10845 }
10846 return \"\";
10847 }"
10848 [(set_attr "length" "16")]
10849 )
10850
10851 ;; Miscellaneous Thumb patterns
10852
10853 (define_expand "tablejump"
10854 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10855 (use (label_ref (match_operand 1 "" "")))])]
10856 "TARGET_THUMB1"
10857 "
10858 if (flag_pic)
10859 {
10860 /* Hopefully, CSE will eliminate this copy. */
10861 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10862 rtx reg2 = gen_reg_rtx (SImode);
10863
10864 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10865 operands[0] = reg2;
10866 }
10867 "
10868 )
10869
10870 ;; NB: never uses BX.
10871 (define_insn "*thumb1_tablejump"
10872 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10873 (use (label_ref (match_operand 1 "" "")))]
10874 "TARGET_THUMB1"
10875 "mov\\t%|pc, %0"
10876 [(set_attr "length" "2")]
10877 )
10878
10879 ;; V5 instructions.
10880
10881 (define_insn "clzsi2"
10882 [(set (match_operand:SI 0 "s_register_operand" "=r")
10883 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10884 "TARGET_32BIT && arm_arch5"
10885 "clz%?\\t%0, %1"
10886 [(set_attr "predicable" "yes")
10887 (set_attr "insn" "clz")])
10888
10889 ;; V5E instructions.
10890
10891 (define_insn "prefetch"
10892 [(prefetch (match_operand:SI 0 "address_operand" "p")
10893 (match_operand:SI 1 "" "")
10894 (match_operand:SI 2 "" ""))]
10895 "TARGET_32BIT && arm_arch5e"
10896 "pld\\t%a0")
10897
10898 ;; General predication pattern
10899
10900 (define_cond_exec
10901 [(match_operator 0 "arm_comparison_operator"
10902 [(match_operand 1 "cc_register" "")
10903 (const_int 0)])]
10904 "TARGET_32BIT"
10905 ""
10906 )
10907
10908 (define_insn "prologue_use"
10909 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10910 ""
10911 "%@ %0 needed for prologue"
10912 )
10913
10914
10915 ;; Patterns for exception handling
10916
10917 (define_expand "eh_return"
10918 [(use (match_operand 0 "general_operand" ""))]
10919 "TARGET_EITHER"
10920 "
10921 {
10922 if (TARGET_32BIT)
10923 emit_insn (gen_arm_eh_return (operands[0]));
10924 else
10925 emit_insn (gen_thumb_eh_return (operands[0]));
10926 DONE;
10927 }"
10928 )
10929
10930 ;; We can't expand this before we know where the link register is stored.
10931 (define_insn_and_split "arm_eh_return"
10932 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10933 VUNSPEC_EH_RETURN)
10934 (clobber (match_scratch:SI 1 "=&r"))]
10935 "TARGET_ARM"
10936 "#"
10937 "&& reload_completed"
10938 [(const_int 0)]
10939 "
10940 {
10941 arm_set_return_address (operands[0], operands[1]);
10942 DONE;
10943 }"
10944 )
10945
10946 (define_insn_and_split "thumb_eh_return"
10947 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10948 VUNSPEC_EH_RETURN)
10949 (clobber (match_scratch:SI 1 "=&l"))]
10950 "TARGET_THUMB1"
10951 "#"
10952 "&& reload_completed"
10953 [(const_int 0)]
10954 "
10955 {
10956 thumb_set_return_address (operands[0], operands[1]);
10957 DONE;
10958 }"
10959 )
10960
10961 \f
10962 ;; TLS support
10963
10964 (define_insn "load_tp_hard"
10965 [(set (match_operand:SI 0 "register_operand" "=r")
10966 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10967 "TARGET_HARD_TP"
10968 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10969 [(set_attr "predicable" "yes")]
10970 )
10971
10972 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10973 (define_insn "load_tp_soft"
10974 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10975 (clobber (reg:SI LR_REGNUM))
10976 (clobber (reg:SI IP_REGNUM))
10977 (clobber (reg:CC CC_REGNUM))]
10978 "TARGET_SOFT_TP"
10979 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10980 [(set_attr "conds" "clob")]
10981 )
10982
10983 ;; Load the FPA co-processor patterns
10984 (include "fpa.md")
10985 ;; Load the Maverick co-processor patterns
10986 (include "cirrus.md")
10987 ;; Vector bits common to IWMMXT and Neon
10988 (include "vec-common.md")
10989 ;; Load the Intel Wireless Multimedia Extension patterns
10990 (include "iwmmxt.md")
10991 ;; Load the VFP co-processor patterns
10992 (include "vfp.md")
10993 ;; Thumb-2 patterns
10994 (include "thumb2.md")
10995 ;; Neon patterns
10996 (include "neon.md")
10997