* config/arm/arm.md (insv): Do not share operands[0].
1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together;
71 ; the last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
79 ; register to "use".
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
95 ; instruction stream.
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
102 ]
103 )
104
105 ;; UNSPEC_VOLATILE Usage:
106
107 (define_constants
108 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
109 ; insn in the code.
110 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
111 ; instruction epilogue sequence that isn't expanded
112 ; into normal RTL. Used for both normal and sibcall
113 ; epilogues.
114 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
115 ; for inlined constants.
116 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
117 ; table.
118 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
119 ; an 8-bit object.
120 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
121 ; a 16-bit object.
122 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
123 ; a 32-bit object.
124 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
125 ; a 64-bit object.
126 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
127 ; a 128-bit object.
128 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
129 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
130 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
131 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
132 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
133 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
134 (VUNSPEC_EH_RETURN 20); Used to override the return address for exception
135 ; handling.
136 ]
137 )
138 \f
139 ;;---------------------------------------------------------------------------
140 ;; Attributes
141
142 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
143 ; generating ARM code. This is used to control the length of some insn
144 ; patterns that share the same RTL in both ARM and Thumb code.
145 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
146
147 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
148 ; scheduling decisions for the load unit and the multiplier.
149 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
150
151 ; IS_XSCALE is set to 'yes' when compiling for XScale.
152 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
153
154 ;; Operand number of an input operand that is shifted. Zero if the
155 ;; given instruction does not shift one of its input operands.
156 (define_attr "shift" "" (const_int 0))
157
158 ; Floating Point Unit. If we only have floating point emulation, then there
159 ; is no point in scheduling the floating point insns. (Well, for best
160 ; performance we should try and group them together).
161 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon"
162 (const (symbol_ref "arm_fpu_attr")))
163
164 ; LENGTH of an instruction (in bytes)
165 (define_attr "length" "" (const_int 4))
166
167 ; POOL_RANGE is how far away from a constant pool entry this insn
168 ; can be placed. If the distance is zero, then this insn will never
169 ; reference the pool.
170 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
171 ; before its address.
172 (define_attr "pool_range" "" (const_int 0))
173 (define_attr "neg_pool_range" "" (const_int 0))
174
175 ; An assembler sequence may clobber the condition codes without us knowing.
176 ; If such an insn references the pool, then we have no way of knowing how,
177 ; so use the most conservative value for pool_range.
178 (define_asm_attributes
179 [(set_attr "conds" "clob")
180 (set_attr "length" "4")
181 (set_attr "pool_range" "250")])
182
183 ;; The instruction used to implement a particular pattern. This
184 ;; information is used by pipeline descriptions to provide accurate
185 ;; scheduling information.
186
187 (define_attr "insn"
188 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
189 (const_string "other"))
190
191 ; The TYPE attribute is used to detect floating point instructions which, if
192 ; running on a co-processor, can run in parallel with other basic instructions.
193 ; If write-buffer scheduling is enabled then it can also be used in the
194 ; scheduling of writes.
195
196 ; Classification of each insn
197 ; Note: vfp.md has different meanings for some of these, and some further
198 ; types as well. See that file for details.
199 ; alu any alu instruction that doesn't hit memory or fp
200 ; regs or have a shifted source operand
201 ; alu_shift any data instruction that doesn't hit memory or fp
202 ; regs, but has a source operand shifted by a constant
203 ; alu_shift_reg any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a register value
205 ; mult a multiply instruction
206 ; block blockage insn, this blocks all functional units
207 ; float a floating point arithmetic operation (subject to expansion)
208 ; fdivd DFmode floating point division
209 ; fdivs SFmode floating point division
210 ; fmul Floating point multiply
211 ; ffmul Fast floating point multiply
212 ; farith Floating point arithmetic (4 cycle)
213 ; ffarith Fast floating point arithmetic (2 cycle)
214 ; float_em a floating point arithmetic operation that is normally emulated
215 ; even on a machine with an fpa.
216 ; f_load a floating point load from memory
217 ; f_store a floating point store to memory
218 ; f_load[sd] single/double load from memory
219 ; f_store[sd] single/double store to memory
220 ; f_flag a transfer of co-processor flags to the CPSR
221 ; f_mem_r a transfer of a floating point register to a real reg via mem
222 ; r_mem_f the reverse of f_mem_r
223 ; f_2_r fast transfer float to arm (no memory needed)
224 ; r_2_f fast transfer arm to float
225 ; f_cvt convert floating<->integral
226 ; branch a branch
227 ; call a subroutine call
228 ; load_byte load byte(s) from memory to arm registers
229 ; load1 load 1 word from memory to arm registers
230 ; load2 load 2 words from memory to arm registers
231 ; load3 load 3 words from memory to arm registers
232 ; load4 load 4 words from memory to arm registers
233 ; store1 store 1 word to memory from arm registers
234 ; store2 store 2 words
235 ; store3 store 3 words
236 ; store4 store 4 (or more) words
237 ; Additions for Cirrus Maverick co-processor:
238 ; mav_farith Floating point arithmetic (4 cycle)
239 ; mav_dmult Double multiplies (7 cycle)
240 ;
241
242 (define_attr "type"
243 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
244 (if_then_else
245 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
246 (const_string "mult")
247 (const_string "alu")))
248
249 ; Load scheduling, set from the arm_ld_sched variable
250 ; initialized by arm_override_options()
251 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
252
253 ;; Classification of NEON instructions for scheduling purposes.
254 ;; Do not set this attribute and the "type" attribute together in
255 ;; any one instruction pattern.
256 (define_attr "neon_type"
257 "neon_int_1,\
258 neon_int_2,\
259 neon_int_3,\
260 neon_int_4,\
261 neon_int_5,\
262 neon_vqneg_vqabs,\
263 neon_vmov,\
264 neon_vaba,\
265 neon_vsma,\
266 neon_vaba_qqq,\
267 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mul_qqq_8_16_32_ddd_32,\
269 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
270 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
271 neon_mla_qqq_8_16,\
272 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
273 neon_mla_qqq_32_qqd_32_scalar,\
274 neon_mul_ddd_16_scalar_32_16_long_scalar,\
275 neon_mul_qqd_32_scalar,\
276 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
277 neon_shift_1,\
278 neon_shift_2,\
279 neon_shift_3,\
280 neon_vshl_ddd,\
281 neon_vqshl_vrshl_vqrshl_qqq,\
282 neon_vsra_vrsra,\
283 neon_fp_vadd_ddd_vabs_dd,\
284 neon_fp_vadd_qqq_vabs_qq,\
285 neon_fp_vsum,\
286 neon_fp_vmul_ddd,\
287 neon_fp_vmul_qqd,\
288 neon_fp_vmla_ddd,\
289 neon_fp_vmla_qqq,\
290 neon_fp_vmla_ddd_scalar,\
291 neon_fp_vmla_qqq_scalar,\
292 neon_fp_vrecps_vrsqrts_ddd,\
293 neon_fp_vrecps_vrsqrts_qqq,\
294 neon_bp_simple,\
295 neon_bp_2cycle,\
296 neon_bp_3cycle,\
297 neon_ldr,\
298 neon_str,\
299 neon_vld1_1_2_regs,\
300 neon_vld1_3_4_regs,\
301 neon_vld2_2_regs_vld1_vld2_all_lanes,\
302 neon_vld2_4_regs,\
303 neon_vld3_vld4,\
304 neon_vst1_1_2_regs_vst2_2_regs,\
305 neon_vst1_3_4_regs,\
306 neon_vst2_4_regs_vst3_vst4,\
307 neon_vst3_vst4,\
308 neon_vld1_vld2_lane,\
309 neon_vld3_vld4_lane,\
310 neon_vst1_vst2_lane,\
311 neon_vst3_vst4_lane,\
312 neon_vld3_vld4_all_lanes,\
313 neon_mcr,\
314 neon_mcr_2_mcrr,\
315 neon_mrc,\
316 neon_mrrc,\
317 neon_ldm_2,\
318 neon_stm_2,\
319 none"
320 (const_string "none"))
321
322 ; condition codes: this one is used by final_prescan_insn to speed up
323 ; conditionalizing instructions. It saves having to scan the rtl to see if
324 ; it uses or alters the condition codes.
325 ;
326 ; USE means that the condition codes are used by the insn in the process of
327 ; outputting code; this means (at present) that we can't use the insn in
328 ; inlined branches.
329 ;
330 ; SET means that the purpose of the insn is to set the condition codes in a
331 ; well defined manner.
332 ;
333 ; CLOB means that the condition codes are altered in an undefined manner, if
334 ; they are altered at all
335 ;
336 ; JUMP_CLOB is used when the condition cannot be represented by a single
337 ; instruction (UNEQ and LTGT). These cannot be predicated.
338 ;
339 ; UNCONDITIONAL means the instruction cannot be conditionally executed.
340 ;
341 ; NOCOND means that the condition codes are neither altered by this insn nor
342 ; do they affect its output.
343
344 (define_attr "conds" "use,set,clob,jump_clob,unconditional,nocond"
345 (if_then_else (eq_attr "type" "call")
346 (const_string "clob")
347 (if_then_else (eq_attr "neon_type" "none")
348 (const_string "nocond")
349 (const_string "unconditional"))))
350
351 ; Predicable means that the insn can be conditionally executed based on
352 ; an automatically added predicate (additional patterns are generated by
353 ; gen...). We default to 'no' because no Thumb patterns match this rule
354 ; and not all ARM patterns do.
355 (define_attr "predicable" "no,yes" (const_string "no"))
356
357 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
358 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
359 ; suffer blockages enough to warrant modelling this (and it can adversely
360 ; affect the schedule).
361 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
362
363 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
364 ; to stall the processor. Used with model_wbuf above.
365 (define_attr "write_conflict" "no,yes"
366 (if_then_else (eq_attr "type"
367 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
368 (const_string "yes")
369 (const_string "no")))
370
371 ; Classify the insns into those that take one cycle and those that take more
372 ; than one on the main cpu execution unit.
373 (define_attr "core_cycles" "single,multi"
374 (if_then_else (eq_attr "type"
375 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
376 (const_string "single")
377 (const_string "multi")))
378
379 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
380 ;; distant label. Only applicable to Thumb code.
381 (define_attr "far_jump" "yes,no" (const_string "no"))
382
383
384 ;; The number of machine instructions this pattern expands to.
385 ;; Used for Thumb-2 conditional execution.
386 (define_attr "ce_count" "" (const_int 1))
387
388 ;;---------------------------------------------------------------------------
389 ;; Mode iterators
390
391 ; A list of modes that are exactly 64 bits in size. We use this to expand
392 ; some splits that are the same for all modes when operating on ARM
393 ; registers.
394 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
395
396 ;;---------------------------------------------------------------------------
397 ;; Predicates
398
399 (include "predicates.md")
400 (include "constraints.md")
401
402 ;;---------------------------------------------------------------------------
403 ;; Pipeline descriptions
404
405 ;; Processor type. This is created automatically from arm-cores.def.
406 (include "arm-tune.md")
407
408 (define_attr "tune_cortexr4" "yes,no"
409 (const (if_then_else
410 (eq_attr "tune" "cortexr4,cortexr4f")
411 (const_string "yes")
412 (const_string "no"))))
413
414 ;; True if the generic scheduling description should be used.
415
416 (define_attr "generic_sched" "yes,no"
417 (const (if_then_else
418 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
419 (eq_attr "tune_cortexr4" "yes"))
420 (const_string "no")
421 (const_string "yes"))))
422
423 (define_attr "generic_vfp" "yes,no"
424 (const (if_then_else
425 (and (eq_attr "fpu" "vfp")
426 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
427 (eq_attr "tune_cortexr4" "no"))
428 (const_string "yes")
429 (const_string "no"))))
430
431 (include "arm-generic.md")
432 (include "arm926ejs.md")
433 (include "arm1020e.md")
434 (include "arm1026ejs.md")
435 (include "arm1136jfs.md")
436 (include "cortex-a8.md")
437 (include "cortex-a9.md")
438 (include "cortex-r4.md")
439 (include "cortex-r4f.md")
440 (include "vfp11.md")
441
442 \f
443 ;;---------------------------------------------------------------------------
444 ;; Insn patterns
445 ;;
446 ;; Addition insns.
447
448 ;; Note: For DImode insns, there is normally no reason why operands should
449 ;; not be in the same register; what we don't want is for something being
450 ;; written to partially overlap something that is an input.
451 ;; Cirrus 64-bit additions should not be split because we have native
452 ;; 64-bit addition instructions.
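;; As an illustration (register allocation chosen arbitrarily), a DImode
;; addition on a 32-bit core is normally split after reload into an add of
;; the low words that sets the carry flag, followed by an add-with-carry of
;; the high words:
;;	adds	r0, r2, r4
;;	adc	r1, r3, r5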
453
454 (define_expand "adddi3"
455 [(parallel
456 [(set (match_operand:DI 0 "s_register_operand" "")
457 (plus:DI (match_operand:DI 1 "s_register_operand" "")
458 (match_operand:DI 2 "s_register_operand" "")))
459 (clobber (reg:CC CC_REGNUM))])]
460 "TARGET_EITHER"
461 "
462 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
463 {
464 if (!cirrus_fp_register (operands[0], DImode))
465 operands[0] = force_reg (DImode, operands[0]);
466 if (!cirrus_fp_register (operands[1], DImode))
467 operands[1] = force_reg (DImode, operands[1]);
468 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
469 DONE;
470 }
471
472 if (TARGET_THUMB1)
473 {
474 if (GET_CODE (operands[1]) != REG)
475 operands[1] = force_reg (SImode, operands[1]);
476 if (GET_CODE (operands[2]) != REG)
477 operands[2] = force_reg (SImode, operands[2]);
478 }
479 "
480 )
481
482 (define_insn "*thumb1_adddi3"
483 [(set (match_operand:DI 0 "register_operand" "=l")
484 (plus:DI (match_operand:DI 1 "register_operand" "%0")
485 (match_operand:DI 2 "register_operand" "l")))
486 (clobber (reg:CC CC_REGNUM))
487 ]
488 "TARGET_THUMB1"
489 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
490 [(set_attr "length" "4")]
491 )
492
493 (define_insn_and_split "*arm_adddi3"
494 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
495 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
496 (match_operand:DI 2 "s_register_operand" "r, 0")))
497 (clobber (reg:CC CC_REGNUM))]
498 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
499 "#"
500 "TARGET_32BIT && reload_completed"
501 [(parallel [(set (reg:CC_C CC_REGNUM)
502 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
503 (match_dup 1)))
504 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
505 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
506 (plus:SI (match_dup 4) (match_dup 5))))]
507 "
508 {
509 operands[3] = gen_highpart (SImode, operands[0]);
510 operands[0] = gen_lowpart (SImode, operands[0]);
511 operands[4] = gen_highpart (SImode, operands[1]);
512 operands[1] = gen_lowpart (SImode, operands[1]);
513 operands[5] = gen_highpart (SImode, operands[2]);
514 operands[2] = gen_lowpart (SImode, operands[2]);
515 }"
516 [(set_attr "conds" "clob")
517 (set_attr "length" "8")]
518 )
519
520 (define_insn_and_split "*adddi_sesidi_di"
521 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
522 (plus:DI (sign_extend:DI
523 (match_operand:SI 2 "s_register_operand" "r,r"))
524 (match_operand:DI 1 "s_register_operand" "r,0")))
525 (clobber (reg:CC CC_REGNUM))]
526 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
527 "#"
528 "TARGET_32BIT && reload_completed"
529 [(parallel [(set (reg:CC_C CC_REGNUM)
530 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
531 (match_dup 1)))
532 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
533 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
534 (plus:SI (ashiftrt:SI (match_dup 2)
535 (const_int 31))
536 (match_dup 4))))]
537 "
538 {
539 operands[3] = gen_highpart (SImode, operands[0]);
540 operands[0] = gen_lowpart (SImode, operands[0]);
541 operands[4] = gen_highpart (SImode, operands[1]);
542 operands[1] = gen_lowpart (SImode, operands[1]);
543 operands[2] = gen_lowpart (SImode, operands[2]);
544 }"
545 [(set_attr "conds" "clob")
546 (set_attr "length" "8")]
547 )
548
549 (define_insn_and_split "*adddi_zesidi_di"
550 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
551 (plus:DI (zero_extend:DI
552 (match_operand:SI 2 "s_register_operand" "r,r"))
553 (match_operand:DI 1 "s_register_operand" "r,0")))
554 (clobber (reg:CC CC_REGNUM))]
555 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
556 "#"
557 "TARGET_32BIT && reload_completed"
558 [(parallel [(set (reg:CC_C CC_REGNUM)
559 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
560 (match_dup 1)))
561 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
562 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
563 (plus:SI (match_dup 4) (const_int 0))))]
564 "
565 {
566 operands[3] = gen_highpart (SImode, operands[0]);
567 operands[0] = gen_lowpart (SImode, operands[0]);
568 operands[4] = gen_highpart (SImode, operands[1]);
569 operands[1] = gen_lowpart (SImode, operands[1]);
570 operands[2] = gen_lowpart (SImode, operands[2]);
571 }"
572 [(set_attr "conds" "clob")
573 (set_attr "length" "8")]
574 )
575
576 (define_expand "addsi3"
577 [(set (match_operand:SI 0 "s_register_operand" "")
578 (plus:SI (match_operand:SI 1 "s_register_operand" "")
579 (match_operand:SI 2 "reg_or_int_operand" "")))]
580 "TARGET_EITHER"
581 "
582 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
583 {
584 arm_split_constant (PLUS, SImode, NULL_RTX,
585 INTVAL (operands[2]), operands[0], operands[1],
586 optimize && can_create_pseudo_p ());
587 DONE;
588 }
589 "
590 )
591
592 ; If there is a scratch available, this will be faster than synthesizing the
593 ; addition.
594 (define_peephole2
595 [(match_scratch:SI 3 "r")
596 (set (match_operand:SI 0 "arm_general_register_operand" "")
597 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
598 (match_operand:SI 2 "const_int_operand" "")))]
599 "TARGET_32BIT &&
600 !(const_ok_for_arm (INTVAL (operands[2]))
601 || const_ok_for_arm (-INTVAL (operands[2])))
602 && const_ok_for_arm (~INTVAL (operands[2]))"
603 [(set (match_dup 3) (match_dup 2))
604 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
605 ""
606 )
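;; As an illustration (register numbers arbitrary), adding a constant such as
;; 0xfffff0ff -- where neither the value nor its negation is a valid ARM
;; immediate, but its bitwise complement 0xf00 is -- can then use the scratch:
;;	mvn	r3, #0x00000f00		@ r3 = 0xfffff0ff
;;	add	r0, r1, r3
;; rather than a longer synthesized sequence of adds and subtracts.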
607
608 ;; The r/r/k alternative is required when reloading the address
609 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
610 ;; put the duplicated register first, and not try the commutative version.
611 (define_insn_and_split "*arm_addsi3"
612 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
613 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
614 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
615 "TARGET_32BIT"
616 "@
617 add%?\\t%0, %1, %2
618 add%?\\t%0, %1, %2
619 add%?\\t%0, %2, %1
620 sub%?\\t%0, %1, #%n2
621 sub%?\\t%0, %1, #%n2
622 #"
623 "TARGET_32BIT &&
624 GET_CODE (operands[2]) == CONST_INT
625 && !(const_ok_for_arm (INTVAL (operands[2]))
626 || const_ok_for_arm (-INTVAL (operands[2])))"
627 [(clobber (const_int 0))]
628 "
629 arm_split_constant (PLUS, SImode, curr_insn,
630 INTVAL (operands[2]), operands[0],
631 operands[1], 0);
632 DONE;
633 "
634 [(set_attr "length" "4,4,4,4,4,16")
635 (set_attr "predicable" "yes")]
636 )
637
638 ;; Register class 'k' contains only the stack pointer register. Trying to
639 ;; reload it will always fail catastrophically, so never allow those
640 ;; alternatives to match if reloading is needed.
641
642 (define_insn "*thumb1_addsi3"
643 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
644 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
645 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
646 "TARGET_THUMB1"
647 "*
648 static const char * const asms[] =
649 {
650 \"add\\t%0, %0, %2\",
651 \"sub\\t%0, %0, #%n2\",
652 \"add\\t%0, %1, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %1, %2\",
656 \"add\\t%0, %1, %2\"
657 };
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
663 "
664 [(set_attr "length" "2")]
665 )
666
667 ;; Reloading and elimination of the frame pointer can
668 ;; sometimes cause this optimization to be missed.
669 (define_peephole2
670 [(set (match_operand:SI 0 "arm_general_register_operand" "")
671 (match_operand:SI 1 "const_int_operand" ""))
672 (set (match_dup 0)
673 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
674 "TARGET_THUMB1
675 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
676 && (INTVAL (operands[1]) & 3) == 0"
677 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
678 ""
679 )
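;; As an illustration (register number arbitrary), the Thumb-1 sequence
;;	mov	r3, #64
;;	add	r3, r3, sp
;; is rewritten into the single instruction
;;	add	r3, sp, #64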
680
681 ;; ??? Make Thumb-2 variants which prefer low regs
682 (define_insn "*addsi3_compare0"
683 [(set (reg:CC_NOOV CC_REGNUM)
684 (compare:CC_NOOV
685 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
686 (match_operand:SI 2 "arm_add_operand" "rI,L"))
687 (const_int 0)))
688 (set (match_operand:SI 0 "s_register_operand" "=r,r")
689 (plus:SI (match_dup 1) (match_dup 2)))]
690 "TARGET_32BIT"
691 "@
692 add%.\\t%0, %1, %2
693 sub%.\\t%0, %1, #%n2"
694 [(set_attr "conds" "set")]
695 )
696
697 (define_insn "*addsi3_compare0_scratch"
698 [(set (reg:CC_NOOV CC_REGNUM)
699 (compare:CC_NOOV
700 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
701 (match_operand:SI 1 "arm_add_operand" "rI,L"))
702 (const_int 0)))]
703 "TARGET_32BIT"
704 "@
705 cmn%?\\t%0, %1
706 cmp%?\\t%0, #%n1"
707 [(set_attr "conds" "set")]
708 )
709
710 (define_insn "*compare_negsi_si"
711 [(set (reg:CC_Z CC_REGNUM)
712 (compare:CC_Z
713 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
714 (match_operand:SI 1 "s_register_operand" "r")))]
715 "TARGET_32BIT"
716 "cmn%?\\t%1, %0"
717 [(set_attr "conds" "set")]
718 )
719
720 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
721 ;; addend is a constant.
722 (define_insn "*cmpsi2_addneg"
723 [(set (reg:CC CC_REGNUM)
724 (compare:CC
725 (match_operand:SI 1 "s_register_operand" "r,r")
726 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
727 (set (match_operand:SI 0 "s_register_operand" "=r,r")
728 (plus:SI (match_dup 1)
729 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
730 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
731 "@
732 sub%.\\t%0, %1, %2
733 add%.\\t%0, %1, #%n2"
734 [(set_attr "conds" "set")]
735 )
736
737 ;; Convert the sequence
738 ;; sub rd, rn, #1
739 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
740 ;; bne dest
741 ;; into
742 ;; subs rd, rn, #1
743 ;; bcs dest ((unsigned)rn >= 1)
744 ;; similarly for the beq variant using bcc.
745 ;; This is a common looping idiom (while (n--))
746 (define_peephole2
747 [(set (match_operand:SI 0 "arm_general_register_operand" "")
748 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
749 (const_int -1)))
750 (set (match_operand 2 "cc_register" "")
751 (compare (match_dup 0) (const_int -1)))
752 (set (pc)
753 (if_then_else (match_operator 3 "equality_operator"
754 [(match_dup 2) (const_int 0)])
755 (match_operand 4 "" "")
756 (match_operand 5 "" "")))]
757 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
758 [(parallel[
759 (set (match_dup 2)
760 (compare:CC
761 (match_dup 1) (const_int 1)))
762 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
763 (set (pc)
764 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
765 (match_dup 4)
766 (match_dup 5)))]
767 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
768 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
769 ? GEU : LTU),
770 VOIDmode,
771 operands[2], const0_rtx);"
772 )
773
774 ;; The next four insns work because they compare the result with one of
775 ;; the operands, and we know that the use of the condition code is
776 ;; either GEU or LTU, so we can use the carry flag from the addition
777 ;; instead of doing the compare a second time.
778 (define_insn "*addsi3_compare_op1"
779 [(set (reg:CC_C CC_REGNUM)
780 (compare:CC_C
781 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
782 (match_operand:SI 2 "arm_add_operand" "rI,L"))
783 (match_dup 1)))
784 (set (match_operand:SI 0 "s_register_operand" "=r,r")
785 (plus:SI (match_dup 1) (match_dup 2)))]
786 "TARGET_32BIT"
787 "@
788 add%.\\t%0, %1, %2
789 sub%.\\t%0, %1, #%n2"
790 [(set_attr "conds" "set")]
791 )
792
793 (define_insn "*addsi3_compare_op2"
794 [(set (reg:CC_C CC_REGNUM)
795 (compare:CC_C
796 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
797 (match_operand:SI 2 "arm_add_operand" "rI,L"))
798 (match_dup 2)))
799 (set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_dup 1) (match_dup 2)))]
801 "TARGET_32BIT"
802 "@
803 add%.\\t%0, %1, %2
804 sub%.\\t%0, %1, #%n2"
805 [(set_attr "conds" "set")]
806 )
807
808 (define_insn "*compare_addsi2_op0"
809 [(set (reg:CC_C CC_REGNUM)
810 (compare:CC_C
811 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
812 (match_operand:SI 1 "arm_add_operand" "rI,L"))
813 (match_dup 0)))]
814 "TARGET_32BIT"
815 "@
816 cmn%?\\t%0, %1
817 cmp%?\\t%0, #%n1"
818 [(set_attr "conds" "set")]
819 )
820
821 (define_insn "*compare_addsi2_op1"
822 [(set (reg:CC_C CC_REGNUM)
823 (compare:CC_C
824 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
825 (match_operand:SI 1 "arm_add_operand" "rI,L"))
826 (match_dup 1)))]
827 "TARGET_32BIT"
828 "@
829 cmn%?\\t%0, %1
830 cmp%?\\t%0, #%n1"
831 [(set_attr "conds" "set")]
832 )
833
834 (define_insn "*addsi3_carryin"
835 [(set (match_operand:SI 0 "s_register_operand" "=r")
836 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
837 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
838 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
839 "TARGET_32BIT"
840 "adc%?\\t%0, %1, %2"
841 [(set_attr "conds" "use")]
842 )
843
844 (define_insn "*addsi3_carryin_shift"
845 [(set (match_operand:SI 0 "s_register_operand" "=r")
846 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
847 (plus:SI
848 (match_operator:SI 2 "shift_operator"
849 [(match_operand:SI 3 "s_register_operand" "r")
850 (match_operand:SI 4 "reg_or_int_operand" "rM")])
851 (match_operand:SI 1 "s_register_operand" "r"))))]
852 "TARGET_32BIT"
853 "adc%?\\t%0, %1, %3%S2"
854 [(set_attr "conds" "use")
855 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
856 (const_string "alu_shift")
857 (const_string "alu_shift_reg")))]
858 )
859
860 (define_insn "*addsi3_carryin_alt1"
861 [(set (match_operand:SI 0 "s_register_operand" "=r")
862 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
863 (match_operand:SI 2 "arm_rhs_operand" "rI"))
864 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
865 "TARGET_32BIT"
866 "adc%?\\t%0, %1, %2"
867 [(set_attr "conds" "use")]
868 )
869
870 (define_insn "*addsi3_carryin_alt2"
871 [(set (match_operand:SI 0 "s_register_operand" "=r")
872 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
873 (match_operand:SI 1 "s_register_operand" "r"))
874 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
875 "TARGET_32BIT"
876 "adc%?\\t%0, %1, %2"
877 [(set_attr "conds" "use")]
878 )
879
880 (define_insn "*addsi3_carryin_alt3"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
883 (match_operand:SI 2 "arm_rhs_operand" "rI"))
884 (match_operand:SI 1 "s_register_operand" "r")))]
885 "TARGET_32BIT"
886 "adc%?\\t%0, %1, %2"
887 [(set_attr "conds" "use")]
888 )
889
890 (define_expand "incscc"
891 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
892 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
893 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
894 (match_operand:SI 1 "s_register_operand" "0,?r")))]
895 "TARGET_32BIT"
896 ""
897 )
898
899 (define_insn "*arm_incscc"
900 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
901 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
902 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
903 (match_operand:SI 1 "s_register_operand" "0,?r")))]
904 "TARGET_ARM"
905 "@
906 add%d2\\t%0, %1, #1
907 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
908 [(set_attr "conds" "use")
909 (set_attr "length" "4,8")]
910 )
911
912 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
913 (define_split
914 [(set (match_operand:SI 0 "s_register_operand" "")
915 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
916 (match_operand:SI 2 "s_register_operand" ""))
917 (const_int -1)))
918 (clobber (match_operand:SI 3 "s_register_operand" ""))]
919 "TARGET_32BIT"
920 [(set (match_dup 3) (match_dup 1))
921 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
922 "
923 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
924 ")
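;; As an illustration, with x == 4 the scratch register is loaded with
;; ~(4 - 1) == ~3, so (4 << y) - 1 can be computed as (register numbers
;; arbitrary):
;;	mvn	r3, #3			@ r3 = ~3
;;	mvn	r0, r3, lsl r2		@ r0 = ~(~3 << y) == (4 << y) - 1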
925
926 (define_expand "addsf3"
927 [(set (match_operand:SF 0 "s_register_operand" "")
928 (plus:SF (match_operand:SF 1 "s_register_operand" "")
929 (match_operand:SF 2 "arm_float_add_operand" "")))]
930 "TARGET_32BIT && TARGET_HARD_FLOAT"
931 "
932 if (TARGET_MAVERICK
933 && !cirrus_fp_register (operands[2], SFmode))
934 operands[2] = force_reg (SFmode, operands[2]);
935 ")
936
937 (define_expand "adddf3"
938 [(set (match_operand:DF 0 "s_register_operand" "")
939 (plus:DF (match_operand:DF 1 "s_register_operand" "")
940 (match_operand:DF 2 "arm_float_add_operand" "")))]
941 "TARGET_32BIT && TARGET_HARD_FLOAT"
942 "
943 if (TARGET_MAVERICK
944 && !cirrus_fp_register (operands[2], DFmode))
945 operands[2] = force_reg (DFmode, operands[2]);
946 ")
947
948 (define_expand "subdi3"
949 [(parallel
950 [(set (match_operand:DI 0 "s_register_operand" "")
951 (minus:DI (match_operand:DI 1 "s_register_operand" "")
952 (match_operand:DI 2 "s_register_operand" "")))
953 (clobber (reg:CC CC_REGNUM))])]
954 "TARGET_EITHER"
955 "
956 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
957 && TARGET_32BIT
958 && cirrus_fp_register (operands[0], DImode)
959 && cirrus_fp_register (operands[1], DImode))
960 {
961 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
962 DONE;
963 }
964
965 if (TARGET_THUMB1)
966 {
967 if (GET_CODE (operands[1]) != REG)
968 operands[1] = force_reg (SImode, operands[1]);
969 if (GET_CODE (operands[2]) != REG)
970 operands[2] = force_reg (SImode, operands[2]);
971 }
972 "
973 )
974
975 (define_insn "*arm_subdi3"
976 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
977 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
978 (match_operand:DI 2 "s_register_operand" "r,0,0")))
979 (clobber (reg:CC CC_REGNUM))]
980 "TARGET_32BIT"
981 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
982 [(set_attr "conds" "clob")
983 (set_attr "length" "8")]
984 )
985
986 (define_insn "*thumb_subdi3"
987 [(set (match_operand:DI 0 "register_operand" "=l")
988 (minus:DI (match_operand:DI 1 "register_operand" "0")
989 (match_operand:DI 2 "register_operand" "l")))
990 (clobber (reg:CC CC_REGNUM))]
991 "TARGET_THUMB1"
992 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
993 [(set_attr "length" "4")]
994 )
995
996 (define_insn "*subdi_di_zesidi"
997 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
998 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
999 (zero_extend:DI
1000 (match_operand:SI 2 "s_register_operand" "r,r"))))
1001 (clobber (reg:CC CC_REGNUM))]
1002 "TARGET_32BIT"
1003 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1004 [(set_attr "conds" "clob")
1005 (set_attr "length" "8")]
1006 )
1007
1008 (define_insn "*subdi_di_sesidi"
1009 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1010 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1011 (sign_extend:DI
1012 (match_operand:SI 2 "s_register_operand" "r,r"))))
1013 (clobber (reg:CC CC_REGNUM))]
1014 "TARGET_32BIT"
1015 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1016 [(set_attr "conds" "clob")
1017 (set_attr "length" "8")]
1018 )
1019
1020 (define_insn "*subdi_zesidi_di"
1021 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1022 (minus:DI (zero_extend:DI
1023 (match_operand:SI 2 "s_register_operand" "r,r"))
1024 (match_operand:DI 1 "s_register_operand" "?r,0")))
1025 (clobber (reg:CC CC_REGNUM))]
1026 "TARGET_ARM"
1027 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1028 [(set_attr "conds" "clob")
1029 (set_attr "length" "8")]
1030 )
1031
1032 (define_insn "*subdi_sesidi_di"
1033 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1034 (minus:DI (sign_extend:DI
1035 (match_operand:SI 2 "s_register_operand" "r,r"))
1036 (match_operand:DI 1 "s_register_operand" "?r,0")))
1037 (clobber (reg:CC CC_REGNUM))]
1038 "TARGET_ARM"
1039 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1040 [(set_attr "conds" "clob")
1041 (set_attr "length" "8")]
1042 )
1043
1044 (define_insn "*subdi_zesidi_zesidi"
1045 [(set (match_operand:DI 0 "s_register_operand" "=r")
1046 (minus:DI (zero_extend:DI
1047 (match_operand:SI 1 "s_register_operand" "r"))
1048 (zero_extend:DI
1049 (match_operand:SI 2 "s_register_operand" "r"))))
1050 (clobber (reg:CC CC_REGNUM))]
1051 "TARGET_32BIT"
1052 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1053 [(set_attr "conds" "clob")
1054 (set_attr "length" "8")]
1055 )
1056
1057 (define_expand "subsi3"
1058 [(set (match_operand:SI 0 "s_register_operand" "")
1059 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1060 (match_operand:SI 2 "s_register_operand" "")))]
1061 "TARGET_EITHER"
1062 "
1063 if (GET_CODE (operands[1]) == CONST_INT)
1064 {
1065 if (TARGET_32BIT)
1066 {
1067 arm_split_constant (MINUS, SImode, NULL_RTX,
1068 INTVAL (operands[1]), operands[0],
1069 operands[2], optimize && can_create_pseudo_p ());
1070 DONE;
1071 }
1072 else /* TARGET_THUMB1 */
1073 operands[1] = force_reg (SImode, operands[1]);
1074 }
1075 "
1076 )
1077
1078 (define_insn "*thumb1_subsi3_insn"
1079 [(set (match_operand:SI 0 "register_operand" "=l")
1080 (minus:SI (match_operand:SI 1 "register_operand" "l")
1081 (match_operand:SI 2 "register_operand" "l")))]
1082 "TARGET_THUMB1"
1083 "sub\\t%0, %1, %2"
1084 [(set_attr "length" "2")]
1085 )
1086
1087 ; ??? Check Thumb-2 split length
1088 (define_insn_and_split "*arm_subsi3_insn"
1089 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1090 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1091 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1092 "TARGET_32BIT"
1093 "@
1094 rsb%?\\t%0, %2, %1
1095 sub%?\\t%0, %1, %2
1096 #"
1097 "TARGET_32BIT
1098 && GET_CODE (operands[1]) == CONST_INT
1099 && !const_ok_for_arm (INTVAL (operands[1]))"
1100 [(clobber (const_int 0))]
1101 "
1102 arm_split_constant (MINUS, SImode, curr_insn,
1103 INTVAL (operands[1]), operands[0], operands[2], 0);
1104 DONE;
1105 "
1106 [(set_attr "length" "4,4,16")
1107 (set_attr "predicable" "yes")]
1108 )
1109
1110 (define_peephole2
1111 [(match_scratch:SI 3 "r")
1112 (set (match_operand:SI 0 "arm_general_register_operand" "")
1113 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1114 (match_operand:SI 2 "arm_general_register_operand" "")))]
1115 "TARGET_32BIT
1116 && !const_ok_for_arm (INTVAL (operands[1]))
1117 && const_ok_for_arm (~INTVAL (operands[1]))"
1118 [(set (match_dup 3) (match_dup 1))
1119 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1120 ""
1121 )
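;; As with the addsi3 peephole above, this lets a subtraction from a constant
;; whose complement is a valid immediate use MVN followed by SUB (illustrative
;; registers and constant):
;;	mvn	r3, #0x000000f0		@ r3 = 0xffffff0f
;;	sub	r0, r3, r2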
1122
1123 (define_insn "*subsi3_compare0"
1124 [(set (reg:CC_NOOV CC_REGNUM)
1125 (compare:CC_NOOV
1126 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1127 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1128 (const_int 0)))
1129 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1130 (minus:SI (match_dup 1) (match_dup 2)))]
1131 "TARGET_32BIT"
1132 "@
1133 sub%.\\t%0, %1, %2
1134 rsb%.\\t%0, %2, %1"
1135 [(set_attr "conds" "set")]
1136 )
1137
1138 (define_expand "decscc"
1139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1140 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1141 (match_operator:SI 2 "arm_comparison_operator"
1142 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1143 "TARGET_32BIT"
1144 ""
1145 )
1146
1147 (define_insn "*arm_decscc"
1148 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1149 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1150 (match_operator:SI 2 "arm_comparison_operator"
1151 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1152 "TARGET_ARM"
1153 "@
1154 sub%d2\\t%0, %1, #1
1155 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1156 [(set_attr "conds" "use")
1157 (set_attr "length" "*,8")]
1158 )
1159
1160 (define_expand "subsf3"
1161 [(set (match_operand:SF 0 "s_register_operand" "")
1162 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1163 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1164 "TARGET_32BIT && TARGET_HARD_FLOAT"
1165 "
1166 if (TARGET_MAVERICK)
1167 {
1168 if (!cirrus_fp_register (operands[1], SFmode))
1169 operands[1] = force_reg (SFmode, operands[1]);
1170 if (!cirrus_fp_register (operands[2], SFmode))
1171 operands[2] = force_reg (SFmode, operands[2]);
1172 }
1173 ")
1174
1175 (define_expand "subdf3"
1176 [(set (match_operand:DF 0 "s_register_operand" "")
1177 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1178 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT"
1180 "
1181 if (TARGET_MAVERICK)
1182 {
1183 if (!cirrus_fp_register (operands[1], DFmode))
1184 operands[1] = force_reg (DFmode, operands[1]);
1185 if (!cirrus_fp_register (operands[2], DFmode))
1186 operands[2] = force_reg (DFmode, operands[2]);
1187 }
1188 ")
1189
1190 \f
1191 ;; Multiplication insns
1192
1193 (define_expand "mulsi3"
1194 [(set (match_operand:SI 0 "s_register_operand" "")
1195 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1196 (match_operand:SI 1 "s_register_operand" "")))]
1197 "TARGET_EITHER"
1198 ""
1199 )
1200
1201 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1202 (define_insn "*arm_mulsi3"
1203 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1204 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1205 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1206 "TARGET_32BIT && !arm_arch6"
1207 "mul%?\\t%0, %2, %1"
1208 [(set_attr "insn" "mul")
1209 (set_attr "predicable" "yes")]
1210 )
1211
1212 (define_insn "*arm_mulsi3_v6"
1213 [(set (match_operand:SI 0 "s_register_operand" "=r")
1214 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1215 (match_operand:SI 2 "s_register_operand" "r")))]
1216 "TARGET_32BIT && arm_arch6"
1217 "mul%?\\t%0, %1, %2"
1218 [(set_attr "insn" "mul")
1219 (set_attr "predicable" "yes")]
1220 )
1221
1222 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1223 ; 1 and 2 are the same, because reload will make operand 0 match
1224 ; operand 1 without realizing that this conflicts with operand 2. We fix
1225 ; this by adding another alternative to match this case, and then `reload'
1226 ; it ourselves. This alternative must come first.
1227 (define_insn "*thumb_mulsi3"
1228 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1229 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1230 (match_operand:SI 2 "register_operand" "l,l,l")))]
1231 "TARGET_THUMB1 && !arm_arch6"
1232 "*
1233 if (which_alternative < 2)
1234 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1235 else
1236 return \"mul\\t%0, %2\";
1237 "
1238 [(set_attr "length" "4,4,2")
1239 (set_attr "insn" "mul")]
1240 )
1241
1242 (define_insn "*thumb_mulsi3_v6"
1243 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1244 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1245 (match_operand:SI 2 "register_operand" "l,0,0")))]
1246 "TARGET_THUMB1 && arm_arch6"
1247 "@
1248 mul\\t%0, %2
1249 mul\\t%0, %1
1250 mul\\t%0, %1"
1251 [(set_attr "length" "2")
1252 (set_attr "insn" "mul")]
1253 )
1254
1255 (define_insn "*mulsi3_compare0"
1256 [(set (reg:CC_NOOV CC_REGNUM)
1257 (compare:CC_NOOV (mult:SI
1258 (match_operand:SI 2 "s_register_operand" "r,r")
1259 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1260 (const_int 0)))
1261 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1262 (mult:SI (match_dup 2) (match_dup 1)))]
1263 "TARGET_ARM && !arm_arch6"
1264 "mul%.\\t%0, %2, %1"
1265 [(set_attr "conds" "set")
1266 (set_attr "insn" "muls")]
1267 )
1268
1269 (define_insn "*mulsi3_compare0_v6"
1270 [(set (reg:CC_NOOV CC_REGNUM)
1271 (compare:CC_NOOV (mult:SI
1272 (match_operand:SI 2 "s_register_operand" "r")
1273 (match_operand:SI 1 "s_register_operand" "r"))
1274 (const_int 0)))
1275 (set (match_operand:SI 0 "s_register_operand" "=r")
1276 (mult:SI (match_dup 2) (match_dup 1)))]
1277 "TARGET_ARM && arm_arch6 && optimize_size"
1278 "mul%.\\t%0, %2, %1"
1279 [(set_attr "conds" "set")
1280 (set_attr "insn" "muls")]
1281 )
1282
1283 (define_insn "*mulsi_compare0_scratch"
1284 [(set (reg:CC_NOOV CC_REGNUM)
1285 (compare:CC_NOOV (mult:SI
1286 (match_operand:SI 2 "s_register_operand" "r,r")
1287 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1288 (const_int 0)))
1289 (clobber (match_scratch:SI 0 "=&r,&r"))]
1290 "TARGET_ARM && !arm_arch6"
1291 "mul%.\\t%0, %2, %1"
1292 [(set_attr "conds" "set")
1293 (set_attr "insn" "muls")]
1294 )
1295
1296 (define_insn "*mulsi_compare0_scratch_v6"
1297 [(set (reg:CC_NOOV CC_REGNUM)
1298 (compare:CC_NOOV (mult:SI
1299 (match_operand:SI 2 "s_register_operand" "r")
1300 (match_operand:SI 1 "s_register_operand" "r"))
1301 (const_int 0)))
1302 (clobber (match_scratch:SI 0 "=r"))]
1303 "TARGET_ARM && arm_arch6 && optimize_size"
1304 "mul%.\\t%0, %2, %1"
1305 [(set_attr "conds" "set")
1306 (set_attr "insn" "muls")]
1307 )
1308
1309 ;; Unnamed templates to match MLA instruction.
1310
1311 (define_insn "*mulsi3addsi"
1312 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1313 (plus:SI
1314 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1315 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1316 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1317 "TARGET_32BIT && !arm_arch6"
1318 "mla%?\\t%0, %2, %1, %3"
1319 [(set_attr "insn" "mla")
1320 (set_attr "predicable" "yes")]
1321 )
1322
1323 (define_insn "*mulsi3addsi_v6"
1324 [(set (match_operand:SI 0 "s_register_operand" "=r")
1325 (plus:SI
1326 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1327 (match_operand:SI 1 "s_register_operand" "r"))
1328 (match_operand:SI 3 "s_register_operand" "r")))]
1329 "TARGET_32BIT && arm_arch6"
1330 "mla%?\\t%0, %2, %1, %3"
1331 [(set_attr "insn" "mla")
1332 (set_attr "predicable" "yes")]
1333 )
1334
1335 (define_insn "*mulsi3addsi_compare0"
1336 [(set (reg:CC_NOOV CC_REGNUM)
1337 (compare:CC_NOOV
1338 (plus:SI (mult:SI
1339 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1340 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1341 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1342 (const_int 0)))
1343 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1344 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1345 (match_dup 3)))]
1346 "TARGET_ARM && arm_arch6"
1347 "mla%.\\t%0, %2, %1, %3"
1348 [(set_attr "conds" "set")
1349 (set_attr "insn" "mlas")]
1350 )
1351
1352 (define_insn "*mulsi3addsi_compare0_v6"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1354 (compare:CC_NOOV
1355 (plus:SI (mult:SI
1356 (match_operand:SI 2 "s_register_operand" "r")
1357 (match_operand:SI 1 "s_register_operand" "r"))
1358 (match_operand:SI 3 "s_register_operand" "r"))
1359 (const_int 0)))
1360 (set (match_operand:SI 0 "s_register_operand" "=r")
1361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1362 (match_dup 3)))]
1363 "TARGET_ARM && arm_arch6 && optimize_size"
1364 "mla%.\\t%0, %2, %1, %3"
1365 [(set_attr "conds" "set")
1366 (set_attr "insn" "mlas")]
1367 )
1368
1369 (define_insn "*mulsi3addsi_compare0_scratch"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1371 (compare:CC_NOOV
1372 (plus:SI (mult:SI
1373 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1374 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1375 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1376 (const_int 0)))
1377 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1378 "TARGET_ARM && !arm_arch6"
1379 "mla%.\\t%0, %2, %1, %3"
1380 [(set_attr "conds" "set")
1381 (set_attr "insn" "mlas")]
1382 )
1383
1384 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1386 (compare:CC_NOOV
1387 (plus:SI (mult:SI
1388 (match_operand:SI 2 "s_register_operand" "r")
1389 (match_operand:SI 1 "s_register_operand" "r"))
1390 (match_operand:SI 3 "s_register_operand" "r"))
1391 (const_int 0)))
1392 (clobber (match_scratch:SI 0 "=r"))]
1393 "TARGET_ARM && arm_arch6 && optimize_size"
1394 "mla%.\\t%0, %2, %1, %3"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "mlas")]
1397 )
1398
1399 (define_insn "*mulsi3subsi"
1400 [(set (match_operand:SI 0 "s_register_operand" "=r")
1401 (minus:SI
1402 (match_operand:SI 3 "s_register_operand" "r")
1403 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1404 (match_operand:SI 1 "s_register_operand" "r"))))]
1405 "TARGET_32BIT && arm_arch_thumb2"
1406 "mls%?\\t%0, %2, %1, %3"
1407 [(set_attr "insn" "mla")
1408 (set_attr "predicable" "yes")]
1409 )
1410
1411 ;; Unnamed template to match long long multiply-accumulate (smlal)
1412
1413 (define_insn "*mulsidi3adddi"
1414 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1415 (plus:DI
1416 (mult:DI
1417 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1418 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1419 (match_operand:DI 1 "s_register_operand" "0")))]
1420 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1421 "smlal%?\\t%Q0, %R0, %3, %2"
1422 [(set_attr "insn" "smlal")
1423 (set_attr "predicable" "yes")]
1424 )
1425
1426 (define_insn "*mulsidi3adddi_v6"
1427 [(set (match_operand:DI 0 "s_register_operand" "=r")
1428 (plus:DI
1429 (mult:DI
1430 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1431 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1432 (match_operand:DI 1 "s_register_operand" "0")))]
1433 "TARGET_32BIT && arm_arch6"
1434 "smlal%?\\t%Q0, %R0, %3, %2"
1435 [(set_attr "insn" "smlal")
1436 (set_attr "predicable" "yes")]
1437 )
1438
1439 ;; 32x32->64 widening multiply.
1440 ;; As with mulsi3, the only difference between the v3-5 and v6+
1441 ;; versions of these patterns is the requirement that the output not
1442 ;; overlap the inputs, but that still means we have to have a named
1443 ;; expander and two different starred insns.
1444
1445 (define_expand "mulsidi3"
1446 [(set (match_operand:DI 0 "s_register_operand" "")
1447 (mult:DI
1448 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1449 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1450 "TARGET_32BIT && arm_arch3m"
1451 ""
1452 )
1453
1454 (define_insn "*mulsidi3_nov6"
1455 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1456 (mult:DI
1457 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1458 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1459 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1460 "smull%?\\t%Q0, %R0, %1, %2"
1461 [(set_attr "insn" "smull")
1462 (set_attr "predicable" "yes")]
1463 )
1464
1465 (define_insn "*mulsidi3_v6"
1466 [(set (match_operand:DI 0 "s_register_operand" "=r")
1467 (mult:DI
1468 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1469 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1470 "TARGET_32BIT && arm_arch6"
1471 "smull%?\\t%Q0, %R0, %1, %2"
1472 [(set_attr "insn" "smull")
1473 (set_attr "predicable" "yes")]
1474 )
1475
1476 (define_expand "umulsidi3"
1477 [(set (match_operand:DI 0 "s_register_operand" "")
1478 (mult:DI
1479 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1480 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1481 "TARGET_32BIT && arm_arch3m"
1482 ""
1483 )
1484
1485 (define_insn "*umulsidi3_nov6"
1486 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1487 (mult:DI
1488 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1489 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1490 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1491 "umull%?\\t%Q0, %R0, %1, %2"
1492 [(set_attr "insn" "umull")
1493 (set_attr "predicable" "yes")]
1494 )
1495
1496 (define_insn "*umulsidi3_v6"
1497 [(set (match_operand:DI 0 "s_register_operand" "=r")
1498 (mult:DI
1499 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1500 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1501 "TARGET_32BIT && arm_arch6"
1502 "umull%?\\t%Q0, %R0, %1, %2"
1503 [(set_attr "insn" "umull")
1504 (set_attr "predicable" "yes")]
1505 )
1506
1507 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1508
1509 (define_insn "*umulsidi3adddi"
1510 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1511 (plus:DI
1512 (mult:DI
1513 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1514 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1515 (match_operand:DI 1 "s_register_operand" "0")))]
1516 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1517 "umlal%?\\t%Q0, %R0, %3, %2"
1518 [(set_attr "insn" "umlal")
1519 (set_attr "predicable" "yes")]
1520 )
1521
1522 (define_insn "*umulsidi3adddi_v6"
1523 [(set (match_operand:DI 0 "s_register_operand" "=r")
1524 (plus:DI
1525 (mult:DI
1526 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1527 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1528 (match_operand:DI 1 "s_register_operand" "0")))]
1529 "TARGET_32BIT && arm_arch6"
1530 "umlal%?\\t%Q0, %R0, %3, %2"
1531 [(set_attr "insn" "umlal")
1532 (set_attr "predicable" "yes")]
1533 )
1534
1535 (define_expand "smulsi3_highpart"
1536 [(parallel
1537 [(set (match_operand:SI 0 "s_register_operand" "")
1538 (truncate:SI
1539 (lshiftrt:DI
1540 (mult:DI
1541 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1542 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1543 (const_int 32))))
1544 (clobber (match_scratch:SI 3 ""))])]
1545 "TARGET_32BIT && arm_arch3m"
1546 ""
1547 )
1548
1549 (define_insn "*smulsi3_highpart_nov6"
1550 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1551 (truncate:SI
1552 (lshiftrt:DI
1553 (mult:DI
1554 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1555 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1556 (const_int 32))))
1557 (clobber (match_scratch:SI 3 "=&r,&r"))]
1558 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1559 "smull%?\\t%3, %0, %2, %1"
1560 [(set_attr "insn" "smull")
1561 (set_attr "predicable" "yes")]
1562 )
1563
1564 (define_insn "*smulsi3_highpart_v6"
1565 [(set (match_operand:SI 0 "s_register_operand" "=r")
1566 (truncate:SI
1567 (lshiftrt:DI
1568 (mult:DI
1569 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1570 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1571 (const_int 32))))
1572 (clobber (match_scratch:SI 3 "=r"))]
1573 "TARGET_32BIT && arm_arch6"
1574 "smull%?\\t%3, %0, %2, %1"
1575 [(set_attr "insn" "smull")
1576 (set_attr "predicable" "yes")]
1577 )
1578
1579 (define_expand "umulsi3_highpart"
1580 [(parallel
1581 [(set (match_operand:SI 0 "s_register_operand" "")
1582 (truncate:SI
1583 (lshiftrt:DI
1584 (mult:DI
1585 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1586 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1587 (const_int 32))))
1588 (clobber (match_scratch:SI 3 ""))])]
1589 "TARGET_32BIT && arm_arch3m"
1590 ""
1591 )
1592
1593 (define_insn "*umulsi3_highpart_nov6"
1594 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1595 (truncate:SI
1596 (lshiftrt:DI
1597 (mult:DI
1598 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1599 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1600 (const_int 32))))
1601 (clobber (match_scratch:SI 3 "=&r,&r"))]
1602 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1603 "umull%?\\t%3, %0, %2, %1"
1604 [(set_attr "insn" "umull")
1605 (set_attr "predicable" "yes")]
1606 )
1607
1608 (define_insn "*umulsi3_highpart_v6"
1609 [(set (match_operand:SI 0 "s_register_operand" "=r")
1610 (truncate:SI
1611 (lshiftrt:DI
1612 (mult:DI
1613 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1614 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1615 (const_int 32))))
1616 (clobber (match_scratch:SI 3 "=r"))]
1617 "TARGET_32BIT && arm_arch6"
1618 "umull%?\\t%3, %0, %2, %1"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
1621 )
1622
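;; 16x16->32 DSP multiplies.  In the SMUL<x><y> mnemonics, <x> and <y>
;; select the bottom (b) or top (t) 16-bit half of the first and second
;; source registers respectively.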
1623 (define_insn "mulhisi3"
1624 [(set (match_operand:SI 0 "s_register_operand" "=r")
1625 (mult:SI (sign_extend:SI
1626 (match_operand:HI 1 "s_register_operand" "%r"))
1627 (sign_extend:SI
1628 (match_operand:HI 2 "s_register_operand" "r"))))]
1629 "TARGET_DSP_MULTIPLY"
1630 "smulbb%?\\t%0, %1, %2"
1631 [(set_attr "insn" "smulxy")
1632 (set_attr "predicable" "yes")]
1633 )
1634
1635 (define_insn "*mulhisi3tb"
1636 [(set (match_operand:SI 0 "s_register_operand" "=r")
1637 (mult:SI (ashiftrt:SI
1638 (match_operand:SI 1 "s_register_operand" "r")
1639 (const_int 16))
1640 (sign_extend:SI
1641 (match_operand:HI 2 "s_register_operand" "r"))))]
1642 "TARGET_DSP_MULTIPLY"
1643 "smultb%?\\t%0, %1, %2"
1644 [(set_attr "insn" "smulxy")
1645 (set_attr "predicable" "yes")]
1646 )
1647
1648 (define_insn "*mulhisi3bt"
1649 [(set (match_operand:SI 0 "s_register_operand" "=r")
1650 (mult:SI (sign_extend:SI
1651 (match_operand:HI 1 "s_register_operand" "r"))
1652 (ashiftrt:SI
1653 (match_operand:SI 2 "s_register_operand" "r")
1654 (const_int 16))))]
1655 "TARGET_DSP_MULTIPLY"
1656 "smulbt%?\\t%0, %1, %2"
1657 [(set_attr "insn" "smulxy")
1658 (set_attr "predicable" "yes")]
1659 )
1660
1661 (define_insn "*mulhisi3tt"
1662 [(set (match_operand:SI 0 "s_register_operand" "=r")
1663 (mult:SI (ashiftrt:SI
1664 (match_operand:SI 1 "s_register_operand" "r")
1665 (const_int 16))
1666 (ashiftrt:SI
1667 (match_operand:SI 2 "s_register_operand" "r")
1668 (const_int 16))))]
1669 "TARGET_DSP_MULTIPLY"
1670 "smultt%?\\t%0, %1, %2"
1671 [(set_attr "insn" "smulxy")
1672 (set_attr "predicable" "yes")]
1673 )
1674
1675 (define_insn "*mulhisi3addsi"
1676 [(set (match_operand:SI 0 "s_register_operand" "=r")
1677 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1678 (mult:SI (sign_extend:SI
1679 (match_operand:HI 2 "s_register_operand" "%r"))
1680 (sign_extend:SI
1681 (match_operand:HI 3 "s_register_operand" "r")))))]
1682 "TARGET_DSP_MULTIPLY"
1683 "smlabb%?\\t%0, %2, %3, %1"
1684 [(set_attr "insn" "smlaxy")
1685 (set_attr "predicable" "yes")]
1686 )
1687
1688 (define_insn "*mulhidi3adddi"
1689 [(set (match_operand:DI 0 "s_register_operand" "=r")
1690 (plus:DI
1691 (match_operand:DI 1 "s_register_operand" "0")
1692 (mult:DI (sign_extend:DI
1693 (match_operand:HI 2 "s_register_operand" "%r"))
1694 (sign_extend:DI
1695 (match_operand:HI 3 "s_register_operand" "r")))))]
1696 "TARGET_DSP_MULTIPLY"
1697 "smlalbb%?\\t%Q0, %R0, %2, %3"
1698 [(set_attr "insn" "smlalxy")
1699 (set_attr "predicable" "yes")])
1700
1701 (define_expand "mulsf3"
1702 [(set (match_operand:SF 0 "s_register_operand" "")
1703 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1704 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1705 "TARGET_32BIT && TARGET_HARD_FLOAT"
1706 "
1707 if (TARGET_MAVERICK
1708 && !cirrus_fp_register (operands[2], SFmode))
1709 operands[2] = force_reg (SFmode, operands[2]);
1710 ")
1711
1712 (define_expand "muldf3"
1713 [(set (match_operand:DF 0 "s_register_operand" "")
1714 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1715 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1716 "TARGET_32BIT && TARGET_HARD_FLOAT"
1717 "
1718 if (TARGET_MAVERICK
1719 && !cirrus_fp_register (operands[2], DFmode))
1720 operands[2] = force_reg (DFmode, operands[2]);
1721 ")
1722 \f
1723 ;; Division insns
1724
1725 (define_expand "divsf3"
1726 [(set (match_operand:SF 0 "s_register_operand" "")
1727 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1728 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1729 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1730 "")
1731
1732 (define_expand "divdf3"
1733 [(set (match_operand:DF 0 "s_register_operand" "")
1734 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1735 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1736 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1737 "")
1738 \f
1739 ;; Modulo insns
1740
1741 (define_expand "modsf3"
1742 [(set (match_operand:SF 0 "s_register_operand" "")
1743 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1744 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1745 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1746 "")
1747
1748 (define_expand "moddf3"
1749 [(set (match_operand:DF 0 "s_register_operand" "")
1750 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1751 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1752 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1753 "")
1754 \f
1755 ;; Boolean and, ior, xor insns
1756
1757 ;; Split up double-word logical operations
1758
1759 ;; Split up simple DImode logical operations. Simply perform the logical
1760 ;; operation on the upper and lower halves of the registers.
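;; For example, a DImode AND becomes an SImode AND of the low words
;; followed by an SImode AND of the high words.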
1761 (define_split
1762 [(set (match_operand:DI 0 "s_register_operand" "")
1763 (match_operator:DI 6 "logical_binary_operator"
1764 [(match_operand:DI 1 "s_register_operand" "")
1765 (match_operand:DI 2 "s_register_operand" "")]))]
1766 "TARGET_32BIT && reload_completed
1767 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1768 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1769 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1770 "
1771 {
1772 operands[3] = gen_highpart (SImode, operands[0]);
1773 operands[0] = gen_lowpart (SImode, operands[0]);
1774 operands[4] = gen_highpart (SImode, operands[1]);
1775 operands[1] = gen_lowpart (SImode, operands[1]);
1776 operands[5] = gen_highpart (SImode, operands[2]);
1777 operands[2] = gen_lowpart (SImode, operands[2]);
1778 }"
1779 )
1780
1781 (define_split
1782 [(set (match_operand:DI 0 "s_register_operand" "")
1783 (match_operator:DI 6 "logical_binary_operator"
1784 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1785 (match_operand:DI 1 "s_register_operand" "")]))]
1786 "TARGET_32BIT && reload_completed"
1787 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1788 (set (match_dup 3) (match_op_dup:SI 6
1789 [(ashiftrt:SI (match_dup 2) (const_int 31))
1790 (match_dup 4)]))]
1791 "
1792 {
1793 operands[3] = gen_highpart (SImode, operands[0]);
1794 operands[0] = gen_lowpart (SImode, operands[0]);
1795 operands[4] = gen_highpart (SImode, operands[1]);
1796 operands[1] = gen_lowpart (SImode, operands[1]);
1797 operands[5] = gen_highpart (SImode, operands[2]);
1798 operands[2] = gen_lowpart (SImode, operands[2]);
1799 }"
1800 )
1801
1802 ;; The zero extend of operand 2 means we can just copy the high part of
1803 ;; operand1 into operand0.
1804 (define_split
1805 [(set (match_operand:DI 0 "s_register_operand" "")
1806 (ior:DI
1807 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1808 (match_operand:DI 1 "s_register_operand" "")))]
1809 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1810 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1811 (set (match_dup 3) (match_dup 4))]
1812 "
1813 {
1814 operands[4] = gen_highpart (SImode, operands[1]);
1815 operands[3] = gen_highpart (SImode, operands[0]);
1816 operands[0] = gen_lowpart (SImode, operands[0]);
1817 operands[1] = gen_lowpart (SImode, operands[1]);
1818 }"
1819 )
1820
1821 ;; The zero extend of operand 2 means we can just copy the high part of
1822 ;; operand1 into operand0.
1823 (define_split
1824 [(set (match_operand:DI 0 "s_register_operand" "")
1825 (xor:DI
1826 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1827 (match_operand:DI 1 "s_register_operand" "")))]
1828 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1829 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1830 (set (match_dup 3) (match_dup 4))]
1831 "
1832 {
1833 operands[4] = gen_highpart (SImode, operands[1]);
1834 operands[3] = gen_highpart (SImode, operands[0]);
1835 operands[0] = gen_lowpart (SImode, operands[0]);
1836 operands[1] = gen_lowpart (SImode, operands[1]);
1837 }"
1838 )
1839
1840 (define_insn "anddi3"
1841 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1842 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1843 (match_operand:DI 2 "s_register_operand" "r,r")))]
1844 "TARGET_32BIT && ! TARGET_IWMMXT"
1845 "#"
1846 [(set_attr "length" "8")]
1847 )
1848
1849 (define_insn_and_split "*anddi_zesidi_di"
1850 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1851 (and:DI (zero_extend:DI
1852 (match_operand:SI 2 "s_register_operand" "r,r"))
1853 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1854 "TARGET_32BIT"
1855 "#"
1856 "TARGET_32BIT && reload_completed"
1857 ; The zero extend of operand 2 clears the high word of the output
1858 ; operand.
1859 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1860 (set (match_dup 3) (const_int 0))]
1861 "
1862 {
1863 operands[3] = gen_highpart (SImode, operands[0]);
1864 operands[0] = gen_lowpart (SImode, operands[0]);
1865 operands[1] = gen_lowpart (SImode, operands[1]);
1866 }"
1867 [(set_attr "length" "8")]
1868 )
1869
1870 (define_insn "*anddi_sesdi_di"
1871 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1872 (and:DI (sign_extend:DI
1873 (match_operand:SI 2 "s_register_operand" "r,r"))
1874 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1875 "TARGET_32BIT"
1876 "#"
1877 [(set_attr "length" "8")]
1878 )
1879
1880 (define_expand "andsi3"
1881 [(set (match_operand:SI 0 "s_register_operand" "")
1882 (and:SI (match_operand:SI 1 "s_register_operand" "")
1883 (match_operand:SI 2 "reg_or_int_operand" "")))]
1884 "TARGET_EITHER"
1885 "
1886 if (TARGET_32BIT)
1887 {
1888 if (GET_CODE (operands[2]) == CONST_INT)
1889 {
1890 arm_split_constant (AND, SImode, NULL_RTX,
1891 INTVAL (operands[2]), operands[0],
1892 operands[1], optimize && can_create_pseudo_p ());
1893
1894 DONE;
1895 }
1896 }
1897 else /* TARGET_THUMB1 */
1898 {
1899 if (GET_CODE (operands[2]) != CONST_INT)
1900 operands[2] = force_reg (SImode, operands[2]);
1901 else
1902 {
1903 int i;
1904
1905 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1906 {
1907 operands[2] = force_reg (SImode,
1908 GEN_INT (~INTVAL (operands[2])));
1909
1910 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1911
1912 DONE;
1913 }
1914
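          /* Look for masks that can be done with a zero-extract or a shift
             pair: (1 << i) - 1 keeps only the low i bits, while its
             complement clears the low i bits via a right shift followed by
             a left shift.  */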
1915 for (i = 9; i <= 31; i++)
1916 {
1917 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1918 {
1919 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1920 const0_rtx));
1921 DONE;
1922 }
1923 else if ((((HOST_WIDE_INT) 1) << i) - 1
1924 == ~INTVAL (operands[2]))
1925 {
1926 rtx shift = GEN_INT (i);
1927 rtx reg = gen_reg_rtx (SImode);
1928
1929 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1930 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1931
1932 DONE;
1933 }
1934 }
1935
1936 operands[2] = force_reg (SImode, operands[2]);
1937 }
1938 }
1939 "
1940 )
1941
1942 ; ??? Check split length for Thumb-2
1943 (define_insn_and_split "*arm_andsi3_insn"
1944 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1945 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1946 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1947 "TARGET_32BIT"
1948 "@
1949 and%?\\t%0, %1, %2
1950 bic%?\\t%0, %1, #%B2
1951 #"
1952 "TARGET_32BIT
1953 && GET_CODE (operands[2]) == CONST_INT
1954 && !(const_ok_for_arm (INTVAL (operands[2]))
1955 || const_ok_for_arm (~INTVAL (operands[2])))"
1956 [(clobber (const_int 0))]
1957 "
1958 arm_split_constant (AND, SImode, curr_insn,
1959 INTVAL (operands[2]), operands[0], operands[1], 0);
1960 DONE;
1961 "
1962 [(set_attr "length" "4,4,16")
1963 (set_attr "predicable" "yes")]
1964 )
1965
1966 (define_insn "*thumb1_andsi3_insn"
1967 [(set (match_operand:SI 0 "register_operand" "=l")
1968 (and:SI (match_operand:SI 1 "register_operand" "%0")
1969 (match_operand:SI 2 "register_operand" "l")))]
1970 "TARGET_THUMB1"
1971 "and\\t%0, %0, %2"
1972 [(set_attr "length" "2")]
1973 )
1974
1975 (define_insn "*andsi3_compare0"
1976 [(set (reg:CC_NOOV CC_REGNUM)
1977 (compare:CC_NOOV
1978 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1979 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1980 (const_int 0)))
1981 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1982 (and:SI (match_dup 1) (match_dup 2)))]
1983 "TARGET_32BIT"
1984 "@
1985 and%.\\t%0, %1, %2
1986 bic%.\\t%0, %1, #%B2"
1987 [(set_attr "conds" "set")]
1988 )
1989
1990 (define_insn "*andsi3_compare0_scratch"
1991 [(set (reg:CC_NOOV CC_REGNUM)
1992 (compare:CC_NOOV
1993 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1994 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1995 (const_int 0)))
1996 (clobber (match_scratch:SI 2 "=X,r"))]
1997 "TARGET_32BIT"
1998 "@
1999 tst%?\\t%0, %1
2000 bic%.\\t%2, %0, #%B1"
2001 [(set_attr "conds" "set")]
2002 )
2003
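;; The width/position conditions below ensure that the mask built in the
;; output template, ((1 << width) - 1) << position, is a valid ARM
;; data-processing immediate (an 8-bit value rotated right by an even
;; amount), so the test needs only a single TST.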
2004 (define_insn "*zeroextractsi_compare0_scratch"
2005 [(set (reg:CC_NOOV CC_REGNUM)
2006 (compare:CC_NOOV (zero_extract:SI
2007 (match_operand:SI 0 "s_register_operand" "r")
2008 (match_operand 1 "const_int_operand" "n")
2009 (match_operand 2 "const_int_operand" "n"))
2010 (const_int 0)))]
2011 "TARGET_32BIT
2012 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2013 && INTVAL (operands[1]) > 0
2014 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2015 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2016 "*
2017 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2018 << INTVAL (operands[2]));
2019 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2020 return \"\";
2021 "
2022 [(set_attr "conds" "set")]
2023 )
2024
2025 (define_insn_and_split "*ne_zeroextractsi"
2026 [(set (match_operand:SI 0 "s_register_operand" "=r")
2027 (ne:SI (zero_extract:SI
2028 (match_operand:SI 1 "s_register_operand" "r")
2029 (match_operand:SI 2 "const_int_operand" "n")
2030 (match_operand:SI 3 "const_int_operand" "n"))
2031 (const_int 0)))
2032 (clobber (reg:CC CC_REGNUM))]
2033 "TARGET_32BIT
2034 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2035 && INTVAL (operands[2]) > 0
2036 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2037 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2038 "#"
2039 "TARGET_32BIT
2040 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2041 && INTVAL (operands[2]) > 0
2042 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2043 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2044 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2045 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2046 (const_int 0)))
2047 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2048 (set (match_dup 0)
2049 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2050 (match_dup 0) (const_int 1)))]
2051 "
2052 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2053 << INTVAL (operands[3]));
2054 "
2055 [(set_attr "conds" "clob")
2056 (set (attr "length")
2057 (if_then_else (eq_attr "is_thumb" "yes")
2058 (const_int 12)
2059 (const_int 8)))]
2060 )
2061
2062 (define_insn_and_split "*ne_zeroextractsi_shifted"
2063 [(set (match_operand:SI 0 "s_register_operand" "=r")
2064 (ne:SI (zero_extract:SI
2065 (match_operand:SI 1 "s_register_operand" "r")
2066 (match_operand:SI 2 "const_int_operand" "n")
2067 (const_int 0))
2068 (const_int 0)))
2069 (clobber (reg:CC CC_REGNUM))]
2070 "TARGET_ARM"
2071 "#"
2072 "TARGET_ARM"
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2075 (const_int 0)))
2076 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2077 (set (match_dup 0)
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2080 "
2081 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2082 "
2083 [(set_attr "conds" "clob")
2084 (set_attr "length" "8")]
2085 )
2086
2087 (define_insn_and_split "*ite_ne_zeroextractsi"
2088 [(set (match_operand:SI 0 "s_register_operand" "=r")
2089 (if_then_else:SI (ne (zero_extract:SI
2090 (match_operand:SI 1 "s_register_operand" "r")
2091 (match_operand:SI 2 "const_int_operand" "n")
2092 (match_operand:SI 3 "const_int_operand" "n"))
2093 (const_int 0))
2094 (match_operand:SI 4 "arm_not_operand" "rIK")
2095 (const_int 0)))
2096 (clobber (reg:CC CC_REGNUM))]
2097 "TARGET_ARM
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2102 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2103 "#"
2104 "TARGET_ARM
2105 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2106 && INTVAL (operands[2]) > 0
2107 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2108 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2109 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2110 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2111 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2112 (const_int 0)))
2113 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2114 (set (match_dup 0)
2115 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2116 (match_dup 0) (match_dup 4)))]
2117 "
2118 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2119 << INTVAL (operands[3]));
2120 "
2121 [(set_attr "conds" "clob")
2122 (set_attr "length" "8")]
2123 )
2124
2125 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2126 [(set (match_operand:SI 0 "s_register_operand" "=r")
2127 (if_then_else:SI (ne (zero_extract:SI
2128 (match_operand:SI 1 "s_register_operand" "r")
2129 (match_operand:SI 2 "const_int_operand" "n")
2130 (const_int 0))
2131 (const_int 0))
2132 (match_operand:SI 3 "arm_not_operand" "rIK")
2133 (const_int 0)))
2134 (clobber (reg:CC CC_REGNUM))]
2135 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2136 "#"
2137 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2138 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2139 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2140 (const_int 0)))
2141 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2142 (set (match_dup 0)
2143 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2144 (match_dup 0) (match_dup 3)))]
2145 "
2146 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2147 "
2148 [(set_attr "conds" "clob")
2149 (set_attr "length" "8")]
2150 )
2151
2152 (define_split
2153 [(set (match_operand:SI 0 "s_register_operand" "")
2154 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2155 (match_operand:SI 2 "const_int_operand" "")
2156 (match_operand:SI 3 "const_int_operand" "")))
2157 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2158 "TARGET_THUMB1"
2159 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2160 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2161 "{
2162 HOST_WIDE_INT temp = INTVAL (operands[2]);
2163
2164 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2165 operands[3] = GEN_INT (32 - temp);
2166 }"
2167 )
2168
2169 ;; ??? Use the Thumb-2 bitfield insert/extract instructions where available.
2170 (define_split
2171 [(set (match_operand:SI 0 "s_register_operand" "")
2172 (match_operator:SI 1 "shiftable_operator"
2173 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2174 (match_operand:SI 3 "const_int_operand" "")
2175 (match_operand:SI 4 "const_int_operand" ""))
2176 (match_operand:SI 5 "s_register_operand" "")]))
2177 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2178 "TARGET_ARM"
2179 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2180 (set (match_dup 0)
2181 (match_op_dup 1
2182 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2183 (match_dup 5)]))]
2184 "{
2185 HOST_WIDE_INT temp = INTVAL (operands[3]);
2186
2187 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2188 operands[4] = GEN_INT (32 - temp);
2189 }"
2190 )
2191
2192 (define_split
2193 [(set (match_operand:SI 0 "s_register_operand" "")
2194 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2195 (match_operand:SI 2 "const_int_operand" "")
2196 (match_operand:SI 3 "const_int_operand" "")))]
2197 "TARGET_THUMB1"
2198 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2199 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2200 "{
2201 HOST_WIDE_INT temp = INTVAL (operands[2]);
2202
2203 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2204 operands[3] = GEN_INT (32 - temp);
2205 }"
2206 )
2207
2208 (define_split
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (match_operator:SI 1 "shiftable_operator"
2211 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")
2213 (match_operand:SI 4 "const_int_operand" ""))
2214 (match_operand:SI 5 "s_register_operand" "")]))
2215 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2216 "TARGET_ARM"
2217 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2218 (set (match_dup 0)
2219 (match_op_dup 1
2220 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2221 (match_dup 5)]))]
2222 "{
2223 HOST_WIDE_INT temp = INTVAL (operands[3]);
2224
2225 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2226 operands[4] = GEN_INT (32 - temp);
2227 }"
2228 )
2229
2230 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2231 ;;; represented by the bitfield, then this will produce incorrect results.
2232 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2233 ;;; which have a real bit-field insert instruction, the truncation happens
2234 ;;; in the bit-field insert instruction itself. Since the ARM does not have
2235 ;;; a bit-field insert instruction, we would have to emit code here to truncate
2236 ;;; the value before we insert. This loses some of the advantage of having
2237 ;;; this insv pattern, so this pattern needs to be reevaluated.
2238
2239 (define_expand "insv"
2240 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2241 (match_operand:SI 1 "general_operand" "")
2242 (match_operand:SI 2 "general_operand" ""))
2243 (match_operand:SI 3 "reg_or_int_operand" ""))]
2244 "TARGET_ARM || arm_arch_thumb2"
2245 "
2246 {
2247 int start_bit = INTVAL (operands[2]);
2248 int width = INTVAL (operands[1]);
2249 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2250 rtx target, subtarget;
2251
2252 if (arm_arch_thumb2)
2253 {
2254 bool use_bfi = TRUE;
2255
2256 if (GET_CODE (operands[3]) == CONST_INT)
2257 {
2258 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2259
2260 if (val == 0)
2261 {
2262 emit_insn (gen_insv_zero (operands[0], operands[1],
2263 operands[2]));
2264 DONE;
2265 }
2266
2267 /* See if the set can be done with a single orr instruction. */
2268 if (val == mask && const_ok_for_arm (val << start_bit))
2269 use_bfi = FALSE;
2270 }
2271
2272 if (use_bfi)
2273 {
2274 if (GET_CODE (operands[3]) != REG)
2275 operands[3] = force_reg (SImode, operands[3]);
2276
2277 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2278 operands[3]));
2279 DONE;
2280 }
2281 }
2282
2283 target = copy_rtx (operands[0]);
2284 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2285 subreg as the final target. */
2286 if (GET_CODE (target) == SUBREG)
2287 {
2288 subtarget = gen_reg_rtx (SImode);
2289 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2290 < GET_MODE_SIZE (SImode))
2291 target = SUBREG_REG (target);
2292 }
2293 else
2294 subtarget = target;
2295
2296 if (GET_CODE (operands[3]) == CONST_INT)
2297 {
2298 /* Since we are inserting a known constant, we may be able to
2299 reduce the number of bits that we have to clear so that
2300 the mask becomes simple. */
2301 /* ??? This code does not check to see if the new mask is actually
2302 simpler. It may not be. */
2303 rtx op1 = gen_reg_rtx (SImode);
2304 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2305 start of this pattern. */
2306 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2307 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2308
2309 emit_insn (gen_andsi3 (op1, operands[0],
2310 gen_int_mode (~mask2, SImode)));
2311 emit_insn (gen_iorsi3 (subtarget, op1,
2312 gen_int_mode (op3_value << start_bit, SImode)));
2313 }
2314 else if (start_bit == 0
2315 && !(const_ok_for_arm (mask)
2316 || const_ok_for_arm (~mask)))
2317 {
2318 /* A trick: since we are setting the bottom bits of the word,
2319 we can shift operand[3] up, shift operand[0] down, OR them together
2320 and rotate the result back again. This takes 3 insns, and
2321 the third might be mergeable into another op. */
2322 /* The shift up copes with the possibility that operand[3] is
2323 wider than the bitfield. */
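          /* For example, with width == 8: op0 = value << 24, op1 = dest >> 8;
             OR them together, then rotate left by 8 to put the new low byte
             in place while restoring the remaining 24 bits of dest.  */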
2324 rtx op0 = gen_reg_rtx (SImode);
2325 rtx op1 = gen_reg_rtx (SImode);
2326
2327 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2328 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2329 emit_insn (gen_iorsi3 (op1, op1, op0));
2330 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2331 }
2332 else if ((width + start_bit == 32)
2333 && !(const_ok_for_arm (mask)
2334 || const_ok_for_arm (~mask)))
2335 {
2336 /* Similar trick, but slightly less efficient. */
2337
2338 rtx op0 = gen_reg_rtx (SImode);
2339 rtx op1 = gen_reg_rtx (SImode);
2340
2341 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2342 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2343 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2344 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2345 }
2346 else
2347 {
2348 rtx op0 = gen_int_mode (mask, SImode);
2349 rtx op1 = gen_reg_rtx (SImode);
2350 rtx op2 = gen_reg_rtx (SImode);
2351
2352 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2353 {
2354 rtx tmp = gen_reg_rtx (SImode);
2355
2356 emit_insn (gen_movsi (tmp, op0));
2357 op0 = tmp;
2358 }
2359
2360 /* Mask out any bits in operand[3] that are not needed. */
2361 emit_insn (gen_andsi3 (op1, operands[3], op0));
2362
2363 if (GET_CODE (op0) == CONST_INT
2364 && (const_ok_for_arm (mask << start_bit)
2365 || const_ok_for_arm (~(mask << start_bit))))
2366 {
2367 op0 = gen_int_mode (~(mask << start_bit), SImode);
2368 emit_insn (gen_andsi3 (op2, operands[0], op0));
2369 }
2370 else
2371 {
2372 if (GET_CODE (op0) == CONST_INT)
2373 {
2374 rtx tmp = gen_reg_rtx (SImode);
2375
2376 emit_insn (gen_movsi (tmp, op0));
2377 op0 = tmp;
2378 }
2379
2380 if (start_bit != 0)
2381 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2382
2383 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2384 }
2385
2386 if (start_bit != 0)
2387 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2388
2389 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2390 }
2391
2392 if (subtarget != target)
2393 {
2394 /* If TARGET is still a SUBREG, then it must be wider than a word,
2395 so we must be careful only to set the subword we were asked to. */
2396 if (GET_CODE (target) == SUBREG)
2397 emit_move_insn (target, subtarget);
2398 else
2399 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2400 }
2401
2402 DONE;
2403 }"
2404 )
2405
2406 (define_insn "insv_zero"
2407 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2408 (match_operand:SI 1 "const_int_operand" "M")
2409 (match_operand:SI 2 "const_int_operand" "M"))
2410 (const_int 0))]
2411 "arm_arch_thumb2"
2412 "bfc%?\t%0, %2, %1"
2413 [(set_attr "length" "4")
2414 (set_attr "predicable" "yes")]
2415 )
2416
2417 (define_insn "insv_t2"
2418 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2419 (match_operand:SI 1 "const_int_operand" "M")
2420 (match_operand:SI 2 "const_int_operand" "M"))
2421 (match_operand:SI 3 "s_register_operand" "r"))]
2422 "arm_arch_thumb2"
2423 "bfi%?\t%0, %3, %2, %1"
2424 [(set_attr "length" "4")
2425 (set_attr "predicable" "yes")]
2426 )
2427
2428 ; Constants for op 2 will never be given to these patterns.
2429 (define_insn_and_split "*anddi_notdi_di"
2430 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2431 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2432 (match_operand:DI 2 "s_register_operand" "0,r")))]
2433 "TARGET_32BIT"
2434 "#"
2435 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2436 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2437 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2438 "
2439 {
2440 operands[3] = gen_highpart (SImode, operands[0]);
2441 operands[0] = gen_lowpart (SImode, operands[0]);
2442 operands[4] = gen_highpart (SImode, operands[1]);
2443 operands[1] = gen_lowpart (SImode, operands[1]);
2444 operands[5] = gen_highpart (SImode, operands[2]);
2445 operands[2] = gen_lowpart (SImode, operands[2]);
2446 }"
2447 [(set_attr "length" "8")
2448 (set_attr "predicable" "yes")]
2449 )
2450
2451 (define_insn_and_split "*anddi_notzesidi_di"
2452 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2453 (and:DI (not:DI (zero_extend:DI
2454 (match_operand:SI 2 "s_register_operand" "r,r")))
2455 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2456 "TARGET_32BIT"
2457 "@
2458 bic%?\\t%Q0, %Q1, %2
2459 #"
2460 ; (not (zero_extend ...)) allows us to just copy the high word from
2461 ; operand1 to operand0.
2462 "TARGET_32BIT
2463 && reload_completed
2464 && operands[0] != operands[1]"
2465 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2466 (set (match_dup 3) (match_dup 4))]
2467 "
2468 {
2469 operands[3] = gen_highpart (SImode, operands[0]);
2470 operands[0] = gen_lowpart (SImode, operands[0]);
2471 operands[4] = gen_highpart (SImode, operands[1]);
2472 operands[1] = gen_lowpart (SImode, operands[1]);
2473 }"
2474 [(set_attr "length" "4,8")
2475 (set_attr "predicable" "yes")]
2476 )
2477
2478 (define_insn_and_split "*anddi_notsesidi_di"
2479 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2480 (and:DI (not:DI (sign_extend:DI
2481 (match_operand:SI 2 "s_register_operand" "r,r")))
2482 (match_operand:DI 1 "s_register_operand" "0,r")))]
2483 "TARGET_32BIT"
2484 "#"
2485 "TARGET_32BIT && reload_completed"
2486 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2487 (set (match_dup 3) (and:SI (not:SI
2488 (ashiftrt:SI (match_dup 2) (const_int 31)))
2489 (match_dup 4)))]
2490 "
2491 {
2492 operands[3] = gen_highpart (SImode, operands[0]);
2493 operands[0] = gen_lowpart (SImode, operands[0]);
2494 operands[4] = gen_highpart (SImode, operands[1]);
2495 operands[1] = gen_lowpart (SImode, operands[1]);
2496 }"
2497 [(set_attr "length" "8")
2498 (set_attr "predicable" "yes")]
2499 )
2500
2501 (define_insn "andsi_notsi_si"
2502 [(set (match_operand:SI 0 "s_register_operand" "=r")
2503 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2504 (match_operand:SI 1 "s_register_operand" "r")))]
2505 "TARGET_32BIT"
2506 "bic%?\\t%0, %1, %2"
2507 [(set_attr "predicable" "yes")]
2508 )
2509
2510 (define_insn "bicsi3"
2511 [(set (match_operand:SI 0 "register_operand" "=l")
2512 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2513 (match_operand:SI 2 "register_operand" "0")))]
2514 "TARGET_THUMB1"
2515 "bic\\t%0, %0, %1"
2516 [(set_attr "length" "2")]
2517 )
2518
2519 (define_insn "andsi_not_shiftsi_si"
2520 [(set (match_operand:SI 0 "s_register_operand" "=r")
2521 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2522 [(match_operand:SI 2 "s_register_operand" "r")
2523 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2524 (match_operand:SI 1 "s_register_operand" "r")))]
2525 "TARGET_ARM"
2526 "bic%?\\t%0, %1, %2%S4"
2527 [(set_attr "predicable" "yes")
2528 (set_attr "shift" "2")
2529 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2530 (const_string "alu_shift")
2531 (const_string "alu_shift_reg")))]
2532 )
2533
2534 (define_insn "*andsi_notsi_si_compare0"
2535 [(set (reg:CC_NOOV CC_REGNUM)
2536 (compare:CC_NOOV
2537 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2538 (match_operand:SI 1 "s_register_operand" "r"))
2539 (const_int 0)))
2540 (set (match_operand:SI 0 "s_register_operand" "=r")
2541 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2542 "TARGET_32BIT"
2543 "bic%.\\t%0, %1, %2"
2544 [(set_attr "conds" "set")]
2545 )
2546
2547 (define_insn "*andsi_notsi_si_compare0_scratch"
2548 [(set (reg:CC_NOOV CC_REGNUM)
2549 (compare:CC_NOOV
2550 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2551 (match_operand:SI 1 "s_register_operand" "r"))
2552 (const_int 0)))
2553 (clobber (match_scratch:SI 0 "=r"))]
2554 "TARGET_32BIT"
2555 "bic%.\\t%0, %1, %2"
2556 [(set_attr "conds" "set")]
2557 )
2558
2559 (define_insn "iordi3"
2560 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2561 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2562 (match_operand:DI 2 "s_register_operand" "r,r")))]
2563 "TARGET_32BIT && ! TARGET_IWMMXT"
2564 "#"
2565 [(set_attr "length" "8")
2566 (set_attr "predicable" "yes")]
2567 )
2568
2569 (define_insn "*iordi_zesidi_di"
2570 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2571 (ior:DI (zero_extend:DI
2572 (match_operand:SI 2 "s_register_operand" "r,r"))
2573 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2574 "TARGET_32BIT"
2575 "@
2576 orr%?\\t%Q0, %Q1, %2
2577 #"
2578 [(set_attr "length" "4,8")
2579 (set_attr "predicable" "yes")]
2580 )
2581
2582 (define_insn "*iordi_sesidi_di"
2583 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2584 (ior:DI (sign_extend:DI
2585 (match_operand:SI 2 "s_register_operand" "r,r"))
2586 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2587 "TARGET_32BIT"
2588 "#"
2589 [(set_attr "length" "8")
2590 (set_attr "predicable" "yes")]
2591 )
2592
2593 (define_expand "iorsi3"
2594 [(set (match_operand:SI 0 "s_register_operand" "")
2595 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2596 (match_operand:SI 2 "reg_or_int_operand" "")))]
2597 "TARGET_EITHER"
2598 "
2599 if (GET_CODE (operands[2]) == CONST_INT)
2600 {
2601 if (TARGET_32BIT)
2602 {
2603 arm_split_constant (IOR, SImode, NULL_RTX,
2604 INTVAL (operands[2]), operands[0], operands[1],
2605 optimize && can_create_pseudo_p ());
2606 DONE;
2607 }
2608 else /* TARGET_THUMB1 */
2609 operands [2] = force_reg (SImode, operands [2]);
2610 }
2611 "
2612 )
2613
2614 (define_insn_and_split "*arm_iorsi3"
2615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2616 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2617 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2618 "TARGET_32BIT"
2619 "@
2620 orr%?\\t%0, %1, %2
2621 #"
2622 "TARGET_32BIT
2623 && GET_CODE (operands[2]) == CONST_INT
2624 && !const_ok_for_arm (INTVAL (operands[2]))"
2625 [(clobber (const_int 0))]
2626 "
2627 arm_split_constant (IOR, SImode, curr_insn,
2628 INTVAL (operands[2]), operands[0], operands[1], 0);
2629 DONE;
2630 "
2631 [(set_attr "length" "4,16")
2632 (set_attr "predicable" "yes")]
2633 )
2634
2635 (define_insn "*thumb1_iorsi3"
2636 [(set (match_operand:SI 0 "register_operand" "=l")
2637 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2638 (match_operand:SI 2 "register_operand" "l")))]
2639 "TARGET_THUMB1"
2640 "orr\\t%0, %0, %2"
2641 [(set_attr "length" "2")]
2642 )
2643
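;; If an IOR constant is not a valid immediate but its complement is, it
;; can be materialized in a scratch register (typically with a single MVN)
;; and the IOR then done register-to-register.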
2644 (define_peephole2
2645 [(match_scratch:SI 3 "r")
2646 (set (match_operand:SI 0 "arm_general_register_operand" "")
2647 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2648 (match_operand:SI 2 "const_int_operand" "")))]
2649 "TARGET_32BIT
2650 && !const_ok_for_arm (INTVAL (operands[2]))
2651 && const_ok_for_arm (~INTVAL (operands[2]))"
2652 [(set (match_dup 3) (match_dup 2))
2653 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2654 ""
2655 )
2656
2657 (define_insn "*iorsi3_compare0"
2658 [(set (reg:CC_NOOV CC_REGNUM)
2659 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2660 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2661 (const_int 0)))
2662 (set (match_operand:SI 0 "s_register_operand" "=r")
2663 (ior:SI (match_dup 1) (match_dup 2)))]
2664 "TARGET_32BIT"
2665 "orr%.\\t%0, %1, %2"
2666 [(set_attr "conds" "set")]
2667 )
2668
2669 (define_insn "*iorsi3_compare0_scratch"
2670 [(set (reg:CC_NOOV CC_REGNUM)
2671 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2672 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2673 (const_int 0)))
2674 (clobber (match_scratch:SI 0 "=r"))]
2675 "TARGET_32BIT"
2676 "orr%.\\t%0, %1, %2"
2677 [(set_attr "conds" "set")]
2678 )
2679
2680 (define_insn "xordi3"
2681 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2682 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2683 (match_operand:DI 2 "s_register_operand" "r,r")))]
2684 "TARGET_32BIT && !TARGET_IWMMXT"
2685 "#"
2686 [(set_attr "length" "8")
2687 (set_attr "predicable" "yes")]
2688 )
2689
2690 (define_insn "*xordi_zesidi_di"
2691 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2692 (xor:DI (zero_extend:DI
2693 (match_operand:SI 2 "s_register_operand" "r,r"))
2694 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2695 "TARGET_32BIT"
2696 "@
2697 eor%?\\t%Q0, %Q1, %2
2698 #"
2699 [(set_attr "length" "4,8")
2700 (set_attr "predicable" "yes")]
2701 )
2702
2703 (define_insn "*xordi_sesidi_di"
2704 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2705 (xor:DI (sign_extend:DI
2706 (match_operand:SI 2 "s_register_operand" "r,r"))
2707 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2708 "TARGET_32BIT"
2709 "#"
2710 [(set_attr "length" "8")
2711 (set_attr "predicable" "yes")]
2712 )
2713
2714 (define_expand "xorsi3"
2715 [(set (match_operand:SI 0 "s_register_operand" "")
2716 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2717 (match_operand:SI 2 "arm_rhs_operand" "")))]
2718 "TARGET_EITHER"
2719 "if (TARGET_THUMB1)
2720 if (GET_CODE (operands[2]) == CONST_INT)
2721 operands[2] = force_reg (SImode, operands[2]);
2722 "
2723 )
2724
2725 (define_insn "*arm_xorsi3"
2726 [(set (match_operand:SI 0 "s_register_operand" "=r")
2727 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2728 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2729 "TARGET_32BIT"
2730 "eor%?\\t%0, %1, %2"
2731 [(set_attr "predicable" "yes")]
2732 )
2733
2734 (define_insn "*thumb1_xorsi3"
2735 [(set (match_operand:SI 0 "register_operand" "=l")
2736 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2737 (match_operand:SI 2 "register_operand" "l")))]
2738 "TARGET_THUMB1"
2739 "eor\\t%0, %0, %2"
2740 [(set_attr "length" "2")]
2741 )
2742
2743 (define_insn "*xorsi3_compare0"
2744 [(set (reg:CC_NOOV CC_REGNUM)
2745 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2746 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2747 (const_int 0)))
2748 (set (match_operand:SI 0 "s_register_operand" "=r")
2749 (xor:SI (match_dup 1) (match_dup 2)))]
2750 "TARGET_32BIT"
2751 "eor%.\\t%0, %1, %2"
2752 [(set_attr "conds" "set")]
2753 )
2754
2755 (define_insn "*xorsi3_compare0_scratch"
2756 [(set (reg:CC_NOOV CC_REGNUM)
2757 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2758 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2759 (const_int 0)))]
2760 "TARGET_32BIT"
2761 "teq%?\\t%0, %1"
2762 [(set_attr "conds" "set")]
2763 )
2764
2765 ; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
2766 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2767 ; following insns.
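; (AND (NOT A) (NOT B)) is (NOT (IOR A B)), so the whole expression is
; equivalent to (NOT (AND (IOR A B) (NOT C))); D computes the inner AND.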
2768
2769 (define_split
2770 [(set (match_operand:SI 0 "s_register_operand" "")
2771 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2772 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2773 (match_operand:SI 3 "arm_rhs_operand" "")))
2774 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2775 "TARGET_32BIT"
2776 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2777 (not:SI (match_dup 3))))
2778 (set (match_dup 0) (not:SI (match_dup 4)))]
2779 ""
2780 )
2781
2782 (define_insn "*andsi_iorsi3_notsi"
2783 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2784 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2785 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2786 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2787 "TARGET_32BIT"
2788 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2789 [(set_attr "length" "8")
2790 (set_attr "ce_count" "2")
2791 (set_attr "predicable" "yes")]
2792 )
2793
2794 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2795 ; insns are available?
2796 (define_split
2797 [(set (match_operand:SI 0 "s_register_operand" "")
2798 (match_operator:SI 1 "logical_binary_operator"
2799 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2800 (match_operand:SI 3 "const_int_operand" "")
2801 (match_operand:SI 4 "const_int_operand" ""))
2802 (match_operator:SI 9 "logical_binary_operator"
2803 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2804 (match_operand:SI 6 "const_int_operand" ""))
2805 (match_operand:SI 7 "s_register_operand" "")])]))
2806 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2807 "TARGET_32BIT
2808 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2809 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2810 [(set (match_dup 8)
2811 (match_op_dup 1
2812 [(ashift:SI (match_dup 2) (match_dup 4))
2813 (match_dup 5)]))
2814 (set (match_dup 0)
2815 (match_op_dup 1
2816 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2817 (match_dup 7)]))]
2818 "
2819 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2820 ")
2821
2822 (define_split
2823 [(set (match_operand:SI 0 "s_register_operand" "")
2824 (match_operator:SI 1 "logical_binary_operator"
2825 [(match_operator:SI 9 "logical_binary_operator"
2826 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2827 (match_operand:SI 6 "const_int_operand" ""))
2828 (match_operand:SI 7 "s_register_operand" "")])
2829 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2830 (match_operand:SI 3 "const_int_operand" "")
2831 (match_operand:SI 4 "const_int_operand" ""))]))
2832 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2833 "TARGET_32BIT
2834 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2835 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2836 [(set (match_dup 8)
2837 (match_op_dup 1
2838 [(ashift:SI (match_dup 2) (match_dup 4))
2839 (match_dup 5)]))
2840 (set (match_dup 0)
2841 (match_op_dup 1
2842 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2843 (match_dup 7)]))]
2844 "
2845 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2846 ")
2847
2848 (define_split
2849 [(set (match_operand:SI 0 "s_register_operand" "")
2850 (match_operator:SI 1 "logical_binary_operator"
2851 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2852 (match_operand:SI 3 "const_int_operand" "")
2853 (match_operand:SI 4 "const_int_operand" ""))
2854 (match_operator:SI 9 "logical_binary_operator"
2855 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2856 (match_operand:SI 6 "const_int_operand" ""))
2857 (match_operand:SI 7 "s_register_operand" "")])]))
2858 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2859 "TARGET_32BIT
2860 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2861 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2862 [(set (match_dup 8)
2863 (match_op_dup 1
2864 [(ashift:SI (match_dup 2) (match_dup 4))
2865 (match_dup 5)]))
2866 (set (match_dup 0)
2867 (match_op_dup 1
2868 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2869 (match_dup 7)]))]
2870 "
2871 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2872 ")
2873
2874 (define_split
2875 [(set (match_operand:SI 0 "s_register_operand" "")
2876 (match_operator:SI 1 "logical_binary_operator"
2877 [(match_operator:SI 9 "logical_binary_operator"
2878 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2879 (match_operand:SI 6 "const_int_operand" ""))
2880 (match_operand:SI 7 "s_register_operand" "")])
2881 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2882 (match_operand:SI 3 "const_int_operand" "")
2883 (match_operand:SI 4 "const_int_operand" ""))]))
2884 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2885 "TARGET_32BIT
2886 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2887 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2888 [(set (match_dup 8)
2889 (match_op_dup 1
2890 [(ashift:SI (match_dup 2) (match_dup 4))
2891 (match_dup 5)]))
2892 (set (match_dup 0)
2893 (match_op_dup 1
2894 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2895 (match_dup 7)]))]
2896 "
2897 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2898 ")
2899 \f
2900
2901 ;; Minimum and maximum insns
2902
2903 (define_expand "smaxsi3"
2904 [(parallel [
2905 (set (match_operand:SI 0 "s_register_operand" "")
2906 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2907 (match_operand:SI 2 "arm_rhs_operand" "")))
2908 (clobber (reg:CC CC_REGNUM))])]
2909 "TARGET_32BIT"
2910 "
2911 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2912 {
2913 /* No need for a clobber of the condition code register here. */
2914 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2915 gen_rtx_SMAX (SImode, operands[1],
2916 operands[2])));
2917 DONE;
2918 }
2919 ")
2920
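;; %1, asr #31 is zero when %1 is non-negative and all ones when it is
;; negative, so *smax_0 uses BIC for smax (x, 0), *smax_m1 uses ORR for
;; smax (x, -1), and *smin_0 further down uses AND for smin (x, 0), all
;; without needing a compare.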
2921 (define_insn "*smax_0"
2922 [(set (match_operand:SI 0 "s_register_operand" "=r")
2923 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2924 (const_int 0)))]
2925 "TARGET_32BIT"
2926 "bic%?\\t%0, %1, %1, asr #31"
2927 [(set_attr "predicable" "yes")]
2928 )
2929
2930 (define_insn "*smax_m1"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r")
2932 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2933 (const_int -1)))]
2934 "TARGET_32BIT"
2935 "orr%?\\t%0, %1, %1, asr #31"
2936 [(set_attr "predicable" "yes")]
2937 )
2938
2939 (define_insn "*arm_smax_insn"
2940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2941 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2942 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2943 (clobber (reg:CC CC_REGNUM))]
2944 "TARGET_ARM"
2945 "@
2946 cmp\\t%1, %2\;movlt\\t%0, %2
2947 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2948 [(set_attr "conds" "clob")
2949 (set_attr "length" "8,12")]
2950 )
2951
2952 (define_expand "sminsi3"
2953 [(parallel [
2954 (set (match_operand:SI 0 "s_register_operand" "")
2955 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2956 (match_operand:SI 2 "arm_rhs_operand" "")))
2957 (clobber (reg:CC CC_REGNUM))])]
2958 "TARGET_32BIT"
2959 "
2960 if (operands[2] == const0_rtx)
2961 {
2962 /* No need for a clobber of the condition code register here. */
2963 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2964 gen_rtx_SMIN (SImode, operands[1],
2965 operands[2])));
2966 DONE;
2967 }
2968 ")
2969
2970 (define_insn "*smin_0"
2971 [(set (match_operand:SI 0 "s_register_operand" "=r")
2972 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2973 (const_int 0)))]
2974 "TARGET_32BIT"
2975 "and%?\\t%0, %1, %1, asr #31"
2976 [(set_attr "predicable" "yes")]
2977 )
2978
2979 (define_insn "*arm_smin_insn"
2980 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2981 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2982 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2983 (clobber (reg:CC CC_REGNUM))]
2984 "TARGET_ARM"
2985 "@
2986 cmp\\t%1, %2\;movge\\t%0, %2
2987 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2988 [(set_attr "conds" "clob")
2989 (set_attr "length" "8,12")]
2990 )
2991
2992 (define_expand "umaxsi3"
2993 [(parallel [
2994 (set (match_operand:SI 0 "s_register_operand" "")
2995 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2996 (match_operand:SI 2 "arm_rhs_operand" "")))
2997 (clobber (reg:CC CC_REGNUM))])]
2998 "TARGET_32BIT"
2999 ""
3000 )
3001
3002 (define_insn "*arm_umaxsi3"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3004 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3005 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3006 (clobber (reg:CC CC_REGNUM))]
3007 "TARGET_ARM"
3008 "@
3009 cmp\\t%1, %2\;movcc\\t%0, %2
3010 cmp\\t%1, %2\;movcs\\t%0, %1
3011 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3012 [(set_attr "conds" "clob")
3013 (set_attr "length" "8,8,12")]
3014 )
3015
3016 (define_expand "uminsi3"
3017 [(parallel [
3018 (set (match_operand:SI 0 "s_register_operand" "")
3019 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3020 (match_operand:SI 2 "arm_rhs_operand" "")))
3021 (clobber (reg:CC CC_REGNUM))])]
3022 "TARGET_32BIT"
3023 ""
3024 )
3025
3026 (define_insn "*arm_uminsi3"
3027 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3028 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3029 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3030 (clobber (reg:CC CC_REGNUM))]
3031 "TARGET_ARM"
3032 "@
3033 cmp\\t%1, %2\;movcs\\t%0, %2
3034 cmp\\t%1, %2\;movcc\\t%0, %1
3035 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3036 [(set_attr "conds" "clob")
3037 (set_attr "length" "8,8,12")]
3038 )
3039
3040 (define_insn "*store_minmaxsi"
3041 [(set (match_operand:SI 0 "memory_operand" "=m")
3042 (match_operator:SI 3 "minmax_operator"
3043 [(match_operand:SI 1 "s_register_operand" "r")
3044 (match_operand:SI 2 "s_register_operand" "r")]))
3045 (clobber (reg:CC CC_REGNUM))]
3046 "TARGET_32BIT"
3047 "*
3048 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3049 operands[1], operands[2]);
3050 output_asm_insn (\"cmp\\t%1, %2\", operands);
3051 if (TARGET_THUMB2)
3052 output_asm_insn (\"ite\t%d3\", operands);
3053 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3054 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3055 return \"\";
3056 "
3057 [(set_attr "conds" "clob")
3058 (set (attr "length")
3059 (if_then_else (eq_attr "is_thumb" "yes")
3060 (const_int 14)
3061 (const_int 12)))
3062 (set_attr "type" "store1")]
3063 )
3064
3065 ; Reject the frame pointer in operand[1], since reloading this after
3066 ; it has been eliminated can cause carnage.
3067 (define_insn "*minmax_arithsi"
3068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3069 (match_operator:SI 4 "shiftable_operator"
3070 [(match_operator:SI 5 "minmax_operator"
3071 [(match_operand:SI 2 "s_register_operand" "r,r")
3072 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3073 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3074 (clobber (reg:CC CC_REGNUM))]
3075 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3076 "*
3077 {
3078 enum rtx_code code = GET_CODE (operands[4]);
3079 bool need_else;
3080
3081 if (which_alternative != 0 || operands[3] != const0_rtx
3082 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3083 need_else = true;
3084 else
3085 need_else = false;
3086
3087 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3088 operands[2], operands[3]);
3089 output_asm_insn (\"cmp\\t%2, %3\", operands);
3090 if (TARGET_THUMB2)
3091 {
3092 if (need_else)
3093 output_asm_insn (\"ite\\t%d5\", operands);
3094 else
3095 output_asm_insn (\"it\\t%d5\", operands);
3096 }
3097 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3098 if (need_else)
3099 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3100 return \"\";
3101 }"
3102 [(set_attr "conds" "clob")
3103 (set (attr "length")
3104 (if_then_else (eq_attr "is_thumb" "yes")
3105 (const_int 14)
3106 (const_int 12)))]
3107 )
3108
3109 \f
3110 ;; Shift and rotation insns
3111
3112 (define_expand "ashldi3"
3113 [(set (match_operand:DI 0 "s_register_operand" "")
3114 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3115 (match_operand:SI 2 "reg_or_int_operand" "")))]
3116 "TARGET_32BIT"
3117 "
3118 if (GET_CODE (operands[2]) == CONST_INT)
3119 {
3120 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3121 {
3122 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3123 DONE;
3124 }
3125 /* Ideally we should not fail here if we knew that operands[1] ends up
3126 living in an iwmmxt register anyway. Otherwise it is cheaper to
3127 generate the alternate code than to move values to iwmmxt regs
3128 and back. */
3129 FAIL;
3130 }
3131 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3132 FAIL;
3133 "
3134 )
3135
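;; A 64-bit shift left by one bit: MOVS shifts the low word left and leaves
;; the bit shifted out in the carry flag; ADC then doubles the high word and
;; adds that carry bit in.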
3136 (define_insn "arm_ashldi3_1bit"
3137 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3138 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3139 (const_int 1)))
3140 (clobber (reg:CC CC_REGNUM))]
3141 "TARGET_32BIT"
3142 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3143 [(set_attr "conds" "clob")
3144 (set_attr "length" "8")]
3145 )
3146
3147 (define_expand "ashlsi3"
3148 [(set (match_operand:SI 0 "s_register_operand" "")
3149 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3150 (match_operand:SI 2 "arm_rhs_operand" "")))]
3151 "TARGET_EITHER"
3152 "
3153 if (GET_CODE (operands[2]) == CONST_INT
3154 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3155 {
3156 emit_insn (gen_movsi (operands[0], const0_rtx));
3157 DONE;
3158 }
3159 "
3160 )
3161
3162 (define_insn "*thumb1_ashlsi3"
3163 [(set (match_operand:SI 0 "register_operand" "=l,l")
3164 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3165 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3166 "TARGET_THUMB1"
3167 "lsl\\t%0, %1, %2"
3168 [(set_attr "length" "2")]
3169 )
3170
3171 (define_expand "ashrdi3"
3172 [(set (match_operand:DI 0 "s_register_operand" "")
3173 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3174 (match_operand:SI 2 "reg_or_int_operand" "")))]
3175 "TARGET_32BIT"
3176 "
3177 if (GET_CODE (operands[2]) == CONST_INT)
3178 {
3179 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3180 {
3181 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3182 DONE;
3183 }
3184 /* Ideally we should not fail here if we knew that operands[1] ends up
3185 living in an iwmmxt register anyway. Otherwise it is cheaper to
3186 generate the alternate code than to move values to iwmmxt regs
3187 and back. */
3188 FAIL;
3189 }
3190 else if (!TARGET_REALLY_IWMMXT)
3191 FAIL;
3192 "
3193 )
3194
3195 (define_insn "arm_ashrdi3_1bit"
3196 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3197 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3198 (const_int 1)))
3199 (clobber (reg:CC CC_REGNUM))]
3200 "TARGET_32BIT"
3201 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3202 [(set_attr "conds" "clob")
3203 (set_attr "length" "8")]
3204 )
3205
3206 (define_expand "ashrsi3"
3207 [(set (match_operand:SI 0 "s_register_operand" "")
3208 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3209 (match_operand:SI 2 "arm_rhs_operand" "")))]
3210 "TARGET_EITHER"
3211 "
3212 if (GET_CODE (operands[2]) == CONST_INT
3213 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3214 operands[2] = GEN_INT (31);
3215 "
3216 )
3217
3218 (define_insn "*thumb1_ashrsi3"
3219 [(set (match_operand:SI 0 "register_operand" "=l,l")
3220 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3221 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3222 "TARGET_THUMB1"
3223 "asr\\t%0, %1, %2"
3224 [(set_attr "length" "2")]
3225 )
3226
3227 (define_expand "lshrdi3"
3228 [(set (match_operand:DI 0 "s_register_operand" "")
3229 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3230 (match_operand:SI 2 "reg_or_int_operand" "")))]
3231 "TARGET_32BIT"
3232 "
3233 if (GET_CODE (operands[2]) == CONST_INT)
3234 {
3235 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3236 {
3237 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3238 DONE;
3239 }
3240       /* Ideally we should not fail here if we knew that operands[1]
3241 	 ends up already living in an iwmmxt register; otherwise it is
3242 	 cheaper to generate the alternate code than to move values to
3243 	 iwmmxt regs and back.  */
3244 FAIL;
3245 }
3246 else if (!TARGET_REALLY_IWMMXT)
3247 FAIL;
3248 "
3249 )
3250
3251 (define_insn "arm_lshrdi3_1bit"
3252 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3253 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3254 (const_int 1)))
3255 (clobber (reg:CC CC_REGNUM))]
3256 "TARGET_32BIT"
3257 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3258 [(set_attr "conds" "clob")
3259 (set_attr "length" "8")]
3260 )
3261
3262 (define_expand "lshrsi3"
3263 [(set (match_operand:SI 0 "s_register_operand" "")
3264 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3265 (match_operand:SI 2 "arm_rhs_operand" "")))]
3266 "TARGET_EITHER"
3267 "
3268 if (GET_CODE (operands[2]) == CONST_INT
3269 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3270 {
3271 emit_insn (gen_movsi (operands[0], const0_rtx));
3272 DONE;
3273 }
3274 "
3275 )
3276
3277 (define_insn "*thumb1_lshrsi3"
3278 [(set (match_operand:SI 0 "register_operand" "=l,l")
3279 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3280 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3281 "TARGET_THUMB1"
3282 "lsr\\t%0, %1, %2"
3283 [(set_attr "length" "2")]
3284 )
3285
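;; The ARM has no rotate-left instruction, so a left rotate is rewritten as
;; a right rotate: by (32 - n) % 32 for a constant amount, or by a freshly
;; computed 32 - n for a register amount.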
3286 (define_expand "rotlsi3"
3287 [(set (match_operand:SI 0 "s_register_operand" "")
3288 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3289 (match_operand:SI 2 "reg_or_int_operand" "")))]
3290 "TARGET_32BIT"
3291 "
3292 if (GET_CODE (operands[2]) == CONST_INT)
3293 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3294 else
3295 {
3296 rtx reg = gen_reg_rtx (SImode);
3297 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3298 operands[2] = reg;
3299 }
3300 "
3301 )
3302
3303 (define_expand "rotrsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "")
3305 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "arm_rhs_operand" "")))]
3307 "TARGET_EITHER"
3308 "
3309 if (TARGET_32BIT)
3310 {
3311 if (GET_CODE (operands[2]) == CONST_INT
3312 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3313 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3314 }
3315 else /* TARGET_THUMB1 */
3316 {
3317 if (GET_CODE (operands [2]) == CONST_INT)
3318 operands [2] = force_reg (SImode, operands[2]);
3319 }
3320 "
3321 )
3322
3323 (define_insn "*thumb1_rotrsi3"
3324 [(set (match_operand:SI 0 "register_operand" "=l")
3325 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3326 (match_operand:SI 2 "register_operand" "l")))]
3327 "TARGET_THUMB1"
3328 "ror\\t%0, %0, %2"
3329 [(set_attr "length" "2")]
3330 )
3331
3332 (define_insn "*arm_shiftsi3"
3333 [(set (match_operand:SI 0 "s_register_operand" "=r")
3334 (match_operator:SI 3 "shift_operator"
3335 [(match_operand:SI 1 "s_register_operand" "r")
3336 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3337 "TARGET_32BIT"
3338 "* return arm_output_shift(operands, 0);"
3339 [(set_attr "predicable" "yes")
3340 (set_attr "shift" "1")
3341 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3342 (const_string "alu_shift")
3343 (const_string "alu_shift_reg")))]
3344 )
3345
3346 (define_insn "*shiftsi3_compare0"
3347 [(set (reg:CC_NOOV CC_REGNUM)
3348 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3349 [(match_operand:SI 1 "s_register_operand" "r")
3350 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3351 (const_int 0)))
3352 (set (match_operand:SI 0 "s_register_operand" "=r")
3353 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3354 "TARGET_32BIT"
3355 "* return arm_output_shift(operands, 1);"
3356 [(set_attr "conds" "set")
3357 (set_attr "shift" "1")
3358 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3359 (const_string "alu_shift")
3360 (const_string "alu_shift_reg")))]
3361 )
3362
3363 (define_insn "*shiftsi3_compare0_scratch"
3364 [(set (reg:CC_NOOV CC_REGNUM)
3365 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3366 [(match_operand:SI 1 "s_register_operand" "r")
3367 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3368 (const_int 0)))
3369 (clobber (match_scratch:SI 0 "=r"))]
3370 "TARGET_32BIT"
3371 "* return arm_output_shift(operands, 1);"
3372 [(set_attr "conds" "set")
3373 (set_attr "shift" "1")]
3374 )
3375
3376 (define_insn "*arm_notsi_shiftsi"
3377 [(set (match_operand:SI 0 "s_register_operand" "=r")
3378 (not:SI (match_operator:SI 3 "shift_operator"
3379 [(match_operand:SI 1 "s_register_operand" "r")
3380 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3381 "TARGET_ARM"
3382 "mvn%?\\t%0, %1%S3"
3383 [(set_attr "predicable" "yes")
3384 (set_attr "shift" "1")
3385 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3386 (const_string "alu_shift")
3387 (const_string "alu_shift_reg")))]
3388 )
3389
3390 (define_insn "*arm_notsi_shiftsi_compare0"
3391 [(set (reg:CC_NOOV CC_REGNUM)
3392 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3393 [(match_operand:SI 1 "s_register_operand" "r")
3394 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3395 (const_int 0)))
3396 (set (match_operand:SI 0 "s_register_operand" "=r")
3397 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3398 "TARGET_ARM"
3399 "mvn%.\\t%0, %1%S3"
3400 [(set_attr "conds" "set")
3401 (set_attr "shift" "1")
3402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3403 (const_string "alu_shift")
3404 (const_string "alu_shift_reg")))]
3405 )
3406
3407 (define_insn "*arm_not_shiftsi_compare0_scratch"
3408 [(set (reg:CC_NOOV CC_REGNUM)
3409 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3410 [(match_operand:SI 1 "s_register_operand" "r")
3411 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3412 (const_int 0)))
3413 (clobber (match_scratch:SI 0 "=r"))]
3414 "TARGET_ARM"
3415 "mvn%.\\t%0, %1%S3"
3416 [(set_attr "conds" "set")
3417 (set_attr "shift" "1")
3418 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3419 (const_string "alu_shift")
3420 (const_string "alu_shift_reg")))]
3421 )
3422
3423 ;; We don't really have extzv, but defining this using shifts helps
3424 ;; to reduce register pressure later on.
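;; The expansion extracts WIDTH bits starting at BITPOS by shifting left by
;; 32 - WIDTH - BITPOS and then logically shifting right by 32 - WIDTH; for
;; example, an 8-bit field at bit 4 becomes a shift left by 20 followed by
;; a shift right by 24.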
3425
3426 (define_expand "extzv"
3427 [(set (match_dup 4)
3428 (ashift:SI (match_operand:SI 1 "register_operand" "")
3429 (match_operand:SI 2 "const_int_operand" "")))
3430 (set (match_operand:SI 0 "register_operand" "")
3431 (lshiftrt:SI (match_dup 4)
3432 (match_operand:SI 3 "const_int_operand" "")))]
3433 "TARGET_THUMB1 || arm_arch_thumb2"
3434 "
3435 {
3436 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3437 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3438
3439 if (arm_arch_thumb2)
3440 {
3441 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3442 operands[3]));
3443 DONE;
3444 }
3445
3446 operands[3] = GEN_INT (rshift);
3447
3448 if (lshift == 0)
3449 {
3450 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3451 DONE;
3452 }
3453
3454 operands[2] = GEN_INT (lshift);
3455 operands[4] = gen_reg_rtx (SImode);
3456 }"
3457 )
3458
3459 (define_insn "extv"
3460 [(set (match_operand:SI 0 "s_register_operand" "=r")
3461 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3462 (match_operand:SI 2 "const_int_operand" "M")
3463 (match_operand:SI 3 "const_int_operand" "M")))]
3464 "arm_arch_thumb2"
3465 "sbfx%?\t%0, %1, %3, %2"
3466 [(set_attr "length" "4")
3467 (set_attr "predicable" "yes")]
3468 )
3469
3470 (define_insn "extzv_t2"
3471 [(set (match_operand:SI 0 "s_register_operand" "=r")
3472 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3473 (match_operand:SI 2 "const_int_operand" "M")
3474 (match_operand:SI 3 "const_int_operand" "M")))]
3475 "arm_arch_thumb2"
3476 "ubfx%?\t%0, %1, %3, %2"
3477 [(set_attr "length" "4")
3478 (set_attr "predicable" "yes")]
3479 )
3480
3481 \f
3482 ;; Unary arithmetic insns
3483
3484 (define_expand "negdi2"
3485 [(parallel
3486 [(set (match_operand:DI 0 "s_register_operand" "")
3487 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3488 (clobber (reg:CC CC_REGNUM))])]
3489 "TARGET_EITHER"
3490 "
3491 if (TARGET_THUMB1)
3492 {
3493 if (GET_CODE (operands[1]) != REG)
3494 operands[1] = force_reg (SImode, operands[1]);
3495 }
3496 "
3497 )
3498
3499 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3500 ;; The second alternative is to allow the common case of a *full* overlap.
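;; The RSBS negates the low word and records the borrow in the carry flag
;; (ARM carry is the inverse of borrow); the RSC then computes
;; 0 - %R1 - borrow, completing the 64-bit negation.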
3501 (define_insn "*arm_negdi2"
3502 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3503 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3504 (clobber (reg:CC CC_REGNUM))]
3505 "TARGET_ARM"
3506 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3507 [(set_attr "conds" "clob")
3508 (set_attr "length" "8")]
3509 )
3510
3511 (define_insn "*thumb1_negdi2"
3512 [(set (match_operand:DI 0 "register_operand" "=&l")
3513 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3514 (clobber (reg:CC CC_REGNUM))]
3515 "TARGET_THUMB1"
3516 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3517 [(set_attr "length" "6")]
3518 )
3519
3520 (define_expand "negsi2"
3521 [(set (match_operand:SI 0 "s_register_operand" "")
3522 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3523 "TARGET_EITHER"
3524 ""
3525 )
3526
3527 (define_insn "*arm_negsi2"
3528 [(set (match_operand:SI 0 "s_register_operand" "=r")
3529 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3530 "TARGET_32BIT"
3531 "rsb%?\\t%0, %1, #0"
3532 [(set_attr "predicable" "yes")]
3533 )
3534
3535 (define_insn "*thumb1_negsi2"
3536 [(set (match_operand:SI 0 "register_operand" "=l")
3537 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3538 "TARGET_THUMB1"
3539 "neg\\t%0, %1"
3540 [(set_attr "length" "2")]
3541 )
3542
3543 (define_expand "negsf2"
3544 [(set (match_operand:SF 0 "s_register_operand" "")
3545 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3546 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3547 ""
3548 )
3549
3550 (define_expand "negdf2"
3551 [(set (match_operand:DF 0 "s_register_operand" "")
3552 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3553 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3554 "")
3555
3556 ;; abssi2 doesn't really clobber the condition codes if a different register
3557 ;; is being set. To keep things simple, assume during rtl manipulations that
3558 ;; it does, but tell the final scan operator the truth. Similarly for
3559 ;; (neg (abs...))
3560
3561 (define_expand "abssi2"
3562 [(parallel
3563 [(set (match_operand:SI 0 "s_register_operand" "")
3564 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3565 (clobber (match_dup 2))])]
3566 "TARGET_EITHER"
3567 "
3568 if (TARGET_THUMB1)
3569 operands[2] = gen_rtx_SCRATCH (SImode);
3570 else
3571 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3572 ")
3573
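;; The second alternative is the branchless absolute value: with
;; m = x >> 31 (arithmetic, so m is 0 or -1), abs (x) = (x ^ m) - m.
;; The *arm_neg_abssi2 pattern below uses the mirrored m - (x ^ m) form.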
3574 (define_insn "*arm_abssi2"
3575 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3576 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3577 (clobber (reg:CC CC_REGNUM))]
3578 "TARGET_ARM"
3579 "@
3580 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3581 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3582 [(set_attr "conds" "clob,*")
3583 (set_attr "shift" "1")
3584 ;; predicable can't be set based on the variant, so left as no
3585 (set_attr "length" "8")]
3586 )
3587
3588 (define_insn_and_split "*thumb1_abssi2"
3589 [(set (match_operand:SI 0 "s_register_operand" "=l")
3590 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3591 (clobber (match_scratch:SI 2 "=&l"))]
3592 "TARGET_THUMB1"
3593 "#"
3594 "TARGET_THUMB1 && reload_completed"
3595 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3596 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3597 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3598 ""
3599 [(set_attr "length" "6")]
3600 )
3601
3602 (define_insn "*arm_neg_abssi2"
3603 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3604 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3605 (clobber (reg:CC CC_REGNUM))]
3606 "TARGET_ARM"
3607 "@
3608 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3609 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3610 [(set_attr "conds" "clob,*")
3611 (set_attr "shift" "1")
3612 ;; predicable can't be set based on the variant, so left as no
3613 (set_attr "length" "8")]
3614 )
3615
3616 (define_insn_and_split "*thumb1_neg_abssi2"
3617 [(set (match_operand:SI 0 "s_register_operand" "=l")
3618 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3619 (clobber (match_scratch:SI 2 "=&l"))]
3620 "TARGET_THUMB1"
3621 "#"
3622 "TARGET_THUMB1 && reload_completed"
3623 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3624 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3625 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3626 ""
3627 [(set_attr "length" "6")]
3628 )
3629
3630 (define_expand "abssf2"
3631 [(set (match_operand:SF 0 "s_register_operand" "")
3632 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3633 "TARGET_32BIT && TARGET_HARD_FLOAT"
3634 "")
3635
3636 (define_expand "absdf2"
3637 [(set (match_operand:DF 0 "s_register_operand" "")
3638 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3639 "TARGET_32BIT && TARGET_HARD_FLOAT"
3640 "")
3641
3642 (define_expand "sqrtsf2"
3643 [(set (match_operand:SF 0 "s_register_operand" "")
3644 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3645 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3646 "")
3647
3648 (define_expand "sqrtdf2"
3649 [(set (match_operand:DF 0 "s_register_operand" "")
3650 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3651 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3652 "")
3653
3654 (define_insn_and_split "one_cmpldi2"
3655 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3656 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3657 "TARGET_32BIT"
3658 "#"
3659 "TARGET_32BIT && reload_completed"
3660 [(set (match_dup 0) (not:SI (match_dup 1)))
3661 (set (match_dup 2) (not:SI (match_dup 3)))]
3662 "
3663 {
3664 operands[2] = gen_highpart (SImode, operands[0]);
3665 operands[0] = gen_lowpart (SImode, operands[0]);
3666 operands[3] = gen_highpart (SImode, operands[1]);
3667 operands[1] = gen_lowpart (SImode, operands[1]);
3668 }"
3669 [(set_attr "length" "8")
3670 (set_attr "predicable" "yes")]
3671 )
3672
3673 (define_expand "one_cmplsi2"
3674 [(set (match_operand:SI 0 "s_register_operand" "")
3675 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3676 "TARGET_EITHER"
3677 ""
3678 )
3679
3680 (define_insn "*arm_one_cmplsi2"
3681 [(set (match_operand:SI 0 "s_register_operand" "=r")
3682 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3683 "TARGET_32BIT"
3684 "mvn%?\\t%0, %1"
3685 [(set_attr "predicable" "yes")]
3686 )
3687
3688 (define_insn "*thumb1_one_cmplsi2"
3689 [(set (match_operand:SI 0 "register_operand" "=l")
3690 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3691 "TARGET_THUMB1"
3692 "mvn\\t%0, %1"
3693 [(set_attr "length" "2")]
3694 )
3695
3696 (define_insn "*notsi_compare0"
3697 [(set (reg:CC_NOOV CC_REGNUM)
3698 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3699 (const_int 0)))
3700 (set (match_operand:SI 0 "s_register_operand" "=r")
3701 (not:SI (match_dup 1)))]
3702 "TARGET_32BIT"
3703 "mvn%.\\t%0, %1"
3704 [(set_attr "conds" "set")]
3705 )
3706
3707 (define_insn "*notsi_compare0_scratch"
3708 [(set (reg:CC_NOOV CC_REGNUM)
3709 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3710 (const_int 0)))
3711 (clobber (match_scratch:SI 0 "=r"))]
3712 "TARGET_32BIT"
3713 "mvn%.\\t%0, %1"
3714 [(set_attr "conds" "set")]
3715 )
3716 \f
3717 ;; Fixed <--> Floating conversion insns
3718
3719 (define_expand "floatsisf2"
3720 [(set (match_operand:SF 0 "s_register_operand" "")
3721 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3722 "TARGET_32BIT && TARGET_HARD_FLOAT"
3723 "
3724 if (TARGET_MAVERICK)
3725 {
3726 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3727 DONE;
3728 }
3729 ")
3730
3731 (define_expand "floatsidf2"
3732 [(set (match_operand:DF 0 "s_register_operand" "")
3733 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3734 "TARGET_32BIT && TARGET_HARD_FLOAT"
3735 "
3736 if (TARGET_MAVERICK)
3737 {
3738 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3739 DONE;
3740 }
3741 ")
3742
3743 (define_expand "fix_truncsfsi2"
3744 [(set (match_operand:SI 0 "s_register_operand" "")
3745 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3746 "TARGET_32BIT && TARGET_HARD_FLOAT"
3747 "
3748 if (TARGET_MAVERICK)
3749 {
3750 if (!cirrus_fp_register (operands[0], SImode))
3751 operands[0] = force_reg (SImode, operands[0]);
3752 if (!cirrus_fp_register (operands[1], SFmode))
3753       operands[1] = force_reg (SFmode, operands[1]);
3754 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3755 DONE;
3756 }
3757 ")
3758
3759 (define_expand "fix_truncdfsi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "")
3761 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3762 "TARGET_32BIT && TARGET_HARD_FLOAT"
3763 "
3764 if (TARGET_MAVERICK)
3765 {
3766 if (!cirrus_fp_register (operands[1], DFmode))
3767       operands[1] = force_reg (DFmode, operands[1]);
3768 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3769 DONE;
3770 }
3771 ")
3772
3773 ;; Truncation insns
3774
3775 (define_expand "truncdfsf2"
3776 [(set (match_operand:SF 0 "s_register_operand" "")
3777 (float_truncate:SF
3778 (match_operand:DF 1 "s_register_operand" "")))]
3779 "TARGET_32BIT && TARGET_HARD_FLOAT"
3780 ""
3781 )
3782 \f
3783 ;; Zero and sign extension instructions.
3784
3785 (define_expand "zero_extendsidi2"
3786 [(set (match_operand:DI 0 "s_register_operand" "")
3787 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3788 "TARGET_32BIT"
3789 ""
3790 )
3791
3792 (define_insn "*arm_zero_extendsidi2"
3793 [(set (match_operand:DI 0 "s_register_operand" "=r")
3794 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3795 "TARGET_ARM"
3796 "*
3797 if (REGNO (operands[1])
3798 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3799 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3800 return \"mov%?\\t%R0, #0\";
3801 "
3802 [(set_attr "length" "8")
3803 (set_attr "predicable" "yes")]
3804 )
3805
3806 (define_expand "zero_extendqidi2"
3807 [(set (match_operand:DI 0 "s_register_operand" "")
3808 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3809 "TARGET_32BIT"
3810 ""
3811 )
3812
3813 (define_insn "*arm_zero_extendqidi2"
3814 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3815 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3816 "TARGET_ARM"
3817 "@
3818 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3819 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3820 [(set_attr "length" "8")
3821 (set_attr "predicable" "yes")
3822 (set_attr "type" "*,load_byte")
3823 (set_attr "pool_range" "*,4092")
3824 (set_attr "neg_pool_range" "*,4084")]
3825 )
3826
3827 (define_expand "extendsidi2"
3828 [(set (match_operand:DI 0 "s_register_operand" "")
3829 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3830 "TARGET_32BIT"
3831 ""
3832 )
3833
3834 (define_insn "*arm_extendsidi2"
3835 [(set (match_operand:DI 0 "s_register_operand" "=r")
3836 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3837 "TARGET_ARM"
3838 "*
3839 if (REGNO (operands[1])
3840 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3841 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3842 return \"mov%?\\t%R0, %Q0, asr #31\";
3843 "
3844 [(set_attr "length" "8")
3845 (set_attr "shift" "1")
3846 (set_attr "predicable" "yes")]
3847 )
3848
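;; When no halfword load or UXTH instruction is available, a zero extend
;; from HImode is open-coded as a left shift by 16 followed by a logical
;; right shift by 16; the code below intercepts the cases that a single
;; instruction can handle.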
3849 (define_expand "zero_extendhisi2"
3850 [(set (match_dup 2)
3851 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3852 (const_int 16)))
3853 (set (match_operand:SI 0 "s_register_operand" "")
3854 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3855 "TARGET_EITHER"
3856 "
3857 {
3858 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3859 {
3860 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3861 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3862 DONE;
3863 }
3864
3865 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3866 {
3867 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3868 DONE;
3869 }
3870
3871 if (!s_register_operand (operands[1], HImode))
3872 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3873
3874 if (arm_arch6)
3875 {
3876 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3877 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3878 DONE;
3879 }
3880
3881 operands[1] = gen_lowpart (SImode, operands[1]);
3882 operands[2] = gen_reg_rtx (SImode);
3883 }"
3884 )
3885
3886 (define_insn "*thumb1_zero_extendhisi2"
3887 [(set (match_operand:SI 0 "register_operand" "=l")
3888 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3889 "TARGET_THUMB1 && !arm_arch6"
3890 "*
3891 rtx mem = XEXP (operands[1], 0);
3892
3893 if (GET_CODE (mem) == CONST)
3894 mem = XEXP (mem, 0);
3895
3896 if (GET_CODE (mem) == LABEL_REF)
3897 return \"ldr\\t%0, %1\";
3898
3899 if (GET_CODE (mem) == PLUS)
3900 {
3901 rtx a = XEXP (mem, 0);
3902 rtx b = XEXP (mem, 1);
3903
3904 /* This can happen due to bugs in reload. */
3905 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3906 {
3907 rtx ops[2];
3908 ops[0] = operands[0];
3909 ops[1] = a;
3910
3911 output_asm_insn (\"mov %0, %1\", ops);
3912
3913 XEXP (mem, 0) = operands[0];
3914 }
3915
3916 else if ( GET_CODE (a) == LABEL_REF
3917 && GET_CODE (b) == CONST_INT)
3918 return \"ldr\\t%0, %1\";
3919 }
3920
3921 return \"ldrh\\t%0, %1\";
3922 "
3923 [(set_attr "length" "4")
3924 (set_attr "type" "load_byte")
3925 (set_attr "pool_range" "60")]
3926 )
3927
3928 (define_insn "*thumb1_zero_extendhisi2_v6"
3929 [(set (match_operand:SI 0 "register_operand" "=l,l")
3930 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3931 "TARGET_THUMB1 && arm_arch6"
3932 "*
3933 rtx mem;
3934
3935 if (which_alternative == 0)
3936 return \"uxth\\t%0, %1\";
3937
3938 mem = XEXP (operands[1], 0);
3939
3940 if (GET_CODE (mem) == CONST)
3941 mem = XEXP (mem, 0);
3942
3943 if (GET_CODE (mem) == LABEL_REF)
3944 return \"ldr\\t%0, %1\";
3945
3946 if (GET_CODE (mem) == PLUS)
3947 {
3948 rtx a = XEXP (mem, 0);
3949 rtx b = XEXP (mem, 1);
3950
3951 /* This can happen due to bugs in reload. */
3952 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3953 {
3954 rtx ops[2];
3955 ops[0] = operands[0];
3956 ops[1] = a;
3957
3958 output_asm_insn (\"mov %0, %1\", ops);
3959
3960 XEXP (mem, 0) = operands[0];
3961 }
3962
3963 else if ( GET_CODE (a) == LABEL_REF
3964 && GET_CODE (b) == CONST_INT)
3965 return \"ldr\\t%0, %1\";
3966 }
3967
3968 return \"ldrh\\t%0, %1\";
3969 "
3970 [(set_attr "length" "2,4")
3971 (set_attr "type" "alu_shift,load_byte")
3972 (set_attr "pool_range" "*,60")]
3973 )
3974
3975 (define_insn "*arm_zero_extendhisi2"
3976 [(set (match_operand:SI 0 "s_register_operand" "=r")
3977 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3978 "TARGET_ARM && arm_arch4 && !arm_arch6"
3979 "ldr%(h%)\\t%0, %1"
3980 [(set_attr "type" "load_byte")
3981 (set_attr "predicable" "yes")
3982 (set_attr "pool_range" "256")
3983 (set_attr "neg_pool_range" "244")]
3984 )
3985
3986 (define_insn "*arm_zero_extendhisi2_v6"
3987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3988 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3989 "TARGET_ARM && arm_arch6"
3990 "@
3991 uxth%?\\t%0, %1
3992 ldr%(h%)\\t%0, %1"
3993 [(set_attr "type" "alu_shift,load_byte")
3994 (set_attr "predicable" "yes")
3995 (set_attr "pool_range" "*,256")
3996 (set_attr "neg_pool_range" "*,244")]
3997 )
3998
3999 (define_insn "*arm_zero_extendhisi2addsi"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4002 (match_operand:SI 2 "s_register_operand" "r")))]
4003 "TARGET_INT_SIMD"
4004 "uxtah%?\\t%0, %2, %1"
4005 [(set_attr "type" "alu_shift")
4006 (set_attr "predicable" "yes")]
4007 )
4008
4009 (define_expand "zero_extendqisi2"
4010 [(set (match_operand:SI 0 "s_register_operand" "")
4011 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4012 "TARGET_EITHER"
4013 "
4014 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4015 {
4016 if (TARGET_ARM)
4017 {
4018 emit_insn (gen_andsi3 (operands[0],
4019 gen_lowpart (SImode, operands[1]),
4020 GEN_INT (255)));
4021 }
4022 else /* TARGET_THUMB */
4023 {
4024 rtx temp = gen_reg_rtx (SImode);
4025 rtx ops[3];
4026
4027 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4028 operands[1] = gen_lowpart (SImode, operands[1]);
4029
4030 ops[0] = temp;
4031 ops[1] = operands[1];
4032 ops[2] = GEN_INT (24);
4033
4034 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4035 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4036
4037 ops[0] = operands[0];
4038 ops[1] = temp;
4039 ops[2] = GEN_INT (24);
4040
4041 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4042 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4043 }
4044 DONE;
4045 }
4046 "
4047 )
4048
4049 (define_insn "*thumb1_zero_extendqisi2"
4050 [(set (match_operand:SI 0 "register_operand" "=l")
4051 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4052 "TARGET_THUMB1 && !arm_arch6"
4053 "ldrb\\t%0, %1"
4054 [(set_attr "length" "2")
4055 (set_attr "type" "load_byte")
4056 (set_attr "pool_range" "32")]
4057 )
4058
4059 (define_insn "*thumb1_zero_extendqisi2_v6"
4060 [(set (match_operand:SI 0 "register_operand" "=l,l")
4061 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4062 "TARGET_THUMB1 && arm_arch6"
4063 "@
4064 uxtb\\t%0, %1
4065 ldrb\\t%0, %1"
4066 [(set_attr "length" "2,2")
4067 (set_attr "type" "alu_shift,load_byte")
4068 (set_attr "pool_range" "*,32")]
4069 )
4070
4071 (define_insn "*arm_zero_extendqisi2"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r")
4073 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4074 "TARGET_ARM && !arm_arch6"
4075 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4076 [(set_attr "type" "load_byte")
4077 (set_attr "predicable" "yes")
4078 (set_attr "pool_range" "4096")
4079 (set_attr "neg_pool_range" "4084")]
4080 )
4081
4082 (define_insn "*arm_zero_extendqisi2_v6"
4083 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4084 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4085 "TARGET_ARM && arm_arch6"
4086 "@
4087 uxtb%(%)\\t%0, %1
4088 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4089 [(set_attr "type" "alu_shift,load_byte")
4090 (set_attr "predicable" "yes")
4091 (set_attr "pool_range" "*,4096")
4092 (set_attr "neg_pool_range" "*,4084")]
4093 )
4094
4095 (define_insn "*arm_zero_extendqisi2addsi"
4096 [(set (match_operand:SI 0 "s_register_operand" "=r")
4097 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4098 (match_operand:SI 2 "s_register_operand" "r")))]
4099 "TARGET_INT_SIMD"
4100 "uxtab%?\\t%0, %2, %1"
4101 [(set_attr "predicable" "yes")
4102 (set_attr "insn" "xtab")
4103 (set_attr "type" "alu_shift")]
4104 )
4105
4106 (define_split
4107 [(set (match_operand:SI 0 "s_register_operand" "")
4108 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4109 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4110 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4111 [(set (match_dup 2) (match_dup 1))
4112 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4113 ""
4114 )
4115
4116 (define_split
4117 [(set (match_operand:SI 0 "s_register_operand" "")
4118 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4119 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4120 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4121 [(set (match_dup 2) (match_dup 1))
4122 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4123 ""
4124 )
4125
4126 (define_insn "*compareqi_eq0"
4127 [(set (reg:CC_Z CC_REGNUM)
4128 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4129 (const_int 0)))]
4130 "TARGET_32BIT"
4131 "tst\\t%0, #255"
4132 [(set_attr "conds" "set")]
4133 )
4134
4135 (define_expand "extendhisi2"
4136 [(set (match_dup 2)
4137 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4138 (const_int 16)))
4139 (set (match_operand:SI 0 "s_register_operand" "")
4140 (ashiftrt:SI (match_dup 2)
4141 (const_int 16)))]
4142 "TARGET_EITHER"
4143 "
4144 {
4145 if (GET_CODE (operands[1]) == MEM)
4146 {
4147 if (TARGET_THUMB1)
4148 {
4149 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4150 DONE;
4151 }
4152 else if (arm_arch4)
4153 {
4154 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4155 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4156 DONE;
4157 }
4158 }
4159
4160 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4161 {
4162 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4163 DONE;
4164 }
4165
4166 if (!s_register_operand (operands[1], HImode))
4167 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4168
4169 if (arm_arch6)
4170 {
4171 if (TARGET_THUMB1)
4172 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4173 else
4174 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4175 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4176
4177 DONE;
4178 }
4179
4180 operands[1] = gen_lowpart (SImode, operands[1]);
4181 operands[2] = gen_reg_rtx (SImode);
4182 }"
4183 )
4184
4185 (define_insn "thumb1_extendhisi2"
4186 [(set (match_operand:SI 0 "register_operand" "=l")
4187 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4188 (clobber (match_scratch:SI 2 "=&l"))]
4189 "TARGET_THUMB1 && !arm_arch6"
4190 "*
4191 {
4192 rtx ops[4];
4193 rtx mem = XEXP (operands[1], 0);
4194
4195 /* This code used to try to use 'V', and fix the address only if it was
4196 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4197 range of QImode offsets, and offsettable_address_p does a QImode
4198 address check. */
4199
4200 if (GET_CODE (mem) == CONST)
4201 mem = XEXP (mem, 0);
4202
4203 if (GET_CODE (mem) == LABEL_REF)
4204 return \"ldr\\t%0, %1\";
4205
4206 if (GET_CODE (mem) == PLUS)
4207 {
4208 rtx a = XEXP (mem, 0);
4209 rtx b = XEXP (mem, 1);
4210
4211 if (GET_CODE (a) == LABEL_REF
4212 && GET_CODE (b) == CONST_INT)
4213 return \"ldr\\t%0, %1\";
4214
4215 if (GET_CODE (b) == REG)
4216 return \"ldrsh\\t%0, %1\";
4217
4218 ops[1] = a;
4219 ops[2] = b;
4220 }
4221 else
4222 {
4223 ops[1] = mem;
4224 ops[2] = const0_rtx;
4225 }
4226
4227 gcc_assert (GET_CODE (ops[1]) == REG);
4228
4229 ops[0] = operands[0];
4230 ops[3] = operands[2];
4231 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4232 return \"\";
4233 }"
4234 [(set_attr "length" "4")
4235 (set_attr "type" "load_byte")
4236 (set_attr "pool_range" "1020")]
4237 )
4238
4239 ;; We used to have an early-clobber on the scratch register here.
4240 ;; However, there's a bug somewhere in reload which means that this
4241 ;; can be partially ignored during spill allocation if the memory
4242 ;; address also needs reloading; this causes us to die later on when
4243 ;; we try to verify the operands. Fortunately, we don't really need
4244 ;; the early-clobber: we can always use operand 0 if operand 2
4245 ;; overlaps the address.
4246 (define_insn "*thumb1_extendhisi2_insn_v6"
4247 [(set (match_operand:SI 0 "register_operand" "=l,l")
4248 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4249 (clobber (match_scratch:SI 2 "=X,l"))]
4250 "TARGET_THUMB1 && arm_arch6"
4251 "*
4252 {
4253 rtx ops[4];
4254 rtx mem;
4255
4256 if (which_alternative == 0)
4257 return \"sxth\\t%0, %1\";
4258
4259 mem = XEXP (operands[1], 0);
4260
4261 /* This code used to try to use 'V', and fix the address only if it was
4262 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4263 range of QImode offsets, and offsettable_address_p does a QImode
4264 address check. */
4265
4266 if (GET_CODE (mem) == CONST)
4267 mem = XEXP (mem, 0);
4268
4269 if (GET_CODE (mem) == LABEL_REF)
4270 return \"ldr\\t%0, %1\";
4271
4272 if (GET_CODE (mem) == PLUS)
4273 {
4274 rtx a = XEXP (mem, 0);
4275 rtx b = XEXP (mem, 1);
4276
4277 if (GET_CODE (a) == LABEL_REF
4278 && GET_CODE (b) == CONST_INT)
4279 return \"ldr\\t%0, %1\";
4280
4281 if (GET_CODE (b) == REG)
4282 return \"ldrsh\\t%0, %1\";
4283
4284 ops[1] = a;
4285 ops[2] = b;
4286 }
4287 else
4288 {
4289 ops[1] = mem;
4290 ops[2] = const0_rtx;
4291 }
4292
4293 gcc_assert (GET_CODE (ops[1]) == REG);
4294
4295 ops[0] = operands[0];
4296 if (reg_mentioned_p (operands[2], ops[1]))
4297 ops[3] = ops[0];
4298 else
4299 ops[3] = operands[2];
4300 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4301 return \"\";
4302 }"
4303 [(set_attr "length" "2,4")
4304 (set_attr "type" "alu_shift,load_byte")
4305 (set_attr "pool_range" "*,1020")]
4306 )
4307
4308 ;; This pattern will only be used when ldsh is not available
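;; The two bytes are loaded separately; the byte holding the sign is
;; shifted to the top of the word and arithmetically shifted back down by
;; 16 so that it occupies bits 8-31 with the sign propagated, and the other
;; byte is then IORed into bits 0-7.  Operands 4 and 5 select which loaded
;; byte is which, according to BYTES_BIG_ENDIAN.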
4309 (define_expand "extendhisi2_mem"
4310 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4311 (set (match_dup 3)
4312 (zero_extend:SI (match_dup 7)))
4313 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4314 (set (match_operand:SI 0 "" "")
4315 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4316 "TARGET_ARM"
4317 "
4318 {
4319 rtx mem1, mem2;
4320 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4321
4322 mem1 = change_address (operands[1], QImode, addr);
4323 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4324 operands[0] = gen_lowpart (SImode, operands[0]);
4325 operands[1] = mem1;
4326 operands[2] = gen_reg_rtx (SImode);
4327 operands[3] = gen_reg_rtx (SImode);
4328 operands[6] = gen_reg_rtx (SImode);
4329 operands[7] = mem2;
4330
4331 if (BYTES_BIG_ENDIAN)
4332 {
4333 operands[4] = operands[2];
4334 operands[5] = operands[3];
4335 }
4336 else
4337 {
4338 operands[4] = operands[3];
4339 operands[5] = operands[2];
4340 }
4341 }"
4342 )
4343
4344 (define_insn "*arm_extendhisi2"
4345 [(set (match_operand:SI 0 "s_register_operand" "=r")
4346 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4347 "TARGET_ARM && arm_arch4 && !arm_arch6"
4348 "ldr%(sh%)\\t%0, %1"
4349 [(set_attr "type" "load_byte")
4350 (set_attr "predicable" "yes")
4351 (set_attr "pool_range" "256")
4352 (set_attr "neg_pool_range" "244")]
4353 )
4354
4355 ;; ??? Check Thumb-2 pool range
4356 (define_insn "*arm_extendhisi2_v6"
4357 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4358 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4359 "TARGET_32BIT && arm_arch6"
4360 "@
4361 sxth%?\\t%0, %1
4362 ldr%(sh%)\\t%0, %1"
4363 [(set_attr "type" "alu_shift,load_byte")
4364 (set_attr "predicable" "yes")
4365 (set_attr "pool_range" "*,256")
4366 (set_attr "neg_pool_range" "*,244")]
4367 )
4368
4369 (define_insn "*arm_extendhisi2addsi"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4372 (match_operand:SI 2 "s_register_operand" "r")))]
4373 "TARGET_INT_SIMD"
4374 "sxtah%?\\t%0, %2, %1"
4375 )
4376
4377 (define_expand "extendqihi2"
4378 [(set (match_dup 2)
4379 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4380 (const_int 24)))
4381 (set (match_operand:HI 0 "s_register_operand" "")
4382 (ashiftrt:SI (match_dup 2)
4383 (const_int 24)))]
4384 "TARGET_ARM"
4385 "
4386 {
4387 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4388 {
4389 emit_insn (gen_rtx_SET (VOIDmode,
4390 operands[0],
4391 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4392 DONE;
4393 }
4394 if (!s_register_operand (operands[1], QImode))
4395 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4396 operands[0] = gen_lowpart (SImode, operands[0]);
4397 operands[1] = gen_lowpart (SImode, operands[1]);
4398 operands[2] = gen_reg_rtx (SImode);
4399 }"
4400 )
4401
4402 (define_insn "*arm_extendqihi_insn"
4403 [(set (match_operand:HI 0 "s_register_operand" "=r")
4404 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4405 "TARGET_ARM && arm_arch4"
4406 "ldr%(sb%)\\t%0, %1"
4407 [(set_attr "type" "load_byte")
4408 (set_attr "predicable" "yes")
4409 (set_attr "pool_range" "256")
4410 (set_attr "neg_pool_range" "244")]
4411 )
4412
4413 (define_expand "extendqisi2"
4414 [(set (match_dup 2)
4415 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4416 (const_int 24)))
4417 (set (match_operand:SI 0 "s_register_operand" "")
4418 (ashiftrt:SI (match_dup 2)
4419 (const_int 24)))]
4420 "TARGET_EITHER"
4421 "
4422 {
4423 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4424 {
4425 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4426 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4427 DONE;
4428 }
4429
4430 if (!s_register_operand (operands[1], QImode))
4431 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4432
4433 if (arm_arch6)
4434 {
4435 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4436 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4437 DONE;
4438 }
4439
4440 operands[1] = gen_lowpart (SImode, operands[1]);
4441 operands[2] = gen_reg_rtx (SImode);
4442 }"
4443 )
4444
4445 (define_insn "*arm_extendqisi"
4446 [(set (match_operand:SI 0 "s_register_operand" "=r")
4447 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4448 "TARGET_ARM && arm_arch4 && !arm_arch6"
4449 "ldr%(sb%)\\t%0, %1"
4450 [(set_attr "type" "load_byte")
4451 (set_attr "predicable" "yes")
4452 (set_attr "pool_range" "256")
4453 (set_attr "neg_pool_range" "244")]
4454 )
4455
4456 (define_insn "*arm_extendqisi_v6"
4457 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4458 (sign_extend:SI
4459 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4460 "TARGET_ARM && arm_arch6"
4461 "@
4462 sxtb%?\\t%0, %1
4463 ldr%(sb%)\\t%0, %1"
4464 [(set_attr "type" "alu_shift,load_byte")
4465 (set_attr "predicable" "yes")
4466 (set_attr "pool_range" "*,256")
4467 (set_attr "neg_pool_range" "*,244")]
4468 )
4469
4470 (define_insn "*arm_extendqisi2addsi"
4471 [(set (match_operand:SI 0 "s_register_operand" "=r")
4472 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4473 (match_operand:SI 2 "s_register_operand" "r")))]
4474 "TARGET_INT_SIMD"
4475 "sxtab%?\\t%0, %2, %1"
4476 [(set_attr "type" "alu_shift")
4477 (set_attr "insn" "xtab")
4478 (set_attr "predicable" "yes")]
4479 )
4480
4481 (define_insn "*thumb1_extendqisi2"
4482 [(set (match_operand:SI 0 "register_operand" "=l,l")
4483 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4484 "TARGET_THUMB1 && !arm_arch6"
4485 "*
4486 {
4487 rtx ops[3];
4488 rtx mem = XEXP (operands[1], 0);
4489
4490 if (GET_CODE (mem) == CONST)
4491 mem = XEXP (mem, 0);
4492
4493 if (GET_CODE (mem) == LABEL_REF)
4494 return \"ldr\\t%0, %1\";
4495
4496 if (GET_CODE (mem) == PLUS
4497 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4498 return \"ldr\\t%0, %1\";
4499
4500 if (which_alternative == 0)
4501 return \"ldrsb\\t%0, %1\";
4502
4503 ops[0] = operands[0];
4504
4505 if (GET_CODE (mem) == PLUS)
4506 {
4507 rtx a = XEXP (mem, 0);
4508 rtx b = XEXP (mem, 1);
4509
4510 ops[1] = a;
4511 ops[2] = b;
4512
4513 if (GET_CODE (a) == REG)
4514 {
4515 if (GET_CODE (b) == REG)
4516 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4517 else if (REGNO (a) == REGNO (ops[0]))
4518 {
4519 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4520 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4521 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4522 }
4523 else
4524 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4525 }
4526 else
4527 {
4528 gcc_assert (GET_CODE (b) == REG);
4529 if (REGNO (b) == REGNO (ops[0]))
4530 {
4531 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4532 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4533 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4534 }
4535 else
4536 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4537 }
4538 }
4539 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4540 {
4541 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4542 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4543 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4544 }
4545 else
4546 {
4547 ops[1] = mem;
4548 ops[2] = const0_rtx;
4549
4550 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4551 }
4552 return \"\";
4553 }"
4554 [(set_attr "length" "2,6")
4555 (set_attr "type" "load_byte,load_byte")
4556 (set_attr "pool_range" "32,32")]
4557 )
4558
4559 (define_insn "*thumb1_extendqisi2_v6"
4560 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4561 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4562 "TARGET_THUMB1 && arm_arch6"
4563 "*
4564 {
4565 rtx ops[3];
4566 rtx mem;
4567
4568 if (which_alternative == 0)
4569 return \"sxtb\\t%0, %1\";
4570
4571 mem = XEXP (operands[1], 0);
4572
4573 if (GET_CODE (mem) == CONST)
4574 mem = XEXP (mem, 0);
4575
4576 if (GET_CODE (mem) == LABEL_REF)
4577 return \"ldr\\t%0, %1\";
4578
4579 if (GET_CODE (mem) == PLUS
4580 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4581 return \"ldr\\t%0, %1\";
4582
4583     if (which_alternative == 1)
4584 return \"ldrsb\\t%0, %1\";
4585
4586 ops[0] = operands[0];
4587
4588 if (GET_CODE (mem) == PLUS)
4589 {
4590 rtx a = XEXP (mem, 0);
4591 rtx b = XEXP (mem, 1);
4592
4593 ops[1] = a;
4594 ops[2] = b;
4595
4596 if (GET_CODE (a) == REG)
4597 {
4598 if (GET_CODE (b) == REG)
4599 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4600 else if (REGNO (a) == REGNO (ops[0]))
4601 {
4602 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4603 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4604 }
4605 else
4606 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4607 }
4608 else
4609 {
4610 gcc_assert (GET_CODE (b) == REG);
4611 if (REGNO (b) == REGNO (ops[0]))
4612 {
4613 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4614 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4615 }
4616 else
4617 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4618 }
4619 }
4620 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4621 {
4622 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4623 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4624 }
4625 else
4626 {
4627 ops[1] = mem;
4628 ops[2] = const0_rtx;
4629
4630 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4631 }
4632 return \"\";
4633 }"
4634 [(set_attr "length" "2,2,4")
4635 (set_attr "type" "alu_shift,load_byte,load_byte")
4636 (set_attr "pool_range" "*,32,32")]
4637 )
4638
4639 (define_expand "extendsfdf2"
4640 [(set (match_operand:DF 0 "s_register_operand" "")
4641 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4642 "TARGET_32BIT && TARGET_HARD_FLOAT"
4643 ""
4644 )
4645 \f
4646 ;; Move insns (including loads and stores)
4647
4648 ;; XXX Just some ideas about movti.
4649 ;; I don't think these are a good idea on the ARM; there just aren't enough
4650 ;; registers.
4651 ;;(define_expand "loadti"
4652 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4653 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4654 ;; "" "")
4655
4656 ;;(define_expand "storeti"
4657 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4658 ;; (match_operand:TI 1 "s_register_operand" ""))]
4659 ;; "" "")
4660
4661 ;;(define_expand "movti"
4662 ;; [(set (match_operand:TI 0 "general_operand" "")
4663 ;; (match_operand:TI 1 "general_operand" ""))]
4664 ;; ""
4665 ;; "
4666 ;;{
4667 ;; rtx insn;
4668 ;;
4669 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4670 ;; operands[1] = copy_to_reg (operands[1]);
4671 ;; if (GET_CODE (operands[0]) == MEM)
4672 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4673 ;; else if (GET_CODE (operands[1]) == MEM)
4674 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4675 ;; else
4676 ;; FAIL;
4677 ;;
4678 ;; emit_insn (insn);
4679 ;; DONE;
4680 ;;}")
4681
4682 ;; Recognize garbage generated above.
4683
4684 ;;(define_insn ""
4685 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4686 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4687 ;; ""
4688 ;; "*
4689 ;; {
4690 ;; register mem = (which_alternative < 3);
4691 ;; register const char *template;
4692 ;;
4693 ;; operands[mem] = XEXP (operands[mem], 0);
4694 ;; switch (which_alternative)
4695 ;; {
4696 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4697 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4698 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4699 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4700 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4701 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4702 ;; }
4703 ;; output_asm_insn (template, operands);
4704 ;; return \"\";
4705 ;; }")
4706
4707 (define_expand "movdi"
4708 [(set (match_operand:DI 0 "general_operand" "")
4709 (match_operand:DI 1 "general_operand" ""))]
4710 "TARGET_EITHER"
4711 "
4712 if (can_create_pseudo_p ())
4713 {
4714 if (GET_CODE (operands[0]) != REG)
4715 operands[1] = force_reg (DImode, operands[1]);
4716 }
4717 "
4718 )
4719
4720 (define_insn "*arm_movdi"
4721 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4722 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4723 "TARGET_ARM
4724 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4725 && !TARGET_IWMMXT
4726 && ( register_operand (operands[0], DImode)
4727 || register_operand (operands[1], DImode))"
4728 "*
4729 switch (which_alternative)
4730 {
4731 case 0:
4732 case 1:
4733 case 2:
4734 return \"#\";
4735 default:
4736 return output_move_double (operands);
4737 }
4738 "
4739 [(set_attr "length" "8,12,16,8,8")
4740 (set_attr "type" "*,*,*,load2,store2")
4741 (set_attr "pool_range" "*,*,*,1020,*")
4742 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4743 )
4744
4745 (define_split
4746 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4747 (match_operand:ANY64 1 "const_double_operand" ""))]
4748 "TARGET_32BIT
4749 && reload_completed
4750 && (arm_const_double_inline_cost (operands[1])
4751 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4752 [(const_int 0)]
4753 "
4754 arm_split_constant (SET, SImode, curr_insn,
4755 INTVAL (gen_lowpart (SImode, operands[1])),
4756 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4757 arm_split_constant (SET, SImode, curr_insn,
4758 INTVAL (gen_highpart_mode (SImode,
4759 GET_MODE (operands[0]),
4760 operands[1])),
4761 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4762 DONE;
4763 "
4764 )
4765
4766 ; If optimizing for size, or if we have load delay slots, then
4767 ; we want to split the constant into two separate operations.
4768 ; In both cases this may split a trivial part into a single data op
4769 ; leaving a single complex constant to load. We can also get longer
4770 ; offsets in a LDR which means we get better chances of sharing the pool
4771 ; entries. Finally, we can normally do a better job of scheduling
4772 ; LDR instructions than we can with LDM.
4773 ; This pattern will only match if the one above did not.
4774 (define_split
4775 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4776 (match_operand:ANY64 1 "const_double_operand" ""))]
4777 "TARGET_ARM && reload_completed
4778 && arm_const_double_by_parts (operands[1])"
4779 [(set (match_dup 0) (match_dup 1))
4780 (set (match_dup 2) (match_dup 3))]
4781 "
4782 operands[2] = gen_highpart (SImode, operands[0]);
4783 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4784 operands[1]);
4785 operands[0] = gen_lowpart (SImode, operands[0]);
4786 operands[1] = gen_lowpart (SImode, operands[1]);
4787 "
4788 )
4789
4790 (define_split
4791 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4792 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4793 "TARGET_EITHER && reload_completed"
4794 [(set (match_dup 0) (match_dup 1))
4795 (set (match_dup 2) (match_dup 3))]
4796 "
4797 operands[2] = gen_highpart (SImode, operands[0]);
4798 operands[3] = gen_highpart (SImode, operands[1]);
4799 operands[0] = gen_lowpart (SImode, operands[0]);
4800 operands[1] = gen_lowpart (SImode, operands[1]);
4801
4802 /* Handle a partial overlap. */
4803 if (rtx_equal_p (operands[0], operands[3]))
4804 {
4805 rtx tmp0 = operands[0];
4806 rtx tmp1 = operands[1];
4807
4808 operands[0] = operands[2];
4809 operands[1] = operands[3];
4810 operands[2] = tmp0;
4811 operands[3] = tmp1;
4812 }
4813 "
4814 )
4815
4816 ;; We can't actually do base+index doubleword loads if the index and
4817 ;; destination overlap.  Split here so that we at least have a chance to
4818 ;; schedule.
4819 (define_split
4820 [(set (match_operand:DI 0 "s_register_operand" "")
4821 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4822 (match_operand:SI 2 "s_register_operand" ""))))]
4823 "TARGET_LDRD
4824 && reg_overlap_mentioned_p (operands[0], operands[1])
4825 && reg_overlap_mentioned_p (operands[0], operands[2])"
4826 [(set (match_dup 4)
4827 (plus:SI (match_dup 1)
4828 (match_dup 2)))
4829 (set (match_dup 0)
4830 (mem:DI (match_dup 4)))]
4831 "
4832 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4833 "
4834 )
4835
4836 ;;; ??? This should have alternatives for constants.
4837 ;;; ??? This was originally identical to the movdf_insn pattern.
4838 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4839 ;;; thumb_reorg with a memory reference.
4840 (define_insn "*thumb1_movdi_insn"
4841 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4842 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4843 "TARGET_THUMB1
4844 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4845 && ( register_operand (operands[0], DImode)
4846 || register_operand (operands[1], DImode))"
4847 "*
4848 {
4849 switch (which_alternative)
4850 {
4851 default:
4852 case 0:
4853 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4854 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4855 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4856 case 1:
4857 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4858 case 2:
4859 operands[1] = GEN_INT (- INTVAL (operands[1]));
4860 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4861 case 3:
4862 return \"ldmia\\t%1, {%0, %H0}\";
4863 case 4:
4864 return \"stmia\\t%0, {%1, %H1}\";
4865 case 5:
4866 return thumb_load_double_from_address (operands);
4867 case 6:
4868 operands[2] = gen_rtx_MEM (SImode,
4869 plus_constant (XEXP (operands[0], 0), 4));
4870 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4871 return \"\";
4872 case 7:
4873 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4874 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4875 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4876 }
4877 }"
4878 [(set_attr "length" "4,4,6,2,2,6,4,4")
4879 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4880 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4881 )
4882
4883 (define_expand "movsi"
4884 [(set (match_operand:SI 0 "general_operand" "")
4885 (match_operand:SI 1 "general_operand" ""))]
4886 "TARGET_EITHER"
4887 "
4888 {
4889 rtx base, offset, tmp;
4890
4891 if (TARGET_32BIT)
4892 {
4893 /* Everything except mem = const or mem = mem can be done easily. */
4894 if (GET_CODE (operands[0]) == MEM)
4895 operands[1] = force_reg (SImode, operands[1]);
4896 if (arm_general_register_operand (operands[0], SImode)
4897 && GET_CODE (operands[1]) == CONST_INT
4898 && !(const_ok_for_arm (INTVAL (operands[1]))
4899 || const_ok_for_arm (~INTVAL (operands[1]))))
4900 {
4901 arm_split_constant (SET, SImode, NULL_RTX,
4902 INTVAL (operands[1]), operands[0], NULL_RTX,
4903 optimize && can_create_pseudo_p ());
4904 DONE;
4905 }
4906
4907 if (TARGET_USE_MOVT && !target_word_relocations
4908 && GET_CODE (operands[1]) == SYMBOL_REF
4909 && !flag_pic && !arm_tls_referenced_p (operands[1]))
4910 {
4911 arm_emit_movpair (operands[0], operands[1]);
4912 DONE;
4913 }
4914 }
4915 else /* TARGET_THUMB1... */
4916 {
4917 if (can_create_pseudo_p ())
4918 {
4919 if (GET_CODE (operands[0]) != REG)
4920 operands[1] = force_reg (SImode, operands[1]);
4921 }
4922 }
4923
4924 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4925 {
4926 split_const (operands[1], &base, &offset);
4927 if (GET_CODE (base) == SYMBOL_REF
4928 && !offset_within_block_p (base, INTVAL (offset)))
4929 {
4930 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4931 emit_move_insn (tmp, base);
4932 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4933 DONE;
4934 }
4935 }
4936
4937 /* Recognize the case where operand[1] is a reference to thread-local
4938 data and load its address to a register. */
4939 if (arm_tls_referenced_p (operands[1]))
4940 {
4941 rtx tmp = operands[1];
4942 rtx addend = NULL;
4943
4944 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4945 {
4946 addend = XEXP (XEXP (tmp, 0), 1);
4947 tmp = XEXP (XEXP (tmp, 0), 0);
4948 }
4949
4950 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4951 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4952
4953 tmp = legitimize_tls_address (tmp,
4954 !can_create_pseudo_p () ? operands[0] : 0);
4955 if (addend)
4956 {
4957 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4958 tmp = force_operand (tmp, operands[0]);
4959 }
4960 operands[1] = tmp;
4961 }
4962 else if (flag_pic
4963 && (CONSTANT_P (operands[1])
4964 || symbol_mentioned_p (operands[1])
4965 || label_mentioned_p (operands[1])))
4966 operands[1] = legitimize_pic_address (operands[1], SImode,
4967 (!can_create_pseudo_p ()
4968 ? operands[0]
4969 : 0));
4970 }
4971 "
4972 )
4973
4974 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
4975 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
4976 ;; so this does not matter.
4977 (define_insn "*arm_movt"
4978 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4979 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
4980 (match_operand:SI 2 "general_operand" "i")))]
4981 "TARGET_32BIT"
4982 "movt%?\t%0, #:upper16:%c2"
4983 [(set_attr "predicable" "yes")
4984 (set_attr "length" "4")]
4985 )
4986
4987 (define_insn "*arm_movw"
4988 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
4989 (high:SI (match_operand:SI 1 "general_operand" "i")))]
4990 "TARGET_32BIT"
4991 "movw%?\t%0, #:lower16:%c1"
4992 [(set_attr "predicable" "yes")
4993 (set_attr "length" "4")]
4994 )
4995
4996 (define_insn "*arm_movsi_insn"
4997 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4998 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4999 "TARGET_ARM && ! TARGET_IWMMXT
5000 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5001 && ( register_operand (operands[0], SImode)
5002 || register_operand (operands[1], SImode))"
5003 "@
5004 mov%?\\t%0, %1
5005 mov%?\\t%0, %1
5006 mvn%?\\t%0, #%B1
5007 movw%?\\t%0, %1
5008 ldr%?\\t%0, %1
5009 str%?\\t%1, %0"
5010 [(set_attr "type" "*,*,*,*,load1,store1")
5011 (set_attr "predicable" "yes")
5012 (set_attr "pool_range" "*,*,*,*,4096,*")
5013 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5014 )
5015
5016 (define_split
5017 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5018 (match_operand:SI 1 "const_int_operand" ""))]
5019 "TARGET_32BIT
5020 && (!(const_ok_for_arm (INTVAL (operands[1]))
5021 || const_ok_for_arm (~INTVAL (operands[1]))))"
5022 [(clobber (const_int 0))]
5023 "
5024 arm_split_constant (SET, SImode, NULL_RTX,
5025 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5026 DONE;
5027 "
5028 )
5029
5030 (define_insn "*thumb1_movsi_insn"
5031 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5032 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5033 "TARGET_THUMB1
5034 && ( register_operand (operands[0], SImode)
5035 || register_operand (operands[1], SImode))"
5036 "@
5037 mov %0, %1
5038 mov %0, %1
5039 #
5040 #
5041 ldmia\\t%1, {%0}
5042 stmia\\t%0, {%1}
5043 ldr\\t%0, %1
5044 str\\t%1, %0
5045 mov\\t%0, %1"
5046 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5047 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5048 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5049 )
5050
5051 (define_split
5052 [(set (match_operand:SI 0 "register_operand" "")
5053 (match_operand:SI 1 "const_int_operand" ""))]
5054 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5055 [(set (match_dup 0) (match_dup 1))
5056 (set (match_dup 0) (neg:SI (match_dup 0)))]
5057 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5058 )
5059
5060 (define_split
5061 [(set (match_operand:SI 0 "register_operand" "")
5062 (match_operand:SI 1 "const_int_operand" ""))]
5063 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5064 [(set (match_dup 0) (match_dup 1))
5065 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5066 "
5067 {
5068 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
5069 unsigned HOST_WIDE_INT mask = 0xff;
5070 int i;
5071
5072 for (i = 0; i < 25; i++)
5073 if ((val & (mask << i)) == val)
5074 break;
5075
5076 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5077 if (i == 0)
5078 FAIL;
5079
5080 operands[1] = GEN_INT (val >> i);
5081 operands[2] = GEN_INT (i);
5082 }"
5083 )
5084
5085 ;; When generating PIC code, we need to load the symbol offset into a register.
5086 ;; So that the optimizer does not confuse this with a normal symbol load
5087 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5088 ;; since that is the only type of relocation we can use.
5089
5090 ;; The rather odd constraints on the following are to force reload to leave
5091 ;; the insn alone, and to force the minipool generation pass to then move
5092 ;; the GOT symbol to memory.
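;; As a rough illustration (the label and symbol names are invented), the
;; insn below typically assembles to a literal-pool load along the lines of:
;;
;;	ldr	r3, .LCP0
;;	...
;; .LCP0:
;;	.word	foo(GOT)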
5093
5094 (define_insn "pic_load_addr_arm"
5095 [(set (match_operand:SI 0 "s_register_operand" "=r")
5096 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5097 "TARGET_ARM && flag_pic"
5098 "ldr%?\\t%0, %1"
5099 [(set_attr "type" "load1")
5100 (set (attr "pool_range") (const_int 4096))
5101 (set (attr "neg_pool_range") (const_int 4084))]
5102 )
5103
5104 (define_insn "pic_load_addr_thumb1"
5105 [(set (match_operand:SI 0 "s_register_operand" "=l")
5106 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5107 "TARGET_THUMB1 && flag_pic"
5108 "ldr\\t%0, %1"
5109 [(set_attr "type" "load1")
5110 (set (attr "pool_range") (const_int 1024))]
5111 )
5112
5113 (define_insn "pic_add_dot_plus_four"
5114 [(set (match_operand:SI 0 "register_operand" "=r")
5115 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5116 (const_int 4)
5117 (match_operand 2 "" "")]
5118 UNSPEC_PIC_BASE))]
5119 "TARGET_THUMB1"
5120 "*
5121 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5122 INTVAL (operands[2]));
5123 return \"add\\t%0, %|pc\";
5124 "
5125 [(set_attr "length" "2")]
5126 )
5127
5128 (define_insn "pic_add_dot_plus_eight"
5129 [(set (match_operand:SI 0 "register_operand" "=r")
5130 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5131 (const_int 8)
5132 (match_operand 2 "" "")]
5133 UNSPEC_PIC_BASE))]
5134 "TARGET_ARM"
5135 "*
5136 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5137 INTVAL (operands[2]));
5138 return \"add%?\\t%0, %|pc, %1\";
5139 "
5140 [(set_attr "predicable" "yes")]
5141 )
5142
5143 (define_insn "tls_load_dot_plus_eight"
5144 [(set (match_operand:SI 0 "register_operand" "+r")
5145 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5146 (const_int 8)
5147 (match_operand 2 "" "")]
5148 UNSPEC_PIC_BASE)))]
5149 "TARGET_ARM"
5150 "*
5151 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5152 INTVAL (operands[2]));
5153 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5154 "
5155 [(set_attr "predicable" "yes")]
5156 )
5157
5158 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5159 ;; followed by a load. These sequences can be crunched down to
5160 ;; tls_load_dot_plus_eight by a peephole.
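;; For example (illustrative register numbers), the peephole turns
;;
;;	add	r3, pc, r2
;;	ldr	r0, [r3]
;;
;; into the single combined load
;;
;;	ldr	r0, [pc, r2]
;;
;; provided r3 is dead after the load.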
5161
5162 (define_peephole2
5163 [(set (match_operand:SI 0 "register_operand" "")
5164 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5165 (const_int 8)
5166 (match_operand 1 "" "")]
5167 UNSPEC_PIC_BASE))
5168 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5169 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5170 [(set (match_dup 2)
5171 (mem:SI (unspec:SI [(match_dup 3)
5172 (const_int 8)
5173 (match_dup 1)]
5174 UNSPEC_PIC_BASE)))]
5175 ""
5176 )
5177
5178 (define_insn "pic_offset_arm"
5179 [(set (match_operand:SI 0 "register_operand" "=r")
5180 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5181 (unspec:SI [(match_operand:SI 2 "" "X")]
5182 UNSPEC_PIC_OFFSET))))]
5183 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5184 "ldr%?\\t%0, [%1,%2]"
5185 [(set_attr "type" "load1")]
5186 )
5187
5188 (define_expand "builtin_setjmp_receiver"
5189 [(label_ref (match_operand 0 "" ""))]
5190 "flag_pic"
5191 "
5192 {
5193 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5194 register. */
5195 if (arm_pic_register != INVALID_REGNUM)
5196 arm_load_pic_register (1UL << 3);
5197 DONE;
5198 }")
5199
5200 ;; If copying one reg to another we can set the condition codes according to
5201 ;; its value. Such a move is common after a return from a subroutine when the
5202 ;; result is being tested against zero.
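;; For example (illustrative registers), instead of emitting
;;
;;	mov	r0, r1
;;	cmp	r0, #0
;;
;; the second alternative below lets the pair collapse to
;;
;;	subs	r0, r1, #0
;;
;; which copies the value and sets the condition codes in one instruction.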
5203
5204 (define_insn "*movsi_compare0"
5205 [(set (reg:CC CC_REGNUM)
5206 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5207 (const_int 0)))
5208 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5209 (match_dup 1))]
5210 "TARGET_32BIT"
5211 "@
5212 cmp%?\\t%0, #0
5213 sub%.\\t%0, %1, #0"
5214 [(set_attr "conds" "set")]
5215 )
5216
5217 ;; Subroutine to store a half word from a register into memory.
5218 ;; Operand 0 is the source register (HImode)
5219 ;; Operand 1 is the destination address in a register (SImode)
5220
5221 ;; In both this routine and the next, we must be careful not to spill
5222 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5223 ;; can generate unrecognizable rtl.
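;; As a little-endian sketch (illustrative registers, not literal output),
;; storing the HImode value in r1 at the address in r0 expands to roughly:
;;
;;	strb	r1, [r0]		@ low byte
;;	mov	r2, r1, asr #8		@ extract high byte
;;	strb	r2, [r0, #1]		@ high byte
;;
;; storehi_bigend below stores the two bytes in the opposite order.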
5224
5225 (define_expand "storehi"
5226 [;; store the low byte
5227 (set (match_operand 1 "" "") (match_dup 3))
5228 ;; extract the high byte
5229 (set (match_dup 2)
5230 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5231 ;; store the high byte
5232 (set (match_dup 4) (match_dup 5))]
5233 "TARGET_ARM"
5234 "
5235 {
5236 rtx op1 = operands[1];
5237 rtx addr = XEXP (op1, 0);
5238 enum rtx_code code = GET_CODE (addr);
5239
5240 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5241 || code == MINUS)
5242 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5243
5244 operands[4] = adjust_address (op1, QImode, 1);
5245 operands[1] = adjust_address (operands[1], QImode, 0);
5246 operands[3] = gen_lowpart (QImode, operands[0]);
5247 operands[0] = gen_lowpart (SImode, operands[0]);
5248 operands[2] = gen_reg_rtx (SImode);
5249 operands[5] = gen_lowpart (QImode, operands[2]);
5250 }"
5251 )
5252
5253 (define_expand "storehi_bigend"
5254 [(set (match_dup 4) (match_dup 3))
5255 (set (match_dup 2)
5256 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5257 (set (match_operand 1 "" "") (match_dup 5))]
5258 "TARGET_ARM"
5259 "
5260 {
5261 rtx op1 = operands[1];
5262 rtx addr = XEXP (op1, 0);
5263 enum rtx_code code = GET_CODE (addr);
5264
5265 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5266 || code == MINUS)
5267 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5268
5269 operands[4] = adjust_address (op1, QImode, 1);
5270 operands[1] = adjust_address (operands[1], QImode, 0);
5271 operands[3] = gen_lowpart (QImode, operands[0]);
5272 operands[0] = gen_lowpart (SImode, operands[0]);
5273 operands[2] = gen_reg_rtx (SImode);
5274 operands[5] = gen_lowpart (QImode, operands[2]);
5275 }"
5276 )
5277
5278 ;; Subroutine to store a half word integer constant into memory.
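;; For example (illustrative registers), storing the constant 0x1234 on a
;; little-endian target moves the two byte values into registers and stores
;; them separately, roughly:
;;
;;	mov	r2, #0x34
;;	mov	r3, #0x12
;;	strb	r2, [r0]
;;	strb	r3, [r0, #1]
;;
;; When the two bytes are equal a single register is reused for both stores.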
5279 (define_expand "storeinthi"
5280 [(set (match_operand 0 "" "")
5281 (match_operand 1 "" ""))
5282 (set (match_dup 3) (match_dup 2))]
5283 "TARGET_ARM"
5284 "
5285 {
5286 HOST_WIDE_INT value = INTVAL (operands[1]);
5287 rtx addr = XEXP (operands[0], 0);
5288 rtx op0 = operands[0];
5289 enum rtx_code code = GET_CODE (addr);
5290
5291 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5292 || code == MINUS)
5293 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5294
5295 operands[1] = gen_reg_rtx (SImode);
5296 if (BYTES_BIG_ENDIAN)
5297 {
5298 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5299 if ((value & 255) == ((value >> 8) & 255))
5300 operands[2] = operands[1];
5301 else
5302 {
5303 operands[2] = gen_reg_rtx (SImode);
5304 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5305 }
5306 }
5307 else
5308 {
5309 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5310 if ((value & 255) == ((value >> 8) & 255))
5311 operands[2] = operands[1];
5312 else
5313 {
5314 operands[2] = gen_reg_rtx (SImode);
5315 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5316 }
5317 }
5318
5319 operands[3] = adjust_address (op0, QImode, 1);
5320 operands[0] = adjust_address (operands[0], QImode, 0);
5321 operands[2] = gen_lowpart (QImode, operands[2]);
5322 operands[1] = gen_lowpart (QImode, operands[1]);
5323 }"
5324 )
5325
5326 (define_expand "storehi_single_op"
5327 [(set (match_operand:HI 0 "memory_operand" "")
5328 (match_operand:HI 1 "general_operand" ""))]
5329 "TARGET_32BIT && arm_arch4"
5330 "
5331 if (!s_register_operand (operands[1], HImode))
5332 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5333 "
5334 )
5335
5336 (define_expand "movhi"
5337 [(set (match_operand:HI 0 "general_operand" "")
5338 (match_operand:HI 1 "general_operand" ""))]
5339 "TARGET_EITHER"
5340 "
5341 if (TARGET_ARM)
5342 {
5343 if (can_create_pseudo_p ())
5344 {
5345 if (GET_CODE (operands[0]) == MEM)
5346 {
5347 if (arm_arch4)
5348 {
5349 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5350 DONE;
5351 }
5352 if (GET_CODE (operands[1]) == CONST_INT)
5353 emit_insn (gen_storeinthi (operands[0], operands[1]));
5354 else
5355 {
5356 if (GET_CODE (operands[1]) == MEM)
5357 operands[1] = force_reg (HImode, operands[1]);
5358 if (BYTES_BIG_ENDIAN)
5359 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5360 else
5361 emit_insn (gen_storehi (operands[1], operands[0]));
5362 }
5363 DONE;
5364 }
5365 /* Sign extend a constant, and keep it in an SImode reg. */
5366 else if (GET_CODE (operands[1]) == CONST_INT)
5367 {
5368 rtx reg = gen_reg_rtx (SImode);
5369 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5370
5371 /* If the constant is already valid, leave it alone. */
5372 if (!const_ok_for_arm (val))
5373 {
5374 /* If setting all the top bits will make the constant
5375 loadable in a single instruction, then set them.
5376 Otherwise, sign extend the number. */
5377
5378 if (const_ok_for_arm (~(val | ~0xffff)))
5379 val |= ~0xffff;
5380 else if (val & 0x8000)
5381 val |= ~0xffff;
5382 }
5383
5384 emit_insn (gen_movsi (reg, GEN_INT (val)));
5385 operands[1] = gen_lowpart (HImode, reg);
5386 }
5387 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5388 && GET_CODE (operands[1]) == MEM)
5389 {
5390 rtx reg = gen_reg_rtx (SImode);
5391
5392 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5393 operands[1] = gen_lowpart (HImode, reg);
5394 }
5395 else if (!arm_arch4)
5396 {
5397 if (GET_CODE (operands[1]) == MEM)
5398 {
5399 rtx base;
5400 rtx offset = const0_rtx;
5401 rtx reg = gen_reg_rtx (SImode);
5402
5403 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5404 || (GET_CODE (base) == PLUS
5405 && (GET_CODE (offset = XEXP (base, 1))
5406 == CONST_INT)
5407 && ((INTVAL(offset) & 1) != 1)
5408 && GET_CODE (base = XEXP (base, 0)) == REG))
5409 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5410 {
5411 rtx new_rtx;
5412
5413 new_rtx = widen_memory_access (operands[1], SImode,
5414 ((INTVAL (offset) & ~3)
5415 - INTVAL (offset)));
5416 emit_insn (gen_movsi (reg, new_rtx));
5417 if (((INTVAL (offset) & 2) != 0)
5418 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5419 {
5420 rtx reg2 = gen_reg_rtx (SImode);
5421
5422 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5423 reg = reg2;
5424 }
5425 }
5426 else
5427 emit_insn (gen_movhi_bytes (reg, operands[1]));
5428
5429 operands[1] = gen_lowpart (HImode, reg);
5430 }
5431 }
5432 }
5433 /* Handle loading a large integer during reload. */
5434 else if (GET_CODE (operands[1]) == CONST_INT
5435 && !const_ok_for_arm (INTVAL (operands[1]))
5436 && !const_ok_for_arm (~INTVAL (operands[1])))
5437 {
5438 /* Writing a constant to memory needs a scratch, which should
5439 be handled with SECONDARY_RELOADs. */
5440 gcc_assert (GET_CODE (operands[0]) == REG);
5441
5442 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5443 emit_insn (gen_movsi (operands[0], operands[1]));
5444 DONE;
5445 }
5446 }
5447 else if (TARGET_THUMB2)
5448 {
5449 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5450 if (can_create_pseudo_p ())
5451 {
5452 if (GET_CODE (operands[0]) != REG)
5453 operands[1] = force_reg (HImode, operands[1]);
5454 /* Zero extend a constant, and keep it in an SImode reg. */
5455 else if (GET_CODE (operands[1]) == CONST_INT)
5456 {
5457 rtx reg = gen_reg_rtx (SImode);
5458 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5459
5460 emit_insn (gen_movsi (reg, GEN_INT (val)));
5461 operands[1] = gen_lowpart (HImode, reg);
5462 }
5463 }
5464 }
5465 else /* TARGET_THUMB1 */
5466 {
5467 if (can_create_pseudo_p ())
5468 {
5469 if (GET_CODE (operands[1]) == CONST_INT)
5470 {
5471 rtx reg = gen_reg_rtx (SImode);
5472
5473 emit_insn (gen_movsi (reg, operands[1]));
5474 operands[1] = gen_lowpart (HImode, reg);
5475 }
5476
5477 /* ??? We shouldn't really get invalid addresses here, but this can
5478 happen if we are passed an SP-relative address (never OK for
5479 HImode/QImode) or a virtual-register-relative address (rejected by
5480 GO_IF_LEGITIMATE_ADDRESS for HImode/QImode). */
5481 /* ??? This should perhaps be fixed elsewhere, for instance, in
5482 fixup_stack_1, by checking for other kinds of invalid addresses,
5483 e.g. a bare reference to a virtual register. This may confuse the
5484 alpha though, which must handle this case differently. */
5485 if (GET_CODE (operands[0]) == MEM
5486 && !memory_address_p (GET_MODE (operands[0]),
5487 XEXP (operands[0], 0)))
5488 operands[0]
5489 = replace_equiv_address (operands[0],
5490 copy_to_reg (XEXP (operands[0], 0)));
5491
5492 if (GET_CODE (operands[1]) == MEM
5493 && !memory_address_p (GET_MODE (operands[1]),
5494 XEXP (operands[1], 0)))
5495 operands[1]
5496 = replace_equiv_address (operands[1],
5497 copy_to_reg (XEXP (operands[1], 0)));
5498
5499 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5500 {
5501 rtx reg = gen_reg_rtx (SImode);
5502
5503 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5504 operands[1] = gen_lowpart (HImode, reg);
5505 }
5506
5507 if (GET_CODE (operands[0]) == MEM)
5508 operands[1] = force_reg (HImode, operands[1]);
5509 }
5510 else if (GET_CODE (operands[1]) == CONST_INT
5511 && !satisfies_constraint_I (operands[1]))
5512 {
5513 /* Handle loading a large integer during reload. */
5514
5515 /* Writing a constant to memory needs a scratch, which should
5516 be handled with SECONDARY_RELOADs. */
5517 gcc_assert (GET_CODE (operands[0]) == REG);
5518
5519 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5520 emit_insn (gen_movsi (operands[0], operands[1]));
5521 DONE;
5522 }
5523 }
5524 "
5525 )
5526
5527 (define_insn "*thumb1_movhi_insn"
5528 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5529 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5530 "TARGET_THUMB1
5531 && ( register_operand (operands[0], HImode)
5532 || register_operand (operands[1], HImode))"
5533 "*
5534 switch (which_alternative)
5535 {
5536 case 0: return \"add %0, %1, #0\";
5537 case 2: return \"strh %1, %0\";
5538 case 3: return \"mov %0, %1\";
5539 case 4: return \"mov %0, %1\";
5540 case 5: return \"mov %0, %1\";
5541 default: gcc_unreachable ();
5542 case 1:
5543 /* The stack pointer can end up being taken as an index register.
5544 Catch this case here and deal with it. */
5545 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5546 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5547 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5548 {
5549 rtx ops[2];
5550 ops[0] = operands[0];
5551 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5552
5553 output_asm_insn (\"mov %0, %1\", ops);
5554
5555 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5556
5557 }
5558 return \"ldrh %0, %1\";
5559 }"
5560 [(set_attr "length" "2,4,2,2,2,2")
5561 (set_attr "type" "*,load1,store1,*,*,*")]
5562 )
5563
5564
5565 (define_expand "movhi_bytes"
5566 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5567 (set (match_dup 3)
5568 (zero_extend:SI (match_dup 6)))
5569 (set (match_operand:SI 0 "" "")
5570 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5571 "TARGET_ARM"
5572 "
5573 {
5574 rtx mem1, mem2;
5575 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5576
5577 mem1 = change_address (operands[1], QImode, addr);
5578 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5579 operands[0] = gen_lowpart (SImode, operands[0]);
5580 operands[1] = mem1;
5581 operands[2] = gen_reg_rtx (SImode);
5582 operands[3] = gen_reg_rtx (SImode);
5583 operands[6] = mem2;
5584
5585 if (BYTES_BIG_ENDIAN)
5586 {
5587 operands[4] = operands[2];
5588 operands[5] = operands[3];
5589 }
5590 else
5591 {
5592 operands[4] = operands[3];
5593 operands[5] = operands[2];
5594 }
5595 }"
5596 )
5597
5598 (define_expand "movhi_bigend"
5599 [(set (match_dup 2)
5600 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5601 (const_int 16)))
5602 (set (match_dup 3)
5603 (ashiftrt:SI (match_dup 2) (const_int 16)))
5604 (set (match_operand:HI 0 "s_register_operand" "")
5605 (match_dup 4))]
5606 "TARGET_ARM"
5607 "
5608 operands[2] = gen_reg_rtx (SImode);
5609 operands[3] = gen_reg_rtx (SImode);
5610 operands[4] = gen_lowpart (HImode, operands[3]);
5611 "
5612 )
5613
5614 ;; Pattern to recognize the insn generated by the default case above.
5615 (define_insn "*movhi_insn_arch4"
5616 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5617 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5618 "TARGET_ARM
5619 && arm_arch4
5620 && (GET_CODE (operands[1]) != CONST_INT
5621 || const_ok_for_arm (INTVAL (operands[1]))
5622 || const_ok_for_arm (~INTVAL (operands[1])))"
5623 "@
5624 mov%?\\t%0, %1\\t%@ movhi
5625 mvn%?\\t%0, #%B1\\t%@ movhi
5626 str%(h%)\\t%1, %0\\t%@ movhi
5627 ldr%(h%)\\t%0, %1\\t%@ movhi"
5628 [(set_attr "type" "*,*,store1,load1")
5629 (set_attr "predicable" "yes")
5630 (set_attr "pool_range" "*,*,*,256")
5631 (set_attr "neg_pool_range" "*,*,*,244")]
5632 )
5633
5634 (define_insn "*movhi_bytes"
5635 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5636 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5637 "TARGET_ARM"
5638 "@
5639 mov%?\\t%0, %1\\t%@ movhi
5640 mvn%?\\t%0, #%B1\\t%@ movhi"
5641 [(set_attr "predicable" "yes")]
5642 )
5643
5644 (define_expand "thumb_movhi_clobber"
5645 [(set (match_operand:HI 0 "memory_operand" "")
5646 (match_operand:HI 1 "register_operand" ""))
5647 (clobber (match_operand:DI 2 "register_operand" ""))]
5648 "TARGET_THUMB1"
5649 "
5650 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5651 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5652 {
5653 emit_insn (gen_movhi (operands[0], operands[1]));
5654 DONE;
5655 }
5656 /* XXX Fixme, need to handle other cases here as well. */
5657 gcc_unreachable ();
5658 "
5659 )
5660
5661 ;; We use a DImode scratch because we may occasionally need an additional
5662 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5663 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5664 (define_expand "reload_outhi"
5665 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5666 (match_operand:HI 1 "s_register_operand" "r")
5667 (match_operand:DI 2 "s_register_operand" "=&l")])]
5668 "TARGET_EITHER"
5669 "if (TARGET_ARM)
5670 arm_reload_out_hi (operands);
5671 else
5672 thumb_reload_out_hi (operands);
5673 DONE;
5674 "
5675 )
5676
5677 (define_expand "reload_inhi"
5678 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5679 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5680 (match_operand:DI 2 "s_register_operand" "=&r")])]
5681 "TARGET_EITHER"
5682 "
5683 if (TARGET_ARM)
5684 arm_reload_in_hi (operands);
5685 else
5686 thumb_reload_out_hi (operands);
5687 DONE;
5688 ")
5689
5690 (define_expand "movqi"
5691 [(set (match_operand:QI 0 "general_operand" "")
5692 (match_operand:QI 1 "general_operand" ""))]
5693 "TARGET_EITHER"
5694 "
5695 /* Everything except mem = const or mem = mem can be done easily. */
5696
5697 if (can_create_pseudo_p ())
5698 {
5699 if (GET_CODE (operands[1]) == CONST_INT)
5700 {
5701 rtx reg = gen_reg_rtx (SImode);
5702
5703 emit_insn (gen_movsi (reg, operands[1]));
5704 operands[1] = gen_lowpart (QImode, reg);
5705 }
5706
5707 if (TARGET_THUMB)
5708 {
5709 /* ??? We shouldn't really get invalid addresses here, but this can
5710 happen if we are passed an SP-relative address (never OK for
5711 HImode/QImode) or a virtual-register-relative address (rejected by
5712 GO_IF_LEGITIMATE_ADDRESS for HImode/QImode). */
5713 /* ??? This should perhaps be fixed elsewhere, for instance, in
5714 fixup_stack_1, by checking for other kinds of invalid addresses,
5715 e.g. a bare reference to a virtual register. This may confuse the
5716 alpha though, which must handle this case differently. */
5717 if (GET_CODE (operands[0]) == MEM
5718 && !memory_address_p (GET_MODE (operands[0]),
5719 XEXP (operands[0], 0)))
5720 operands[0]
5721 = replace_equiv_address (operands[0],
5722 copy_to_reg (XEXP (operands[0], 0)));
5723 if (GET_CODE (operands[1]) == MEM
5724 && !memory_address_p (GET_MODE (operands[1]),
5725 XEXP (operands[1], 0)))
5726 operands[1]
5727 = replace_equiv_address (operands[1],
5728 copy_to_reg (XEXP (operands[1], 0)));
5729 }
5730
5731 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5732 {
5733 rtx reg = gen_reg_rtx (SImode);
5734
5735 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5736 operands[1] = gen_lowpart (QImode, reg);
5737 }
5738
5739 if (GET_CODE (operands[0]) == MEM)
5740 operands[1] = force_reg (QImode, operands[1]);
5741 }
5742 else if (TARGET_THUMB
5743 && GET_CODE (operands[1]) == CONST_INT
5744 && !satisfies_constraint_I (operands[1]))
5745 {
5746 /* Handle loading a large integer during reload. */
5747
5748 /* Writing a constant to memory needs a scratch, which should
5749 be handled with SECONDARY_RELOADs. */
5750 gcc_assert (GET_CODE (operands[0]) == REG);
5751
5752 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5753 emit_insn (gen_movsi (operands[0], operands[1]));
5754 DONE;
5755 }
5756 "
5757 )
5758
5759
5760 (define_insn "*arm_movqi_insn"
5761 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5762 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5763 "TARGET_32BIT
5764 && ( register_operand (operands[0], QImode)
5765 || register_operand (operands[1], QImode))"
5766 "@
5767 mov%?\\t%0, %1
5768 mvn%?\\t%0, #%B1
5769 ldr%(b%)\\t%0, %1
5770 str%(b%)\\t%1, %0"
5771 [(set_attr "type" "*,*,load1,store1")
5772 (set_attr "predicable" "yes")]
5773 )
5774
5775 (define_insn "*thumb1_movqi_insn"
5776 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5777 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5778 "TARGET_THUMB1
5779 && ( register_operand (operands[0], QImode)
5780 || register_operand (operands[1], QImode))"
5781 "@
5782 add\\t%0, %1, #0
5783 ldrb\\t%0, %1
5784 strb\\t%1, %0
5785 mov\\t%0, %1
5786 mov\\t%0, %1
5787 mov\\t%0, %1"
5788 [(set_attr "length" "2")
5789 (set_attr "type" "*,load1,store1,*,*,*")
5790 (set_attr "pool_range" "*,32,*,*,*,*")]
5791 )
5792
5793 (define_expand "movsf"
5794 [(set (match_operand:SF 0 "general_operand" "")
5795 (match_operand:SF 1 "general_operand" ""))]
5796 "TARGET_EITHER"
5797 "
5798 if (TARGET_32BIT)
5799 {
5800 if (GET_CODE (operands[0]) == MEM)
5801 operands[1] = force_reg (SFmode, operands[1]);
5802 }
5803 else /* TARGET_THUMB1 */
5804 {
5805 if (can_create_pseudo_p ())
5806 {
5807 if (GET_CODE (operands[0]) != REG)
5808 operands[1] = force_reg (SFmode, operands[1]);
5809 }
5810 }
5811 "
5812 )
5813
5814 ;; Transform a floating-point move of a constant into a core register into
5815 ;; an SImode operation.
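;; For example (a sketch, not a literal RTL dump), after reload a move of the
;; SFmode constant 1.0 into a core register is rewritten using the bit
;; pattern of the constant:
;;
;;	(set (reg:SF r0) (const_double 1.0))
;; becomes roughly
;;	(set (reg:SI r0) (const_int 0x3f800000))
;;
;; which the SImode constant handling above can then deal with.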
5816 (define_split
5817 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5818 (match_operand:SF 1 "immediate_operand" ""))]
5819 "TARGET_32BIT
5820 && reload_completed
5821 && GET_CODE (operands[1]) == CONST_DOUBLE"
5822 [(set (match_dup 2) (match_dup 3))]
5823 "
5824 operands[2] = gen_lowpart (SImode, operands[0]);
5825 operands[3] = gen_lowpart (SImode, operands[1]);
5826 if (operands[2] == 0 || operands[3] == 0)
5827 FAIL;
5828 "
5829 )
5830
5831 (define_insn "*arm_movsf_soft_insn"
5832 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5833 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5834 "TARGET_ARM
5835 && TARGET_SOFT_FLOAT
5836 && (GET_CODE (operands[0]) != MEM
5837 || register_operand (operands[1], SFmode))"
5838 "@
5839 mov%?\\t%0, %1
5840 ldr%?\\t%0, %1\\t%@ float
5841 str%?\\t%1, %0\\t%@ float"
5842 [(set_attr "length" "4,4,4")
5843 (set_attr "predicable" "yes")
5844 (set_attr "type" "*,load1,store1")
5845 (set_attr "pool_range" "*,4096,*")
5846 (set_attr "neg_pool_range" "*,4084,*")]
5847 )
5848
5849 ;;; ??? This should have alternatives for constants.
5850 (define_insn "*thumb1_movsf_insn"
5851 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5852 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5853 "TARGET_THUMB1
5854 && ( register_operand (operands[0], SFmode)
5855 || register_operand (operands[1], SFmode))"
5856 "@
5857 add\\t%0, %1, #0
5858 ldmia\\t%1, {%0}
5859 stmia\\t%0, {%1}
5860 ldr\\t%0, %1
5861 str\\t%1, %0
5862 mov\\t%0, %1
5863 mov\\t%0, %1"
5864 [(set_attr "length" "2")
5865 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5866 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5867 )
5868
5869 (define_expand "movdf"
5870 [(set (match_operand:DF 0 "general_operand" "")
5871 (match_operand:DF 1 "general_operand" ""))]
5872 "TARGET_EITHER"
5873 "
5874 if (TARGET_32BIT)
5875 {
5876 if (GET_CODE (operands[0]) == MEM)
5877 operands[1] = force_reg (DFmode, operands[1]);
5878 }
5879 else /* TARGET_THUMB */
5880 {
5881 if (can_create_pseudo_p ())
5882 {
5883 if (GET_CODE (operands[0]) != REG)
5884 operands[1] = force_reg (DFmode, operands[1]);
5885 }
5886 }
5887 "
5888 )
5889
5890 ;; Reloading a df mode value stored in integer regs to memory can require a
5891 ;; scratch reg.
5892 (define_expand "reload_outdf"
5893 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5894 (match_operand:DF 1 "s_register_operand" "r")
5895 (match_operand:SI 2 "s_register_operand" "=&r")]
5896 "TARGET_32BIT"
5897 "
5898 {
5899 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5900
5901 if (code == REG)
5902 operands[2] = XEXP (operands[0], 0);
5903 else if (code == POST_INC || code == PRE_DEC)
5904 {
5905 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5906 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5907 emit_insn (gen_movdi (operands[0], operands[1]));
5908 DONE;
5909 }
5910 else if (code == PRE_INC)
5911 {
5912 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5913
5914 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5915 operands[2] = reg;
5916 }
5917 else if (code == POST_DEC)
5918 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5919 else
5920 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5921 XEXP (XEXP (operands[0], 0), 1)));
5922
5923 emit_insn (gen_rtx_SET (VOIDmode,
5924 replace_equiv_address (operands[0], operands[2]),
5925 operands[1]));
5926
5927 if (code == POST_DEC)
5928 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5929
5930 DONE;
5931 }"
5932 )
5933
5934 (define_insn "*movdf_soft_insn"
5935 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5936 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5937 "TARGET_ARM && TARGET_SOFT_FLOAT
5938 && ( register_operand (operands[0], DFmode)
5939 || register_operand (operands[1], DFmode))"
5940 "*
5941 switch (which_alternative)
5942 {
5943 case 0:
5944 case 1:
5945 case 2:
5946 return \"#\";
5947 default:
5948 return output_move_double (operands);
5949 }
5950 "
5951 [(set_attr "length" "8,12,16,8,8")
5952 (set_attr "type" "*,*,*,load2,store2")
5953 (set_attr "pool_range" "1020")
5954 (set_attr "neg_pool_range" "1008")]
5955 )
5956
5957 ;;; ??? This should have alternatives for constants.
5958 ;;; ??? This was originally identical to the movdi_insn pattern.
5959 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5960 ;;; thumb_reorg with a memory reference.
5961 (define_insn "*thumb_movdf_insn"
5962 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5963 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5964 "TARGET_THUMB1
5965 && ( register_operand (operands[0], DFmode)
5966 || register_operand (operands[1], DFmode))"
5967 "*
5968 switch (which_alternative)
5969 {
5970 default:
5971 case 0:
5972 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5973 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5974 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5975 case 1:
5976 return \"ldmia\\t%1, {%0, %H0}\";
5977 case 2:
5978 return \"stmia\\t%0, {%1, %H1}\";
5979 case 3:
5980 return thumb_load_double_from_address (operands);
5981 case 4:
5982 operands[2] = gen_rtx_MEM (SImode,
5983 plus_constant (XEXP (operands[0], 0), 4));
5984 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5985 return \"\";
5986 case 5:
5987 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5988 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5989 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5990 }
5991 "
5992 [(set_attr "length" "4,2,2,6,4,4")
5993 (set_attr "type" "*,load2,store2,load2,store2,*")
5994 (set_attr "pool_range" "*,*,*,1020,*,*")]
5995 )
5996
5997 (define_expand "movxf"
5998 [(set (match_operand:XF 0 "general_operand" "")
5999 (match_operand:XF 1 "general_operand" ""))]
6000 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6001 "
6002 if (GET_CODE (operands[0]) == MEM)
6003 operands[1] = force_reg (XFmode, operands[1]);
6004 "
6005 )
6006
6007 \f
6008
6009 ;; load- and store-multiple insns
6010 ;; The ARM can load/store any set of registers, provided that they are in
6011 ;; ascending order; but that generality is beyond GCC, so stick with what it knows.
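;; For example (illustrative registers), loading four consecutive words from
;; the address in r0 into r4-r7 is a single instruction:
;;
;;	ldmia	r0, {r4, r5, r6, r7}
;;
;; and the corresponding store is stmia r0, {r4, r5, r6, r7}.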
6012
6013 (define_expand "load_multiple"
6014 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6015 (match_operand:SI 1 "" ""))
6016 (use (match_operand:SI 2 "" ""))])]
6017 "TARGET_32BIT"
6018 {
6019 HOST_WIDE_INT offset = 0;
6020
6021 /* Support only fixed point registers. */
6022 if (GET_CODE (operands[2]) != CONST_INT
6023 || INTVAL (operands[2]) > 14
6024 || INTVAL (operands[2]) < 2
6025 || GET_CODE (operands[1]) != MEM
6026 || GET_CODE (operands[0]) != REG
6027 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6028 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6029 FAIL;
6030
6031 operands[3]
6032 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6033 force_reg (SImode, XEXP (operands[1], 0)),
6034 TRUE, FALSE, operands[1], &offset);
6035 })
6036
6037 ;; Load multiple with write-back
6038
6039 (define_insn "*ldmsi_postinc4"
6040 [(match_parallel 0 "load_multiple_operation"
6041 [(set (match_operand:SI 1 "s_register_operand" "=r")
6042 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6043 (const_int 16)))
6044 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6045 (mem:SI (match_dup 2)))
6046 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6047 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6048 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6049 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6050 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6051 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6052 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6053 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6054 [(set_attr "type" "load4")
6055 (set_attr "predicable" "yes")]
6056 )
6057
6058 (define_insn "*ldmsi_postinc4_thumb1"
6059 [(match_parallel 0 "load_multiple_operation"
6060 [(set (match_operand:SI 1 "s_register_operand" "=l")
6061 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6062 (const_int 16)))
6063 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6064 (mem:SI (match_dup 2)))
6065 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6066 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6067 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6068 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6069 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6070 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6071 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6072 "ldmia\\t%1!, {%3, %4, %5, %6}"
6073 [(set_attr "type" "load4")]
6074 )
6075
6076 (define_insn "*ldmsi_postinc3"
6077 [(match_parallel 0 "load_multiple_operation"
6078 [(set (match_operand:SI 1 "s_register_operand" "=r")
6079 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6080 (const_int 12)))
6081 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6082 (mem:SI (match_dup 2)))
6083 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6084 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6085 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6086 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6087 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6088 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6089 [(set_attr "type" "load3")
6090 (set_attr "predicable" "yes")]
6091 )
6092
6093 (define_insn "*ldmsi_postinc2"
6094 [(match_parallel 0 "load_multiple_operation"
6095 [(set (match_operand:SI 1 "s_register_operand" "=r")
6096 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6097 (const_int 8)))
6098 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6099 (mem:SI (match_dup 2)))
6100 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6101 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6102 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6103 "ldm%(ia%)\\t%1!, {%3, %4}"
6104 [(set_attr "type" "load2")
6105 (set_attr "predicable" "yes")]
6106 )
6107
6108 ;; Ordinary load multiple
6109
6110 (define_insn "*ldmsi4"
6111 [(match_parallel 0 "load_multiple_operation"
6112 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6113 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6114 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6115 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6116 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6117 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6118 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6119 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6120 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6121 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6122 [(set_attr "type" "load4")
6123 (set_attr "predicable" "yes")]
6124 )
6125
6126 (define_insn "*ldmsi3"
6127 [(match_parallel 0 "load_multiple_operation"
6128 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6129 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6130 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6131 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6132 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6133 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6134 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6135 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6136 [(set_attr "type" "load3")
6137 (set_attr "predicable" "yes")]
6138 )
6139
6140 (define_insn "*ldmsi2"
6141 [(match_parallel 0 "load_multiple_operation"
6142 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6143 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6144 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6145 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6146 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6147 "ldm%(ia%)\\t%1, {%2, %3}"
6148 [(set_attr "type" "load2")
6149 (set_attr "predicable" "yes")]
6150 )
6151
6152 (define_expand "store_multiple"
6153 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6154 (match_operand:SI 1 "" ""))
6155 (use (match_operand:SI 2 "" ""))])]
6156 "TARGET_32BIT"
6157 {
6158 HOST_WIDE_INT offset = 0;
6159
6160 /* Support only fixed point registers. */
6161 if (GET_CODE (operands[2]) != CONST_INT
6162 || INTVAL (operands[2]) > 14
6163 || INTVAL (operands[2]) < 2
6164 || GET_CODE (operands[1]) != REG
6165 || GET_CODE (operands[0]) != MEM
6166 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6167 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6168 FAIL;
6169
6170 operands[3]
6171 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6172 force_reg (SImode, XEXP (operands[0], 0)),
6173 TRUE, FALSE, operands[0], &offset);
6174 })
6175
6176 ;; Store multiple with write-back
6177
6178 (define_insn "*stmsi_postinc4"
6179 [(match_parallel 0 "store_multiple_operation"
6180 [(set (match_operand:SI 1 "s_register_operand" "=r")
6181 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6182 (const_int 16)))
6183 (set (mem:SI (match_dup 2))
6184 (match_operand:SI 3 "arm_hard_register_operand" ""))
6185 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6186 (match_operand:SI 4 "arm_hard_register_operand" ""))
6187 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6188 (match_operand:SI 5 "arm_hard_register_operand" ""))
6189 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6190 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6191 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6192 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6193 [(set_attr "predicable" "yes")
6194 (set_attr "type" "store4")]
6195 )
6196
6197 (define_insn "*stmsi_postinc4_thumb1"
6198 [(match_parallel 0 "store_multiple_operation"
6199 [(set (match_operand:SI 1 "s_register_operand" "=l")
6200 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6201 (const_int 16)))
6202 (set (mem:SI (match_dup 2))
6203 (match_operand:SI 3 "arm_hard_register_operand" ""))
6204 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6205 (match_operand:SI 4 "arm_hard_register_operand" ""))
6206 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6207 (match_operand:SI 5 "arm_hard_register_operand" ""))
6208 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6209 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6210 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6211 "stmia\\t%1!, {%3, %4, %5, %6}"
6212 [(set_attr "type" "store4")]
6213 )
6214
6215 (define_insn "*stmsi_postinc3"
6216 [(match_parallel 0 "store_multiple_operation"
6217 [(set (match_operand:SI 1 "s_register_operand" "=r")
6218 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6219 (const_int 12)))
6220 (set (mem:SI (match_dup 2))
6221 (match_operand:SI 3 "arm_hard_register_operand" ""))
6222 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6223 (match_operand:SI 4 "arm_hard_register_operand" ""))
6224 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6225 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6226 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6227 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6228 [(set_attr "predicable" "yes")
6229 (set_attr "type" "store3")]
6230 )
6231
6232 (define_insn "*stmsi_postinc2"
6233 [(match_parallel 0 "store_multiple_operation"
6234 [(set (match_operand:SI 1 "s_register_operand" "=r")
6235 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6236 (const_int 8)))
6237 (set (mem:SI (match_dup 2))
6238 (match_operand:SI 3 "arm_hard_register_operand" ""))
6239 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6240 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6241 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6242 "stm%(ia%)\\t%1!, {%3, %4}"
6243 [(set_attr "predicable" "yes")
6244 (set_attr "type" "store2")]
6245 )
6246
6247 ;; Ordinary store multiple
6248
6249 (define_insn "*stmsi4"
6250 [(match_parallel 0 "store_multiple_operation"
6251 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6252 (match_operand:SI 2 "arm_hard_register_operand" ""))
6253 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6254 (match_operand:SI 3 "arm_hard_register_operand" ""))
6255 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6256 (match_operand:SI 4 "arm_hard_register_operand" ""))
6257 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6258 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6259 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6260 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6261 [(set_attr "predicable" "yes")
6262 (set_attr "type" "store4")]
6263 )
6264
6265 (define_insn "*stmsi3"
6266 [(match_parallel 0 "store_multiple_operation"
6267 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6268 (match_operand:SI 2 "arm_hard_register_operand" ""))
6269 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6270 (match_operand:SI 3 "arm_hard_register_operand" ""))
6271 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6272 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6273 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6274 "stm%(ia%)\\t%1, {%2, %3, %4}"
6275 [(set_attr "predicable" "yes")
6276 (set_attr "type" "store3")]
6277 )
6278
6279 (define_insn "*stmsi2"
6280 [(match_parallel 0 "store_multiple_operation"
6281 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6282 (match_operand:SI 2 "arm_hard_register_operand" ""))
6283 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6284 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6285 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6286 "stm%(ia%)\\t%1, {%2, %3}"
6287 [(set_attr "predicable" "yes")
6288 (set_attr "type" "store2")]
6289 )
6290
6291 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6292 ;; We could let this apply for blocks of less than this, but it clobbers so
6293 ;; many registers that there is then probably a better way.
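;; As a Thumb-1 sketch (illustrative registers), a 12-byte word-aligned copy
;; is emitted roughly as a pair of multiple transfers with write-back:
;;
;;	ldmia	r1!, {r3, r4, r5}
;;	stmia	r0!, {r3, r4, r5}
;;
;; which is approximately what thumb_output_move_mem_multiple produces for
;; the movmem12b pattern below.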
6294
6295 (define_expand "movmemqi"
6296 [(match_operand:BLK 0 "general_operand" "")
6297 (match_operand:BLK 1 "general_operand" "")
6298 (match_operand:SI 2 "const_int_operand" "")
6299 (match_operand:SI 3 "const_int_operand" "")]
6300 "TARGET_EITHER"
6301 "
6302 if (TARGET_32BIT)
6303 {
6304 if (arm_gen_movmemqi (operands))
6305 DONE;
6306 FAIL;
6307 }
6308 else /* TARGET_THUMB1 */
6309 {
6310 if ( INTVAL (operands[3]) != 4
6311 || INTVAL (operands[2]) > 48)
6312 FAIL;
6313
6314 thumb_expand_movmemqi (operands);
6315 DONE;
6316 }
6317 "
6318 )
6319
6320 ;; Thumb block-move insns
6321
6322 (define_insn "movmem12b"
6323 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6324 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6325 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6326 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6327 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6328 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6329 (set (match_operand:SI 0 "register_operand" "=l")
6330 (plus:SI (match_dup 2) (const_int 12)))
6331 (set (match_operand:SI 1 "register_operand" "=l")
6332 (plus:SI (match_dup 3) (const_int 12)))
6333 (clobber (match_scratch:SI 4 "=&l"))
6334 (clobber (match_scratch:SI 5 "=&l"))
6335 (clobber (match_scratch:SI 6 "=&l"))]
6336 "TARGET_THUMB1"
6337 "* return thumb_output_move_mem_multiple (3, operands);"
6338 [(set_attr "length" "4")
6339 ; This isn't entirely accurate... It loads as well, but in terms of
6340 ; scheduling the following insn it is better to consider it as a store
6341 (set_attr "type" "store3")]
6342 )
6343
6344 (define_insn "movmem8b"
6345 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6346 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6347 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6348 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6349 (set (match_operand:SI 0 "register_operand" "=l")
6350 (plus:SI (match_dup 2) (const_int 8)))
6351 (set (match_operand:SI 1 "register_operand" "=l")
6352 (plus:SI (match_dup 3) (const_int 8)))
6353 (clobber (match_scratch:SI 4 "=&l"))
6354 (clobber (match_scratch:SI 5 "=&l"))]
6355 "TARGET_THUMB1"
6356 "* return thumb_output_move_mem_multiple (2, operands);"
6357 [(set_attr "length" "4")
6358 ; This isn't entirely accurate... It loads as well, but in terms of
6359 ; scheduling the following insn it is better to consider it as a store
6360 (set_attr "type" "store2")]
6361 )
6362
6363 \f
6364
6365 ;; Compare & branch insns
6366 ;; The range calculations are as follows:
6367 ;; For forward branches, the address calculation returns the address of
6368 ;; the next instruction. This is 2 beyond the branch instruction.
6369 ;; For backward branches, the address calculation returns the address of
6370 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6371 ;; instruction for the shortest sequence, and 4 before the branch instruction
6372 ;; if we have to jump around an unconditional branch.
6373 ;; To the basic branch range the PC offset must be added (this is +4).
6374 ;; So for forward branches we have
6375 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6376 ;; And for backward branches we have
6377 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6378 ;;
6379 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6380 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250->256).
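;; Worked example: for 'b<cond>' the forward limit is 254 - 2 + 4 = 256 and
;; the backward limit is -256 - (-2) + 4 = -250; for the jump-around form the
;; unconditional 'b' sits 4 bytes into the sequence, giving 2046 - 2 + 4 = 2048
;; and -2048 - (-4) + 4 = -2040.  These bounds appear in the length
;; attributes of the branch patterns below.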
6381
6382 (define_expand "cbranchsi4"
6383 [(set (pc) (if_then_else
6384 (match_operator 0 "arm_comparison_operator"
6385 [(match_operand:SI 1 "s_register_operand" "")
6386 (match_operand:SI 2 "nonmemory_operand" "")])
6387 (label_ref (match_operand 3 "" ""))
6388 (pc)))]
6389 "TARGET_THUMB1"
6390 "
6391 if (thumb1_cmpneg_operand (operands[2], SImode))
6392 {
6393 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6394 operands[3], operands[0]));
6395 DONE;
6396 }
6397 if (!thumb1_cmp_operand (operands[2], SImode))
6398 operands[2] = force_reg (SImode, operands[2]);
6399 ")
6400
6401 (define_insn "*cbranchsi4_insn"
6402 [(set (pc) (if_then_else
6403 (match_operator 0 "arm_comparison_operator"
6404 [(match_operand:SI 1 "s_register_operand" "l,*h")
6405 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6406 (label_ref (match_operand 3 "" ""))
6407 (pc)))]
6408 "TARGET_THUMB1"
6409 "*
6410 output_asm_insn (\"cmp\\t%1, %2\", operands);
6411
6412 switch (get_attr_length (insn))
6413 {
6414 case 4: return \"b%d0\\t%l3\";
6415 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6416 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6417 }
6418 "
6419 [(set (attr "far_jump")
6420 (if_then_else
6421 (eq_attr "length" "8")
6422 (const_string "yes")
6423 (const_string "no")))
6424 (set (attr "length")
6425 (if_then_else
6426 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6427 (le (minus (match_dup 3) (pc)) (const_int 256)))
6428 (const_int 4)
6429 (if_then_else
6430 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6431 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6432 (const_int 6)
6433 (const_int 8))))]
6434 )
6435
6436 (define_insn "cbranchsi4_scratch"
6437 [(set (pc) (if_then_else
6438 (match_operator 4 "arm_comparison_operator"
6439 [(match_operand:SI 1 "s_register_operand" "l,0")
6440 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6441 (label_ref (match_operand 3 "" ""))
6442 (pc)))
6443 (clobber (match_scratch:SI 0 "=l,l"))]
6444 "TARGET_THUMB1"
6445 "*
6446 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6447
6448 switch (get_attr_length (insn))
6449 {
6450 case 4: return \"b%d4\\t%l3\";
6451 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6452 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6453 }
6454 "
6455 [(set (attr "far_jump")
6456 (if_then_else
6457 (eq_attr "length" "8")
6458 (const_string "yes")
6459 (const_string "no")))
6460 (set (attr "length")
6461 (if_then_else
6462 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6463 (le (minus (match_dup 3) (pc)) (const_int 256)))
6464 (const_int 4)
6465 (if_then_else
6466 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6467 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6468 (const_int 6)
6469 (const_int 8))))]
6470 )
6471 (define_insn "*movsi_cbranchsi4"
6472 [(set (pc)
6473 (if_then_else
6474 (match_operator 3 "arm_comparison_operator"
6475 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6476 (const_int 0)])
6477 (label_ref (match_operand 2 "" ""))
6478 (pc)))
6479 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6480 (match_dup 1))]
6481 "TARGET_THUMB1"
6482 "*{
6483 if (which_alternative == 0)
6484 output_asm_insn (\"cmp\t%0, #0\", operands);
6485 else if (which_alternative == 1)
6486 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6487 else
6488 {
6489 output_asm_insn (\"cmp\t%1, #0\", operands);
6490 if (which_alternative == 2)
6491 output_asm_insn (\"mov\t%0, %1\", operands);
6492 else
6493 output_asm_insn (\"str\t%1, %0\", operands);
6494 }
6495 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6496 {
6497 case 4: return \"b%d3\\t%l2\";
6498 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6499 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6500 }
6501 }"
6502 [(set (attr "far_jump")
6503 (if_then_else
6504 (ior (and (gt (symbol_ref ("which_alternative"))
6505 (const_int 1))
6506 (eq_attr "length" "8"))
6507 (eq_attr "length" "10"))
6508 (const_string "yes")
6509 (const_string "no")))
6510 (set (attr "length")
6511 (if_then_else
6512 (le (symbol_ref ("which_alternative"))
6513 (const_int 1))
6514 (if_then_else
6515 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6516 (le (minus (match_dup 2) (pc)) (const_int 256)))
6517 (const_int 4)
6518 (if_then_else
6519 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6520 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6521 (const_int 6)
6522 (const_int 8)))
6523 (if_then_else
6524 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6525 (le (minus (match_dup 2) (pc)) (const_int 256)))
6526 (const_int 6)
6527 (if_then_else
6528 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6529 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6530 (const_int 8)
6531 (const_int 10)))))]
6532 )
6533
6534 (define_insn "*negated_cbranchsi4"
6535 [(set (pc)
6536 (if_then_else
6537 (match_operator 0 "equality_operator"
6538 [(match_operand:SI 1 "s_register_operand" "l")
6539 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6540 (label_ref (match_operand 3 "" ""))
6541 (pc)))]
6542 "TARGET_THUMB1"
6543 "*
6544 output_asm_insn (\"cmn\\t%1, %2\", operands);
6545 switch (get_attr_length (insn))
6546 {
6547 case 4: return \"b%d0\\t%l3\";
6548 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6549 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6550 }
6551 "
6552 [(set (attr "far_jump")
6553 (if_then_else
6554 (eq_attr "length" "8")
6555 (const_string "yes")
6556 (const_string "no")))
6557 (set (attr "length")
6558 (if_then_else
6559 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6560 (le (minus (match_dup 3) (pc)) (const_int 256)))
6561 (const_int 4)
6562 (if_then_else
6563 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6564 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6565 (const_int 6)
6566 (const_int 8))))]
6567 )
6568
6569 (define_insn "*tbit_cbranch"
6570 [(set (pc)
6571 (if_then_else
6572 (match_operator 0 "equality_operator"
6573 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6574 (const_int 1)
6575 (match_operand:SI 2 "const_int_operand" "i"))
6576 (const_int 0)])
6577 (label_ref (match_operand 3 "" ""))
6578 (pc)))
6579 (clobber (match_scratch:SI 4 "=l"))]
6580 "TARGET_THUMB1"
6581 "*
6582 {
6583 rtx op[3];
6584 op[0] = operands[4];
6585 op[1] = operands[1];
6586 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6587
6588 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6589 switch (get_attr_length (insn))
6590 {
6591 case 4: return \"b%d0\\t%l3\";
6592 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6593 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6594 }
6595 }"
6596 [(set (attr "far_jump")
6597 (if_then_else
6598 (eq_attr "length" "8")
6599 (const_string "yes")
6600 (const_string "no")))
6601 (set (attr "length")
6602 (if_then_else
6603 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6604 (le (minus (match_dup 3) (pc)) (const_int 256)))
6605 (const_int 4)
6606 (if_then_else
6607 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6608 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6609 (const_int 6)
6610 (const_int 8))))]
6611 )
6612
6613 (define_insn "*tlobits_cbranch"
6614 [(set (pc)
6615 (if_then_else
6616 (match_operator 0 "equality_operator"
6617 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6618 (match_operand:SI 2 "const_int_operand" "i")
6619 (const_int 0))
6620 (const_int 0)])
6621 (label_ref (match_operand 3 "" ""))
6622 (pc)))
6623 (clobber (match_scratch:SI 4 "=l"))]
6624 "TARGET_THUMB1"
6625 "*
6626 {
6627 rtx op[3];
6628 op[0] = operands[4];
6629 op[1] = operands[1];
6630 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6631
6632 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6633 switch (get_attr_length (insn))
6634 {
6635 case 4: return \"b%d0\\t%l3\";
6636 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6637 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6638 }
6639 }"
6640 [(set (attr "far_jump")
6641 (if_then_else
6642 (eq_attr "length" "8")
6643 (const_string "yes")
6644 (const_string "no")))
6645 (set (attr "length")
6646 (if_then_else
6647 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6648 (le (minus (match_dup 3) (pc)) (const_int 256)))
6649 (const_int 4)
6650 (if_then_else
6651 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6652 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6653 (const_int 6)
6654 (const_int 8))))]
6655 )
6656
6657 (define_insn "*tstsi3_cbranch"
6658 [(set (pc)
6659 (if_then_else
6660 (match_operator 3 "equality_operator"
6661 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6662 (match_operand:SI 1 "s_register_operand" "l"))
6663 (const_int 0)])
6664 (label_ref (match_operand 2 "" ""))
6665 (pc)))]
6666 "TARGET_THUMB1"
6667 "*
6668 {
6669 output_asm_insn (\"tst\\t%0, %1\", operands);
6670 switch (get_attr_length (insn))
6671 {
6672 case 4: return \"b%d3\\t%l2\";
6673 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6674 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6675 }
6676 }"
6677 [(set (attr "far_jump")
6678 (if_then_else
6679 (eq_attr "length" "8")
6680 (const_string "yes")
6681 (const_string "no")))
6682 (set (attr "length")
6683 (if_then_else
6684 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6685 (le (minus (match_dup 2) (pc)) (const_int 256)))
6686 (const_int 4)
6687 (if_then_else
6688 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6689 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6690 (const_int 6)
6691 (const_int 8))))]
6692 )
6693
6694 (define_insn "*andsi3_cbranch"
6695 [(set (pc)
6696 (if_then_else
6697 (match_operator 5 "equality_operator"
6698 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6699 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6700 (const_int 0)])
6701 (label_ref (match_operand 4 "" ""))
6702 (pc)))
6703 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6704 (and:SI (match_dup 2) (match_dup 3)))
6705 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6706 "TARGET_THUMB1"
6707 "*
6708 {
6709 if (which_alternative == 0)
6710 output_asm_insn (\"and\\t%0, %3\", operands);
6711 else if (which_alternative == 1)
6712 {
6713 output_asm_insn (\"and\\t%1, %3\", operands);
6714 output_asm_insn (\"mov\\t%0, %1\", operands);
6715 }
6716 else
6717 {
6718 output_asm_insn (\"and\\t%1, %3\", operands);
6719 output_asm_insn (\"str\\t%1, %0\", operands);
6720 }
6721
6722 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6723 {
6724 case 4: return \"b%d5\\t%l4\";
6725 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6726 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6727 }
6728 }"
6729 [(set (attr "far_jump")
6730 (if_then_else
6731 (ior (and (eq (symbol_ref ("which_alternative"))
6732 (const_int 0))
6733 (eq_attr "length" "8"))
6734 (eq_attr "length" "10"))
6735 (const_string "yes")
6736 (const_string "no")))
6737 (set (attr "length")
6738 (if_then_else
6739 (eq (symbol_ref ("which_alternative"))
6740 (const_int 0))
6741 (if_then_else
6742 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6743 (le (minus (match_dup 4) (pc)) (const_int 256)))
6744 (const_int 4)
6745 (if_then_else
6746 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6747 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6748 (const_int 6)
6749 (const_int 8)))
6750 (if_then_else
6751 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6752 (le (minus (match_dup 4) (pc)) (const_int 256)))
6753 (const_int 6)
6754 (if_then_else
6755 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6756 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6757 (const_int 8)
6758 (const_int 10)))))]
6759 )
6760
6761 (define_insn "*orrsi3_cbranch_scratch"
6762 [(set (pc)
6763 (if_then_else
6764 (match_operator 4 "equality_operator"
6765 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6766 (match_operand:SI 2 "s_register_operand" "l"))
6767 (const_int 0)])
6768 (label_ref (match_operand 3 "" ""))
6769 (pc)))
6770 (clobber (match_scratch:SI 0 "=l"))]
6771 "TARGET_THUMB1"
6772 "*
6773 {
6774 output_asm_insn (\"orr\\t%0, %2\", operands);
6775 switch (get_attr_length (insn))
6776 {
6777 case 4: return \"b%d4\\t%l3\";
6778 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6779 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6780 }
6781 }"
6782 [(set (attr "far_jump")
6783 (if_then_else
6784 (eq_attr "length" "8")
6785 (const_string "yes")
6786 (const_string "no")))
6787 (set (attr "length")
6788 (if_then_else
6789 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6790 (le (minus (match_dup 3) (pc)) (const_int 256)))
6791 (const_int 4)
6792 (if_then_else
6793 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6794 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6795 (const_int 6)
6796 (const_int 8))))]
6797 )
6798
6799 (define_insn "*orrsi3_cbranch"
6800 [(set (pc)
6801 (if_then_else
6802 (match_operator 5 "equality_operator"
6803 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6804 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6805 (const_int 0)])
6806 (label_ref (match_operand 4 "" ""))
6807 (pc)))
6808 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6809 (ior:SI (match_dup 2) (match_dup 3)))
6810 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6811 "TARGET_THUMB1"
6812 "*
6813 {
6814 if (which_alternative == 0)
6815 output_asm_insn (\"orr\\t%0, %3\", operands);
6816 else if (which_alternative == 1)
6817 {
6818 output_asm_insn (\"orr\\t%1, %3\", operands);
6819 output_asm_insn (\"mov\\t%0, %1\", operands);
6820 }
6821 else
6822 {
6823 output_asm_insn (\"orr\\t%1, %3\", operands);
6824 output_asm_insn (\"str\\t%1, %0\", operands);
6825 }
6826
6827 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6828 {
6829 case 4: return \"b%d5\\t%l4\";
6830 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6831 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6832 }
6833 }"
6834 [(set (attr "far_jump")
6835 (if_then_else
6836 (ior (and (eq (symbol_ref ("which_alternative"))
6837 (const_int 0))
6838 (eq_attr "length" "8"))
6839 (eq_attr "length" "10"))
6840 (const_string "yes")
6841 (const_string "no")))
6842 (set (attr "length")
6843 (if_then_else
6844 (eq (symbol_ref ("which_alternative"))
6845 (const_int 0))
6846 (if_then_else
6847 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6848 (le (minus (match_dup 4) (pc)) (const_int 256)))
6849 (const_int 4)
6850 (if_then_else
6851 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6852 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6853 (const_int 6)
6854 (const_int 8)))
6855 (if_then_else
6856 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6857 (le (minus (match_dup 4) (pc)) (const_int 256)))
6858 (const_int 6)
6859 (if_then_else
6860 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6861 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6862 (const_int 8)
6863 (const_int 10)))))]
6864 )
6865
6866 (define_insn "*xorsi3_cbranch_scratch"
6867 [(set (pc)
6868 (if_then_else
6869 (match_operator 4 "equality_operator"
6870 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6871 (match_operand:SI 2 "s_register_operand" "l"))
6872 (const_int 0)])
6873 (label_ref (match_operand 3 "" ""))
6874 (pc)))
6875 (clobber (match_scratch:SI 0 "=l"))]
6876 "TARGET_THUMB1"
6877 "*
6878 {
6879 output_asm_insn (\"eor\\t%0, %2\", operands);
6880 switch (get_attr_length (insn))
6881 {
6882 case 4: return \"b%d4\\t%l3\";
6883 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6884 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6885 }
6886 }"
6887 [(set (attr "far_jump")
6888 (if_then_else
6889 (eq_attr "length" "8")
6890 (const_string "yes")
6891 (const_string "no")))
6892 (set (attr "length")
6893 (if_then_else
6894 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6895 (le (minus (match_dup 3) (pc)) (const_int 256)))
6896 (const_int 4)
6897 (if_then_else
6898 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6899 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6900 (const_int 6)
6901 (const_int 8))))]
6902 )
6903
6904 (define_insn "*xorsi3_cbranch"
6905 [(set (pc)
6906 (if_then_else
6907 (match_operator 5 "equality_operator"
6908 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6909 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6910 (const_int 0)])
6911 (label_ref (match_operand 4 "" ""))
6912 (pc)))
6913 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6914 (xor:SI (match_dup 2) (match_dup 3)))
6915 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6916 "TARGET_THUMB1"
6917 "*
6918 {
6919 if (which_alternative == 0)
6920 output_asm_insn (\"eor\\t%0, %3\", operands);
6921 else if (which_alternative == 1)
6922 {
6923 output_asm_insn (\"eor\\t%1, %3\", operands);
6924 output_asm_insn (\"mov\\t%0, %1\", operands);
6925 }
6926 else
6927 {
6928 output_asm_insn (\"eor\\t%1, %3\", operands);
6929 output_asm_insn (\"str\\t%1, %0\", operands);
6930 }
6931
6932 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6933 {
6934 case 4: return \"b%d5\\t%l4\";
6935 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6936 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6937 }
6938 }"
6939 [(set (attr "far_jump")
6940 (if_then_else
6941 (ior (and (eq (symbol_ref ("which_alternative"))
6942 (const_int 0))
6943 (eq_attr "length" "8"))
6944 (eq_attr "length" "10"))
6945 (const_string "yes")
6946 (const_string "no")))
6947 (set (attr "length")
6948 (if_then_else
6949 (eq (symbol_ref ("which_alternative"))
6950 (const_int 0))
6951 (if_then_else
6952 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6953 (le (minus (match_dup 4) (pc)) (const_int 256)))
6954 (const_int 4)
6955 (if_then_else
6956 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6957 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6958 (const_int 6)
6959 (const_int 8)))
6960 (if_then_else
6961 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6962 (le (minus (match_dup 4) (pc)) (const_int 256)))
6963 (const_int 6)
6964 (if_then_else
6965 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6966 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6967 (const_int 8)
6968 (const_int 10)))))]
6969 )
6970
6971 (define_insn "*bicsi3_cbranch_scratch"
6972 [(set (pc)
6973 (if_then_else
6974 (match_operator 4 "equality_operator"
6975 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6976 (match_operand:SI 1 "s_register_operand" "0"))
6977 (const_int 0)])
6978 (label_ref (match_operand 3 "" ""))
6979 (pc)))
6980 (clobber (match_scratch:SI 0 "=l"))]
6981 "TARGET_THUMB1"
6982 "*
6983 {
6984 output_asm_insn (\"bic\\t%0, %2\", operands);
6985 switch (get_attr_length (insn))
6986 {
6987 case 4: return \"b%d4\\t%l3\";
6988 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6989 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6990 }
6991 }"
6992 [(set (attr "far_jump")
6993 (if_then_else
6994 (eq_attr "length" "8")
6995 (const_string "yes")
6996 (const_string "no")))
6997 (set (attr "length")
6998 (if_then_else
6999 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7000 (le (minus (match_dup 3) (pc)) (const_int 256)))
7001 (const_int 4)
7002 (if_then_else
7003 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7004 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7005 (const_int 6)
7006 (const_int 8))))]
7007 )
7008
7009 (define_insn "*bicsi3_cbranch"
7010 [(set (pc)
7011 (if_then_else
7012 (match_operator 5 "equality_operator"
7013 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7014 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7015 (const_int 0)])
7016 (label_ref (match_operand 4 "" ""))
7017 (pc)))
7018 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7019 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7020 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7021 "TARGET_THUMB1"
7022 "*
7023 {
7024 if (which_alternative == 0)
7025 output_asm_insn (\"bic\\t%0, %3\", operands);
7026 else if (which_alternative <= 2)
7027 {
7028 output_asm_insn (\"bic\\t%1, %3\", operands);
7029 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7030 conditions again, since we're only testing for equality. */
7031 output_asm_insn (\"mov\\t%0, %1\", operands);
7032 }
7033 else
7034 {
7035 output_asm_insn (\"bic\\t%1, %3\", operands);
7036 output_asm_insn (\"str\\t%1, %0\", operands);
7037 }
7038
7039 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7040 {
7041 case 4: return \"b%d5\\t%l4\";
7042 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7043 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7044 }
7045 }"
7046 [(set (attr "far_jump")
7047 (if_then_else
7048 (ior (and (eq (symbol_ref ("which_alternative"))
7049 (const_int 0))
7050 (eq_attr "length" "8"))
7051 (eq_attr "length" "10"))
7052 (const_string "yes")
7053 (const_string "no")))
7054 (set (attr "length")
7055 (if_then_else
7056 (eq (symbol_ref ("which_alternative"))
7057 (const_int 0))
7058 (if_then_else
7059 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7060 (le (minus (match_dup 4) (pc)) (const_int 256)))
7061 (const_int 4)
7062 (if_then_else
7063 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7064 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7065 (const_int 6)
7066 (const_int 8)))
7067 (if_then_else
7068 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7069 (le (minus (match_dup 4) (pc)) (const_int 256)))
7070 (const_int 6)
7071 (if_then_else
7072 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7073 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7074 (const_int 8)
7075 (const_int 10)))))]
7076 )
7077
7078 (define_insn "*cbranchne_decr1"
7079 [(set (pc)
7080 (if_then_else (match_operator 3 "equality_operator"
7081 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7082 (const_int 0)])
7083 (label_ref (match_operand 4 "" ""))
7084 (pc)))
7085 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7086 (plus:SI (match_dup 2) (const_int -1)))
7087 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7088 "TARGET_THUMB1"
7089 "*
7090 {
7091 rtx cond[2];
7092 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7093 ? GEU : LTU),
7094 VOIDmode, operands[2], const1_rtx);
7095 cond[1] = operands[4];
7096
7097 if (which_alternative == 0)
7098 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7099 else if (which_alternative == 1)
7100 {
7101 /* We must provide an alternative for a hi reg because reload
7102 cannot handle output reloads on a jump instruction, but we
7103 can't subtract into that. Fortunately a mov from lo to hi
7104 does not clobber the condition codes. */
7105 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7106 output_asm_insn (\"mov\\t%0, %1\", operands);
7107 }
7108 else
7109 {
7110 /* Similarly, but the target is memory. */
7111 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7112 output_asm_insn (\"str\\t%1, %0\", operands);
7113 }
7114
7115 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7116 {
7117 case 4:
7118 output_asm_insn (\"b%d0\\t%l1\", cond);
7119 return \"\";
7120 case 6:
7121 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7122 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7123 default:
7124 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7125 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7126 }
7127 }
7128 "
7129 [(set (attr "far_jump")
7130 (if_then_else
7131 (ior (and (eq (symbol_ref ("which_alternative"))
7132 (const_int 0))
7133 (eq_attr "length" "8"))
7134 (eq_attr "length" "10"))
7135 (const_string "yes")
7136 (const_string "no")))
7137 (set_attr_alternative "length"
7138 [
7139 ;; Alternative 0
7140 (if_then_else
7141 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7142 (le (minus (match_dup 4) (pc)) (const_int 256)))
7143 (const_int 4)
7144 (if_then_else
7145 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7146 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7147 (const_int 6)
7148 (const_int 8)))
7149 ;; Alternative 1
7150 (if_then_else
7151 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7152 (le (minus (match_dup 4) (pc)) (const_int 256)))
7153 (const_int 6)
7154 (if_then_else
7155 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7156 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7157 (const_int 8)
7158 (const_int 10)))
7159 ;; Alternative 2
7160 (if_then_else
7161 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7162 (le (minus (match_dup 4) (pc)) (const_int 256)))
7163 (const_int 6)
7164 (if_then_else
7165 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7166 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7167 (const_int 8)
7168 (const_int 10)))
7169 ;; Alternative 3
7170 (if_then_else
7171 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7172 (le (minus (match_dup 4) (pc)) (const_int 256)))
7173 (const_int 6)
7174 (if_then_else
7175 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7176 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7177 (const_int 8)
7178 (const_int 10)))])]
7179 )
7180
7181 (define_insn "*addsi3_cbranch"
7182 [(set (pc)
7183 (if_then_else
7184 (match_operator 4 "comparison_operator"
7185 [(plus:SI
7186 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7187 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7188 (const_int 0)])
7189 (label_ref (match_operand 5 "" ""))
7190 (pc)))
7191 (set
7192 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7193 (plus:SI (match_dup 2) (match_dup 3)))
7194 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7195 "TARGET_THUMB1
7196 && (GET_CODE (operands[4]) == EQ
7197 || GET_CODE (operands[4]) == NE
7198 || GET_CODE (operands[4]) == GE
7199 || GET_CODE (operands[4]) == LT)"
7200 "*
7201 {
7202 rtx cond[3];
7203
7205 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7206 cond[1] = operands[2];
7207 cond[2] = operands[3];
7208
7209 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7210 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7211 else
7212 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7213
7214 if (which_alternative >= 3
7215 && which_alternative < 4)
7216 output_asm_insn (\"mov\\t%0, %1\", operands);
7217 else if (which_alternative >= 4)
7218 output_asm_insn (\"str\\t%1, %0\", operands);
7219
7220 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7221 {
7222 case 4:
7223 return \"b%d4\\t%l5\";
7224 case 6:
7225 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7226 default:
7227 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7228 }
7229 }
7230 "
7231 [(set (attr "far_jump")
7232 (if_then_else
7233 (ior (and (lt (symbol_ref ("which_alternative"))
7234 (const_int 3))
7235 (eq_attr "length" "8"))
7236 (eq_attr "length" "10"))
7237 (const_string "yes")
7238 (const_string "no")))
7239 (set (attr "length")
7240 (if_then_else
7241 (lt (symbol_ref ("which_alternative"))
7242 (const_int 3))
7243 (if_then_else
7244 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7245 (le (minus (match_dup 5) (pc)) (const_int 256)))
7246 (const_int 4)
7247 (if_then_else
7248 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7249 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7250 (const_int 6)
7251 (const_int 8)))
7252 (if_then_else
7253 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7254 (le (minus (match_dup 5) (pc)) (const_int 256)))
7255 (const_int 6)
7256 (if_then_else
7257 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7258 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7259 (const_int 8)
7260 (const_int 10)))))]
7261 )
7262
7263 (define_insn "*addsi3_cbranch_scratch"
7264 [(set (pc)
7265 (if_then_else
7266 (match_operator 3 "comparison_operator"
7267 [(plus:SI
7268 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7269 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7270 (const_int 0)])
7271 (label_ref (match_operand 4 "" ""))
7272 (pc)))
7273 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7274 "TARGET_THUMB1
7275 && (GET_CODE (operands[3]) == EQ
7276 || GET_CODE (operands[3]) == NE
7277 || GET_CODE (operands[3]) == GE
7278 || GET_CODE (operands[3]) == LT)"
7279 "*
7280 {
7281 switch (which_alternative)
7282 {
7283 case 0:
7284 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7285 break;
7286 case 1:
7287 output_asm_insn (\"cmn\t%1, %2\", operands);
7288 break;
7289 case 2:
7290 if (INTVAL (operands[2]) < 0)
7291 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7292 else
7293 output_asm_insn (\"add\t%0, %1, %2\", operands);
7294 break;
7295 case 3:
7296 if (INTVAL (operands[2]) < 0)
7297 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7298 else
7299 output_asm_insn (\"add\t%0, %0, %2\", operands);
7300 break;
7301 }
7302
7303 switch (get_attr_length (insn))
7304 {
7305 case 4:
7306 return \"b%d3\\t%l4\";
7307 case 6:
7308 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7309 default:
7310 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7311 }
7312 }
7313 "
7314 [(set (attr "far_jump")
7315 (if_then_else
7316 (eq_attr "length" "8")
7317 (const_string "yes")
7318 (const_string "no")))
7319 (set (attr "length")
7320 (if_then_else
7321 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7322 (le (minus (match_dup 4) (pc)) (const_int 256)))
7323 (const_int 4)
7324 (if_then_else
7325 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7326 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7327 (const_int 6)
7328 (const_int 8))))]
7329 )
7330
7331 (define_insn "*subsi3_cbranch"
7332 [(set (pc)
7333 (if_then_else
7334 (match_operator 4 "comparison_operator"
7335 [(minus:SI
7336 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7337 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7338 (const_int 0)])
7339 (label_ref (match_operand 5 "" ""))
7340 (pc)))
7341 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7342 (minus:SI (match_dup 2) (match_dup 3)))
7343 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7344 "TARGET_THUMB1
7345 && (GET_CODE (operands[4]) == EQ
7346 || GET_CODE (operands[4]) == NE
7347 || GET_CODE (operands[4]) == GE
7348 || GET_CODE (operands[4]) == LT)"
7349 "*
7350 {
7351 if (which_alternative == 0)
7352 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7353 else if (which_alternative == 1)
7354 {
7355 /* We must provide an alternative for a hi reg because reload
7356 cannot handle output reloads on a jump instruction, but we
7357 can't subtract into that. Fortunately a mov from lo to hi
7358 does not clobber the condition codes. */
7359 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7360 output_asm_insn (\"mov\\t%0, %1\", operands);
7361 }
7362 else
7363 {
7364 /* Similarly, but the target is memory. */
7365 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7366 output_asm_insn (\"str\\t%1, %0\", operands);
7367 }
7368
7369 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7370 {
7371 case 4:
7372 return \"b%d4\\t%l5\";
7373 case 6:
7374 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7375 default:
7376 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7377 }
7378 }
7379 "
7380 [(set (attr "far_jump")
7381 (if_then_else
7382 (ior (and (eq (symbol_ref ("which_alternative"))
7383 (const_int 0))
7384 (eq_attr "length" "8"))
7385 (eq_attr "length" "10"))
7386 (const_string "yes")
7387 (const_string "no")))
7388 (set (attr "length")
7389 (if_then_else
7390 (eq (symbol_ref ("which_alternative"))
7391 (const_int 0))
7392 (if_then_else
7393 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7394 (le (minus (match_dup 5) (pc)) (const_int 256)))
7395 (const_int 4)
7396 (if_then_else
7397 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7398 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7399 (const_int 6)
7400 (const_int 8)))
7401 (if_then_else
7402 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7403 (le (minus (match_dup 5) (pc)) (const_int 256)))
7404 (const_int 6)
7405 (if_then_else
7406 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7407 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7408 (const_int 8)
7409 (const_int 10)))))]
7410 )
7411
7412 (define_insn "*subsi3_cbranch_scratch"
7413 [(set (pc)
7414 (if_then_else
7415 (match_operator 0 "arm_comparison_operator"
7416 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7417 (match_operand:SI 2 "nonmemory_operand" "l"))
7418 (const_int 0)])
7419 (label_ref (match_operand 3 "" ""))
7420 (pc)))]
7421 "TARGET_THUMB1
7422 && (GET_CODE (operands[0]) == EQ
7423 || GET_CODE (operands[0]) == NE
7424 || GET_CODE (operands[0]) == GE
7425 || GET_CODE (operands[0]) == LT)"
7426 "*
7427 output_asm_insn (\"cmp\\t%1, %2\", operands);
7428 switch (get_attr_length (insn))
7429 {
7430 case 4: return \"b%d0\\t%l3\";
7431 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7432 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7433 }
7434 "
7435 [(set (attr "far_jump")
7436 (if_then_else
7437 (eq_attr "length" "8")
7438 (const_string "yes")
7439 (const_string "no")))
7440 (set (attr "length")
7441 (if_then_else
7442 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7443 (le (minus (match_dup 3) (pc)) (const_int 256)))
7444 (const_int 4)
7445 (if_then_else
7446 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7447 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7448 (const_int 6)
7449 (const_int 8))))]
7450 )
7451
7452 ;; Comparison and test insns
7453
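;; The cmpMODE expanders below do not emit any code themselves; they only
;; record their operands in arm_compare_op0/arm_compare_op1.  The real
;; comparison is emitted later, by the branch, scc and conditional-move
;; expanders, via arm_gen_compare_reg.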
7454 (define_expand "cmpsi"
7455 [(match_operand:SI 0 "s_register_operand" "")
7456 (match_operand:SI 1 "arm_add_operand" "")]
7457 "TARGET_32BIT"
7458 "{
7459 arm_compare_op0 = operands[0];
7460 arm_compare_op1 = operands[1];
7461 DONE;
7462 }"
7463 )
7464
7465 (define_expand "cmpsf"
7466 [(match_operand:SF 0 "s_register_operand" "")
7467 (match_operand:SF 1 "arm_float_compare_operand" "")]
7468 "TARGET_32BIT && TARGET_HARD_FLOAT"
7469 "
7470 arm_compare_op0 = operands[0];
7471 arm_compare_op1 = operands[1];
7472 DONE;
7473 "
7474 )
7475
7476 (define_expand "cmpdf"
7477 [(match_operand:DF 0 "s_register_operand" "")
7478 (match_operand:DF 1 "arm_float_compare_operand" "")]
7479 "TARGET_32BIT && TARGET_HARD_FLOAT"
7480 "
7481 arm_compare_op0 = operands[0];
7482 arm_compare_op1 = operands[1];
7483 DONE;
7484 "
7485 )
7486
7487 (define_insn "*arm_cmpsi_insn"
7488 [(set (reg:CC CC_REGNUM)
7489 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7490 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7491 "TARGET_32BIT"
7492 "@
7493 cmp%?\\t%0, %1
7494 cmn%?\\t%0, #%n1"
7495 [(set_attr "conds" "set")]
7496 )
7497
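;; Compare with a shifted operand in a single instruction,
;; e.g. "cmp r0, r1, lsl #2".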
7498 (define_insn "*arm_cmpsi_shiftsi"
7499 [(set (reg:CC CC_REGNUM)
7500 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7501 (match_operator:SI 3 "shift_operator"
7502 [(match_operand:SI 1 "s_register_operand" "r")
7503 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7504 "TARGET_ARM"
7505 "cmp%?\\t%0, %1%S3"
7506 [(set_attr "conds" "set")
7507 (set_attr "shift" "1")
7508 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7509 (const_string "alu_shift")
7510 (const_string "alu_shift_reg")))]
7511 )
7512
7513 (define_insn "*arm_cmpsi_shiftsi_swp"
7514 [(set (reg:CC_SWP CC_REGNUM)
7515 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7516 [(match_operand:SI 1 "s_register_operand" "r")
7517 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7518 (match_operand:SI 0 "s_register_operand" "r")))]
7519 "TARGET_ARM"
7520 "cmp%?\\t%0, %1%S3"
7521 [(set_attr "conds" "set")
7522 (set_attr "shift" "1")
7523 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7524 (const_string "alu_shift")
7525 (const_string "alu_shift_reg")))]
7526 )
7527
7528 (define_insn "*arm_cmpsi_negshiftsi_si"
7529 [(set (reg:CC_Z CC_REGNUM)
7530 (compare:CC_Z
7531 (neg:SI (match_operator:SI 1 "shift_operator"
7532 [(match_operand:SI 2 "s_register_operand" "r")
7533 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7534 (match_operand:SI 0 "s_register_operand" "r")))]
7535 "TARGET_ARM"
7536 "cmn%?\\t%0, %2%S1"
7537 [(set_attr "conds" "set")
7538 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7539 (const_string "alu_shift")
7540 (const_string "alu_shift_reg")))]
7541 )
7542
7543 ;; Cirrus SF compare instruction
7544 (define_insn "*cirrus_cmpsf"
7545 [(set (reg:CCFP CC_REGNUM)
7546 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7547 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7548 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7549 "cfcmps%?\\tr15, %V0, %V1"
7550 [(set_attr "type" "mav_farith")
7551 (set_attr "cirrus" "compare")]
7552 )
7553
7554 ;; Cirrus DF compare instruction
7555 (define_insn "*cirrus_cmpdf"
7556 [(set (reg:CCFP CC_REGNUM)
7557 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7558 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7559 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7560 "cfcmpd%?\\tr15, %V0, %V1"
7561 [(set_attr "type" "mav_farith")
7562 (set_attr "cirrus" "compare")]
7563 )
7564
7565 ;; Cirrus DI compare instruction
7566 (define_expand "cmpdi"
7567 [(match_operand:DI 0 "cirrus_fp_register" "")
7568 (match_operand:DI 1 "cirrus_fp_register" "")]
7569 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7570 "{
7571 arm_compare_op0 = operands[0];
7572 arm_compare_op1 = operands[1];
7573 DONE;
7574 }")
7575
7576 (define_insn "*cirrus_cmpdi"
7577 [(set (reg:CC CC_REGNUM)
7578 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7579 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7580 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7581 "cfcmp64%?\\tr15, %V0, %V1"
7582 [(set_attr "type" "mav_farith")
7583 (set_attr "cirrus" "compare")]
7584 )
7585
7586 ; This insn allows redundant compares to be removed by cse.  Nothing should
7587 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7588 ; is deleted later on. The match_dup will match the mode here, so that
7589 ; mode changes of the condition codes aren't lost by this even though we don't
7590 ; specify what they are.
7591
7592 (define_insn "*deleted_compare"
7593 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7594 "TARGET_32BIT"
7595 "\\t%@ deleted compare"
7596 [(set_attr "conds" "set")
7597 (set_attr "length" "0")]
7598 )
7599
7600 \f
7601 ;; Conditional branch insns
7602
7603 (define_expand "beq"
7604 [(set (pc)
7605 (if_then_else (eq (match_dup 1) (const_int 0))
7606 (label_ref (match_operand 0 "" ""))
7607 (pc)))]
7608 "TARGET_32BIT"
7609 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7610 )
7611
7612 (define_expand "bne"
7613 [(set (pc)
7614 (if_then_else (ne (match_dup 1) (const_int 0))
7615 (label_ref (match_operand 0 "" ""))
7616 (pc)))]
7617 "TARGET_32BIT"
7618 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7619 )
7620
7621 (define_expand "bgt"
7622 [(set (pc)
7623 (if_then_else (gt (match_dup 1) (const_int 0))
7624 (label_ref (match_operand 0 "" ""))
7625 (pc)))]
7626 "TARGET_32BIT"
7627 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7628 )
7629
7630 (define_expand "ble"
7631 [(set (pc)
7632 (if_then_else (le (match_dup 1) (const_int 0))
7633 (label_ref (match_operand 0 "" ""))
7634 (pc)))]
7635 "TARGET_32BIT"
7636 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7637 )
7638
7639 (define_expand "bge"
7640 [(set (pc)
7641 (if_then_else (ge (match_dup 1) (const_int 0))
7642 (label_ref (match_operand 0 "" ""))
7643 (pc)))]
7644 "TARGET_32BIT"
7645 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7646 )
7647
7648 (define_expand "blt"
7649 [(set (pc)
7650 (if_then_else (lt (match_dup 1) (const_int 0))
7651 (label_ref (match_operand 0 "" ""))
7652 (pc)))]
7653 "TARGET_32BIT"
7654 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7655 )
7656
7657 (define_expand "bgtu"
7658 [(set (pc)
7659 (if_then_else (gtu (match_dup 1) (const_int 0))
7660 (label_ref (match_operand 0 "" ""))
7661 (pc)))]
7662 "TARGET_32BIT"
7663 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7664 )
7665
7666 (define_expand "bleu"
7667 [(set (pc)
7668 (if_then_else (leu (match_dup 1) (const_int 0))
7669 (label_ref (match_operand 0 "" ""))
7670 (pc)))]
7671 "TARGET_32BIT"
7672 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7673 )
7674
7675 (define_expand "bgeu"
7676 [(set (pc)
7677 (if_then_else (geu (match_dup 1) (const_int 0))
7678 (label_ref (match_operand 0 "" ""))
7679 (pc)))]
7680 "TARGET_32BIT"
7681 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7682 )
7683
7684 (define_expand "bltu"
7685 [(set (pc)
7686 (if_then_else (ltu (match_dup 1) (const_int 0))
7687 (label_ref (match_operand 0 "" ""))
7688 (pc)))]
7689 "TARGET_32BIT"
7690 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7691 )
7692
7693 (define_expand "bunordered"
7694 [(set (pc)
7695 (if_then_else (unordered (match_dup 1) (const_int 0))
7696 (label_ref (match_operand 0 "" ""))
7697 (pc)))]
7698 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7699 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7700 arm_compare_op1);"
7701 )
7702
7703 (define_expand "bordered"
7704 [(set (pc)
7705 (if_then_else (ordered (match_dup 1) (const_int 0))
7706 (label_ref (match_operand 0 "" ""))
7707 (pc)))]
7708 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7709 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7710 arm_compare_op1);"
7711 )
7712
7713 (define_expand "bungt"
7714 [(set (pc)
7715 (if_then_else (ungt (match_dup 1) (const_int 0))
7716 (label_ref (match_operand 0 "" ""))
7717 (pc)))]
7718 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7719 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7720 )
7721
7722 (define_expand "bunlt"
7723 [(set (pc)
7724 (if_then_else (unlt (match_dup 1) (const_int 0))
7725 (label_ref (match_operand 0 "" ""))
7726 (pc)))]
7727 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7728 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7729 )
7730
7731 (define_expand "bunge"
7732 [(set (pc)
7733 (if_then_else (unge (match_dup 1) (const_int 0))
7734 (label_ref (match_operand 0 "" ""))
7735 (pc)))]
7736 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7737 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7738 )
7739
7740 (define_expand "bunle"
7741 [(set (pc)
7742 (if_then_else (unle (match_dup 1) (const_int 0))
7743 (label_ref (match_operand 0 "" ""))
7744 (pc)))]
7745 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7746 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7747 )
7748
7749 ;; The following two patterns need two branch instructions, since there is
7750 ;; no single instruction that will handle all cases.
7751 (define_expand "buneq"
7752 [(set (pc)
7753 (if_then_else (uneq (match_dup 1) (const_int 0))
7754 (label_ref (match_operand 0 "" ""))
7755 (pc)))]
7756 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7757 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7758 )
7759
7760 (define_expand "bltgt"
7761 [(set (pc)
7762 (if_then_else (ltgt (match_dup 1) (const_int 0))
7763 (label_ref (match_operand 0 "" ""))
7764 (pc)))]
7765 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7766 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7767 )
7768
7769 ;;
7770 ;; Patterns to match conditional branch insns.
7771 ;;
7772
7773 ; Special pattern to match UNEQ.
7774 (define_insn "*arm_buneq"
7775 [(set (pc)
7776 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7777 (label_ref (match_operand 0 "" ""))
7778 (pc)))]
7779 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7780 "*
7781 gcc_assert (!arm_ccfsm_state);
7782
7783 return \"bvs\\t%l0\;beq\\t%l0\";
7784 "
7785 [(set_attr "conds" "jump_clob")
7786 (set_attr "length" "8")]
7787 )
7788
7789 ; Special pattern to match LTGT.
7790 (define_insn "*arm_bltgt"
7791 [(set (pc)
7792 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7793 (label_ref (match_operand 0 "" ""))
7794 (pc)))]
7795 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7796 "*
7797 gcc_assert (!arm_ccfsm_state);
7798
7799 return \"bmi\\t%l0\;bgt\\t%l0\";
7800 "
7801 [(set_attr "conds" "jump_clob")
7802 (set_attr "length" "8")]
7803 )
7804
7805 (define_insn "*arm_cond_branch"
7806 [(set (pc)
7807 (if_then_else (match_operator 1 "arm_comparison_operator"
7808 [(match_operand 2 "cc_register" "") (const_int 0)])
7809 (label_ref (match_operand 0 "" ""))
7810 (pc)))]
7811 "TARGET_32BIT"
7812 "*
7813 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7814 {
7815 arm_ccfsm_state += 2;
7816 return \"\";
7817 }
7818 return \"b%d1\\t%l0\";
7819 "
7820 [(set_attr "conds" "use")
7821 (set_attr "type" "branch")]
7822 )
7823
7824 ; Special pattern to match reversed UNEQ.
7825 (define_insn "*arm_buneq_reversed"
7826 [(set (pc)
7827 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7828 (pc)
7829 (label_ref (match_operand 0 "" ""))))]
7830 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7831 "*
7832 gcc_assert (!arm_ccfsm_state);
7833
7834 return \"bmi\\t%l0\;bgt\\t%l0\";
7835 "
7836 [(set_attr "conds" "jump_clob")
7837 (set_attr "length" "8")]
7838 )
7839
7840 ; Special pattern to match reversed LTGT.
7841 (define_insn "*arm_bltgt_reversed"
7842 [(set (pc)
7843 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7844 (pc)
7845 (label_ref (match_operand 0 "" ""))))]
7846 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7847 "*
7848 gcc_assert (!arm_ccfsm_state);
7849
7850 return \"bvs\\t%l0\;beq\\t%l0\";
7851 "
7852 [(set_attr "conds" "jump_clob")
7853 (set_attr "length" "8")]
7854 )
7855
7856 (define_insn "*arm_cond_branch_reversed"
7857 [(set (pc)
7858 (if_then_else (match_operator 1 "arm_comparison_operator"
7859 [(match_operand 2 "cc_register" "") (const_int 0)])
7860 (pc)
7861 (label_ref (match_operand 0 "" ""))))]
7862 "TARGET_32BIT"
7863 "*
7864 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7865 {
7866 arm_ccfsm_state += 2;
7867 return \"\";
7868 }
7869 return \"b%D1\\t%l0\";
7870 "
7871 [(set_attr "conds" "use")
7872 (set_attr "type" "branch")]
7873 )
7874
7875 \f
7876
7877 ; scc insns
7878
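; Each sCC expander below materializes the comparison saved by the cmpMODE
; patterns (via arm_gen_compare_reg) and then relies on the *mov_scc,
; *mov_negscc and *mov_notscc patterns to emit a pair of conditional moves
; producing 0/1, 0/-1 or 0/~1 respectively.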
7879 (define_expand "seq"
7880 [(set (match_operand:SI 0 "s_register_operand" "")
7881 (eq:SI (match_dup 1) (const_int 0)))]
7882 "TARGET_32BIT"
7883 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7884 )
7885
7886 (define_expand "sne"
7887 [(set (match_operand:SI 0 "s_register_operand" "")
7888 (ne:SI (match_dup 1) (const_int 0)))]
7889 "TARGET_32BIT"
7890 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7891 )
7892
7893 (define_expand "sgt"
7894 [(set (match_operand:SI 0 "s_register_operand" "")
7895 (gt:SI (match_dup 1) (const_int 0)))]
7896 "TARGET_32BIT"
7897 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7898 )
7899
7900 (define_expand "sle"
7901 [(set (match_operand:SI 0 "s_register_operand" "")
7902 (le:SI (match_dup 1) (const_int 0)))]
7903 "TARGET_32BIT"
7904 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7905 )
7906
7907 (define_expand "sge"
7908 [(set (match_operand:SI 0 "s_register_operand" "")
7909 (ge:SI (match_dup 1) (const_int 0)))]
7910 "TARGET_32BIT"
7911 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7912 )
7913
7914 (define_expand "slt"
7915 [(set (match_operand:SI 0 "s_register_operand" "")
7916 (lt:SI (match_dup 1) (const_int 0)))]
7917 "TARGET_32BIT"
7918 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7919 )
7920
7921 (define_expand "sgtu"
7922 [(set (match_operand:SI 0 "s_register_operand" "")
7923 (gtu:SI (match_dup 1) (const_int 0)))]
7924 "TARGET_32BIT"
7925 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7926 )
7927
7928 (define_expand "sleu"
7929 [(set (match_operand:SI 0 "s_register_operand" "")
7930 (leu:SI (match_dup 1) (const_int 0)))]
7931 "TARGET_32BIT"
7932 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7933 )
7934
7935 (define_expand "sgeu"
7936 [(set (match_operand:SI 0 "s_register_operand" "")
7937 (geu:SI (match_dup 1) (const_int 0)))]
7938 "TARGET_32BIT"
7939 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7940 )
7941
7942 (define_expand "sltu"
7943 [(set (match_operand:SI 0 "s_register_operand" "")
7944 (ltu:SI (match_dup 1) (const_int 0)))]
7945 "TARGET_32BIT"
7946 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7947 )
7948
7949 (define_expand "sunordered"
7950 [(set (match_operand:SI 0 "s_register_operand" "")
7951 (unordered:SI (match_dup 1) (const_int 0)))]
7952 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7953 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7954 arm_compare_op1);"
7955 )
7956
7957 (define_expand "sordered"
7958 [(set (match_operand:SI 0 "s_register_operand" "")
7959 (ordered:SI (match_dup 1) (const_int 0)))]
7960 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7961 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7962 arm_compare_op1);"
7963 )
7964
7965 (define_expand "sungt"
7966 [(set (match_operand:SI 0 "s_register_operand" "")
7967 (ungt:SI (match_dup 1) (const_int 0)))]
7968 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7969 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7970 arm_compare_op1);"
7971 )
7972
7973 (define_expand "sunge"
7974 [(set (match_operand:SI 0 "s_register_operand" "")
7975 (unge:SI (match_dup 1) (const_int 0)))]
7976 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7977 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7978 arm_compare_op1);"
7979 )
7980
7981 (define_expand "sunlt"
7982 [(set (match_operand:SI 0 "s_register_operand" "")
7983 (unlt:SI (match_dup 1) (const_int 0)))]
7984 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7985 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7986 arm_compare_op1);"
7987 )
7988
7989 (define_expand "sunle"
7990 [(set (match_operand:SI 0 "s_register_operand" "")
7991 (unle:SI (match_dup 1) (const_int 0)))]
7992 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7993 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7994 arm_compare_op1);"
7995 )
7996
7997 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7998 ;;; simple ARM instructions.
7999 ;
8000 ; (define_expand "suneq"
8001 ; [(set (match_operand:SI 0 "s_register_operand" "")
8002 ; (uneq:SI (match_dup 1) (const_int 0)))]
8003 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8004 ; "gcc_unreachable ();"
8005 ; )
8006 ;
8007 ; (define_expand "sltgt"
8008 ; [(set (match_operand:SI 0 "s_register_operand" "")
8009 ; (ltgt:SI (match_dup 1) (const_int 0)))]
8010 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8011 ; "gcc_unreachable ();"
8012 ; )
8013
8014 (define_insn "*mov_scc"
8015 [(set (match_operand:SI 0 "s_register_operand" "=r")
8016 (match_operator:SI 1 "arm_comparison_operator"
8017 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8018 "TARGET_ARM"
8019 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8020 [(set_attr "conds" "use")
8021 (set_attr "length" "8")]
8022 )
8023
8024 (define_insn "*mov_negscc"
8025 [(set (match_operand:SI 0 "s_register_operand" "=r")
8026 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8027 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8028 "TARGET_ARM"
8029 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8030 [(set_attr "conds" "use")
8031 (set_attr "length" "8")]
8032 )
8033
8034 (define_insn "*mov_notscc"
8035 [(set (match_operand:SI 0 "s_register_operand" "=r")
8036 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8037 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8038 "TARGET_ARM"
8039 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8040 [(set_attr "conds" "use")
8041 (set_attr "length" "8")]
8042 )
8043
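;; Thumb-1 cstore expansion.  Against zero, EQ/NE use the dedicated eq0/ne0
;; patterns below and LE/GE/GT use short shift sequences; LT (and the unsigned
;; codes) FAIL and are left to the generic code.  For a general second operand,
;; EQ/NE subtract first and then reuse eq0/ne0, LE/GE/LEU/GEU go through the
;; add-with-carry pattern thumb1_addsi3_addgeu, LTU/GTU use
;; cstoresi_nltu_thumb1 followed by a negate, and signed GT/LT FAIL.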
8044 (define_expand "cstoresi4"
8045 [(set (match_operand:SI 0 "s_register_operand" "")
8046 (match_operator:SI 1 "arm_comparison_operator"
8047 [(match_operand:SI 2 "s_register_operand" "")
8048 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8049 "TARGET_THUMB1"
8050 "{
8051 rtx op3, scratch, scratch2;
8052
8053 if (operands[3] == const0_rtx)
8054 {
8055 switch (GET_CODE (operands[1]))
8056 {
8057 case EQ:
8058 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8059 break;
8060
8061 case NE:
8062 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8063 break;
8064
8065 case LE:
8066 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8067 NULL_RTX, 0, OPTAB_WIDEN);
8068 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8069 NULL_RTX, 0, OPTAB_WIDEN);
8070 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8071 operands[0], 1, OPTAB_WIDEN);
8072 break;
8073
8074 case GE:
8075 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8076 NULL_RTX, 1);
8077 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8078 operands[0], 1, OPTAB_WIDEN);
8079 break;
8080
8081 case GT:
8082 scratch = expand_binop (SImode, ashr_optab, operands[2],
8083 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8084 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8085 NULL_RTX, 0, OPTAB_WIDEN);
8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8087 0, OPTAB_WIDEN);
8088 break;
8089
8090 /* LT is handled by generic code. No need for unsigned with 0. */
8091 default:
8092 FAIL;
8093 }
8094 DONE;
8095 }
8096
8097 switch (GET_CODE (operands[1]))
8098 {
8099 case EQ:
8100 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8101 NULL_RTX, 0, OPTAB_WIDEN);
8102 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8103 break;
8104
8105 case NE:
8106 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8107 NULL_RTX, 0, OPTAB_WIDEN);
8108 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8109 break;
8110
8111 case LE:
8112 op3 = force_reg (SImode, operands[3]);
8113
8114 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8115 NULL_RTX, 1, OPTAB_WIDEN);
8116 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8117 NULL_RTX, 0, OPTAB_WIDEN);
8118 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8119 op3, operands[2]));
8120 break;
8121
8122 case GE:
8123 op3 = operands[3];
8124 if (!thumb1_cmp_operand (op3, SImode))
8125 op3 = force_reg (SImode, op3);
8126 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8127 NULL_RTX, 0, OPTAB_WIDEN);
8128 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8129 NULL_RTX, 1, OPTAB_WIDEN);
8130 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8131 operands[2], op3));
8132 break;
8133
8134 case LEU:
8135 op3 = force_reg (SImode, operands[3]);
8136 scratch = force_reg (SImode, const0_rtx);
8137 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8138 op3, operands[2]));
8139 break;
8140
8141 case GEU:
8142 op3 = operands[3];
8143 if (!thumb1_cmp_operand (op3, SImode))
8144 op3 = force_reg (SImode, op3);
8145 scratch = force_reg (SImode, const0_rtx);
8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8147 operands[2], op3));
8148 break;
8149
8150 case LTU:
8151 op3 = operands[3];
8152 if (!thumb1_cmp_operand (op3, SImode))
8153 op3 = force_reg (SImode, op3);
8154 scratch = gen_reg_rtx (SImode);
8155 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8156 emit_insn (gen_negsi2 (operands[0], scratch));
8157 break;
8158
8159 case GTU:
8160 op3 = force_reg (SImode, operands[3]);
8161 scratch = gen_reg_rtx (SImode);
8162 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8163 emit_insn (gen_negsi2 (operands[0], scratch));
8164 break;
8165
8166 /* No good sequences for GT, LT. */
8167 default:
8168 FAIL;
8169 }
8170 DONE;
8171 }")
8172
8173 (define_expand "cstoresi_eq0_thumb1"
8174 [(parallel
8175 [(set (match_operand:SI 0 "s_register_operand" "")
8176 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8177 (const_int 0)))
8178 (clobber (match_dup:SI 2))])]
8179 "TARGET_THUMB1"
8180 "operands[2] = gen_reg_rtx (SImode);"
8181 )
8182
8183 (define_expand "cstoresi_ne0_thumb1"
8184 [(parallel
8185 [(set (match_operand:SI 0 "s_register_operand" "")
8186 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8187 (const_int 0)))
8188 (clobber (match_dup:SI 2))])]
8189 "TARGET_THUMB1"
8190 "operands[2] = gen_reg_rtx (SImode);"
8191 )
8192
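;; Store (x == 0) and (x != 0) through the carry flag:
;;   neg %0, %1 sets C iff %1 == 0, so the following adc leaves C (0 or 1) in %0;
;;   sub %2, %1, #1 sets C iff %1 != 0, and sbc %0, %1, %2 again leaves C in %0.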
8193 (define_insn "*cstoresi_eq0_thumb1_insn"
8194 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8195 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8196 (const_int 0)))
8197 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8198 "TARGET_THUMB1"
8199 "@
8200 neg\\t%0, %1\;adc\\t%0, %0, %1
8201 neg\\t%2, %1\;adc\\t%0, %1, %2"
8202 [(set_attr "length" "4")]
8203 )
8204
8205 (define_insn "*cstoresi_ne0_thumb1_insn"
8206 [(set (match_operand:SI 0 "s_register_operand" "=l")
8207 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8208 (const_int 0)))
8209 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8210 "TARGET_THUMB1"
8211 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8212 [(set_attr "length" "4")]
8213 )
8214
8215 (define_insn "cstoresi_nltu_thumb1"
8216 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8217 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8218 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8219 "TARGET_THUMB1"
8220 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8221 [(set_attr "length" "4")]
8222 )
8223
8224 ;; Used as part of the expansion of the Thumb-1 LE/GE/LEU/GEU cstore sequences
;; above: the cmp sets the carry flag iff %3 >= %4 (unsigned), and the adc then
;; adds that GEU result into the sum.
8225 (define_insn "thumb1_addsi3_addgeu"
8226 [(set (match_operand:SI 0 "s_register_operand" "=l")
8227 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8228 (match_operand:SI 2 "s_register_operand" "l"))
8229 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8230 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8231 "TARGET_THUMB1"
8232 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8233 [(set_attr "length" "4")]
8234 )
8235
8236 \f
8237 ;; Conditional move insns
8238
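;; Like the branch expanders, these pick up the comparison saved by the
;; cmpMODE patterns.  UNEQ and LTGT FAIL because they cannot be tested with a
;; single condition (see the buneq/bltgt patterns above).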
8239 (define_expand "movsicc"
8240 [(set (match_operand:SI 0 "s_register_operand" "")
8241 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8242 (match_operand:SI 2 "arm_not_operand" "")
8243 (match_operand:SI 3 "arm_not_operand" "")))]
8244 "TARGET_32BIT"
8245 "
8246 {
8247 enum rtx_code code = GET_CODE (operands[1]);
8248 rtx ccreg;
8249
8250 if (code == UNEQ || code == LTGT)
8251 FAIL;
8252
8253 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8254 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8255 }"
8256 )
8257
8258 (define_expand "movsfcc"
8259 [(set (match_operand:SF 0 "s_register_operand" "")
8260 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8261 (match_operand:SF 2 "s_register_operand" "")
8262 (match_operand:SF 3 "nonmemory_operand" "")))]
8263 "TARGET_32BIT && TARGET_HARD_FLOAT"
8264 "
8265 {
8266 enum rtx_code code = GET_CODE (operands[1]);
8267 rtx ccreg;
8268
8269 if (code == UNEQ || code == LTGT)
8270 FAIL;
8271
8272 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8273 Otherwise, ensure it is a valid FP add operand.  */
8274 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8275 || (!arm_float_add_operand (operands[3], SFmode)))
8276 operands[3] = force_reg (SFmode, operands[3]);
8277
8278 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8279 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8280 }"
8281 )
8282
8283 (define_expand "movdfcc"
8284 [(set (match_operand:DF 0 "s_register_operand" "")
8285 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8286 (match_operand:DF 2 "s_register_operand" "")
8287 (match_operand:DF 3 "arm_float_add_operand" "")))]
8288 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8289 "
8290 {
8291 enum rtx_code code = GET_CODE (operands[1]);
8292 rtx ccreg;
8293
8294 if (code == UNEQ || code == LTGT)
8295 FAIL;
8296
8297 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8298 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8299 }"
8300 )
8301
8302 (define_insn "*movsicc_insn"
8303 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8304 (if_then_else:SI
8305 (match_operator 3 "arm_comparison_operator"
8306 [(match_operand 4 "cc_register" "") (const_int 0)])
8307 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8308 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8309 "TARGET_ARM"
8310 "@
8311 mov%D3\\t%0, %2
8312 mvn%D3\\t%0, #%B2
8313 mov%d3\\t%0, %1
8314 mvn%d3\\t%0, #%B1
8315 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8316 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8317 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8318 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8319 [(set_attr "length" "4,4,4,4,8,8,8,8")
8320 (set_attr "conds" "use")]
8321 )
8322
8323 (define_insn "*movsfcc_soft_insn"
8324 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8325 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8326 [(match_operand 4 "cc_register" "") (const_int 0)])
8327 (match_operand:SF 1 "s_register_operand" "0,r")
8328 (match_operand:SF 2 "s_register_operand" "r,0")))]
8329 "TARGET_ARM && TARGET_SOFT_FLOAT"
8330 "@
8331 mov%D3\\t%0, %2
8332 mov%d3\\t%0, %1"
8333 [(set_attr "conds" "use")]
8334 )
8335
8336 \f
8337 ;; Jump and linkage insns
8338
8339 (define_expand "jump"
8340 [(set (pc)
8341 (label_ref (match_operand 0 "" "")))]
8342 "TARGET_EITHER"
8343 ""
8344 )
8345
8346 (define_insn "*arm_jump"
8347 [(set (pc)
8348 (label_ref (match_operand 0 "" "")))]
8349 "TARGET_32BIT"
8350 "*
8351 {
8352 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8353 {
8354 arm_ccfsm_state += 2;
8355 return \"\";
8356 }
8357 return \"b%?\\t%l0\";
8358 }
8359 "
8360 [(set_attr "predicable" "yes")]
8361 )
8362
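;; Unconditional Thumb-1 branch: a 2-byte B when the target is within range,
;; otherwise a 4-byte BL "far jump" (which clobbers LR, hence the far_jump
;; attribute).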
8363 (define_insn "*thumb_jump"
8364 [(set (pc)
8365 (label_ref (match_operand 0 "" "")))]
8366 "TARGET_THUMB1"
8367 "*
8368 if (get_attr_length (insn) == 2)
8369 return \"b\\t%l0\";
8370 return \"bl\\t%l0\\t%@ far jump\";
8371 "
8372 [(set (attr "far_jump")
8373 (if_then_else
8374 (eq_attr "length" "4")
8375 (const_string "yes")
8376 (const_string "no")))
8377 (set (attr "length")
8378 (if_then_else
8379 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8380 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8381 (const_int 2)
8382 (const_int 4)))]
8383 )
8384
8385 (define_expand "call"
8386 [(parallel [(call (match_operand 0 "memory_operand" "")
8387 (match_operand 1 "general_operand" ""))
8388 (use (match_operand 2 "" ""))
8389 (clobber (reg:SI LR_REGNUM))])]
8390 "TARGET_EITHER"
8391 "
8392 {
8393 rtx callee, pat;
8394
8395 /* In an untyped call, we can get NULL for operand 2. */
8396 if (operands[2] == NULL_RTX)
8397 operands[2] = const0_rtx;
8398
8399 /* Decide if we should generate indirect calls by loading the
8400 32-bit address of the callee into a register before performing the
8401 branch and link. */
8402 callee = XEXP (operands[0], 0);
8403 if (GET_CODE (callee) == SYMBOL_REF
8404 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8405 : !REG_P (callee))
8406 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8407
8408 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8409 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8410 DONE;
8411 }"
8412 )
8413
8414 (define_expand "call_internal"
8415 [(parallel [(call (match_operand 0 "memory_operand" "")
8416 (match_operand 1 "general_operand" ""))
8417 (use (match_operand 2 "" ""))
8418 (clobber (reg:SI LR_REGNUM))])])
8419
8420 (define_insn "*call_reg_armv5"
8421 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8422 (match_operand 1 "" ""))
8423 (use (match_operand 2 "" ""))
8424 (clobber (reg:SI LR_REGNUM))]
8425 "TARGET_ARM && arm_arch5"
8426 "blx%?\\t%0"
8427 [(set_attr "type" "call")]
8428 )
8429
8430 (define_insn "*call_reg_arm"
8431 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8432 (match_operand 1 "" ""))
8433 (use (match_operand 2 "" ""))
8434 (clobber (reg:SI LR_REGNUM))]
8435 "TARGET_ARM && !arm_arch5"
8436 "*
8437 return output_call (operands);
8438 "
8439 ;; length is worst case; normally it is only two instructions
8440 [(set_attr "length" "12")
8441 (set_attr "type" "call")]
8442 )
8443
8444 (define_insn "*call_mem"
8445 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8446 (match_operand 1 "" ""))
8447 (use (match_operand 2 "" ""))
8448 (clobber (reg:SI LR_REGNUM))]
8449 "TARGET_ARM"
8450 "*
8451 return output_call_mem (operands);
8452 "
8453 [(set_attr "length" "12")
8454 (set_attr "type" "call")]
8455 )
8456
8457 (define_insn "*call_reg_thumb1_v5"
8458 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8459 (match_operand 1 "" ""))
8460 (use (match_operand 2 "" ""))
8461 (clobber (reg:SI LR_REGNUM))]
8462 "TARGET_THUMB1 && arm_arch5"
8463 "blx\\t%0"
8464 [(set_attr "length" "2")
8465 (set_attr "type" "call")]
8466 )
8467
8468 (define_insn "*call_reg_thumb1"
8469 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8470 (match_operand 1 "" ""))
8471 (use (match_operand 2 "" ""))
8472 (clobber (reg:SI LR_REGNUM))]
8473 "TARGET_THUMB1 && !arm_arch5"
8474 "*
8475 {
8476 if (!TARGET_CALLER_INTERWORKING)
8477 return thumb_call_via_reg (operands[0]);
8478 else if (operands[1] == const0_rtx)
8479 return \"bl\\t%__interwork_call_via_%0\";
8480 else if (frame_pointer_needed)
8481 return \"bl\\t%__interwork_r7_call_via_%0\";
8482 else
8483 return \"bl\\t%__interwork_r11_call_via_%0\";
8484 }"
8485 [(set_attr "type" "call")]
8486 )
8487
8488 (define_expand "call_value"
8489 [(parallel [(set (match_operand 0 "" "")
8490 (call (match_operand 1 "memory_operand" "")
8491 (match_operand 2 "general_operand" "")))
8492 (use (match_operand 3 "" ""))
8493 (clobber (reg:SI LR_REGNUM))])]
8494 "TARGET_EITHER"
8495 "
8496 {
8497 rtx pat, callee;
8498
8499 /* In an untyped call, we can get NULL for operand 3. */
8500 if (operands[3] == 0)
8501 operands[3] = const0_rtx;
8502
8503 /* Decide if we should generate indirect calls by loading the
8504 32-bit address of the callee into a register before performing the
8505 branch and link. */
8506 callee = XEXP (operands[1], 0);
8507 if (GET_CODE (callee) == SYMBOL_REF
8508 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8509 : !REG_P (callee))
8510 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8511
8512 pat = gen_call_value_internal (operands[0], operands[1],
8513 operands[2], operands[3]);
8514 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8515 DONE;
8516 }"
8517 )
8518
8519 (define_expand "call_value_internal"
8520 [(parallel [(set (match_operand 0 "" "")
8521 (call (match_operand 1 "memory_operand" "")
8522 (match_operand 2 "general_operand" "")))
8523 (use (match_operand 3 "" ""))
8524 (clobber (reg:SI LR_REGNUM))])])
8525
8526 (define_insn "*call_value_reg_armv5"
8527 [(set (match_operand 0 "" "")
8528 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8529 (match_operand 2 "" "")))
8530 (use (match_operand 3 "" ""))
8531 (clobber (reg:SI LR_REGNUM))]
8532 "TARGET_ARM && arm_arch5"
8533 "blx%?\\t%1"
8534 [(set_attr "type" "call")]
8535 )
8536
8537 (define_insn "*call_value_reg_arm"
8538 [(set (match_operand 0 "" "")
8539 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8540 (match_operand 2 "" "")))
8541 (use (match_operand 3 "" ""))
8542 (clobber (reg:SI LR_REGNUM))]
8543 "TARGET_ARM && !arm_arch5"
8544 "*
8545 return output_call (&operands[1]);
8546 "
8547 [(set_attr "length" "12")
8548 (set_attr "type" "call")]
8549 )
8550
8551 (define_insn "*call_value_mem"
8552 [(set (match_operand 0 "" "")
8553 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8554 (match_operand 2 "" "")))
8555 (use (match_operand 3 "" ""))
8556 (clobber (reg:SI LR_REGNUM))]
8557 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8558 "*
8559 return output_call_mem (&operands[1]);
8560 "
8561 [(set_attr "length" "12")
8562 (set_attr "type" "call")]
8563 )
8564
8565 (define_insn "*call_value_reg_thumb1_v5"
8566 [(set (match_operand 0 "" "")
8567 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8568 (match_operand 2 "" "")))
8569 (use (match_operand 3 "" ""))
8570 (clobber (reg:SI LR_REGNUM))]
8571 "TARGET_THUMB1 && arm_arch5"
8572 "blx\\t%1"
8573 [(set_attr "length" "2")
8574 (set_attr "type" "call")]
8575 )
8576
8577 (define_insn "*call_value_reg_thumb1"
8578 [(set (match_operand 0 "" "")
8579 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8580 (match_operand 2 "" "")))
8581 (use (match_operand 3 "" ""))
8582 (clobber (reg:SI LR_REGNUM))]
8583 "TARGET_THUMB1 && !arm_arch5"
8584 "*
8585 {
8586 if (!TARGET_CALLER_INTERWORKING)
8587 return thumb_call_via_reg (operands[1]);
8588 else if (operands[2] == const0_rtx)
8589 return \"bl\\t%__interwork_call_via_%1\";
8590 else if (frame_pointer_needed)
8591 return \"bl\\t%__interwork_r7_call_via_%1\";
8592 else
8593 return \"bl\\t%__interwork_r11_call_via_%1\";
8594 }"
8595 [(set_attr "type" "call")]
8596 )
8597
8598 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8599 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8600
8601 (define_insn "*call_symbol"
8602 [(call (mem:SI (match_operand:SI 0 "" ""))
8603 (match_operand 1 "" ""))
8604 (use (match_operand 2 "" ""))
8605 (clobber (reg:SI LR_REGNUM))]
8606 "TARGET_ARM
8607 && (GET_CODE (operands[0]) == SYMBOL_REF)
8608 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8609 "*
8610 {
8611 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8612 }"
8613 [(set_attr "type" "call")]
8614 )
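
;; For illustration (the function name `foo' is arbitrary): with the 'a'
;; modifier the pattern above emits
;;	bl	foo
;; or, when a PLT relocation is required for PIC,
;;	bl	foo(PLT)
;; whereas plain %0 would print the symbol as an immediate, i.e. `#foo'.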
8615
8616 (define_insn "*call_value_symbol"
8617 [(set (match_operand 0 "" "")
8618 (call (mem:SI (match_operand:SI 1 "" ""))
8619 (match_operand:SI 2 "" "")))
8620 (use (match_operand 3 "" ""))
8621 (clobber (reg:SI LR_REGNUM))]
8622 "TARGET_ARM
8623 && (GET_CODE (operands[1]) == SYMBOL_REF)
8624 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8625 "*
8626 {
8627 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8628 }"
8629 [(set_attr "type" "call")]
8630 )
8631
8632 (define_insn "*call_insn"
8633 [(call (mem:SI (match_operand:SI 0 "" ""))
8634 (match_operand:SI 1 "" ""))
8635 (use (match_operand 2 "" ""))
8636 (clobber (reg:SI LR_REGNUM))]
8637 "TARGET_THUMB
8638 && GET_CODE (operands[0]) == SYMBOL_REF
8639 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8640 "bl\\t%a0"
8641 [(set_attr "length" "4")
8642 (set_attr "type" "call")]
8643 )
8644
8645 (define_insn "*call_value_insn"
8646 [(set (match_operand 0 "" "")
8647 (call (mem:SI (match_operand 1 "" ""))
8648 (match_operand 2 "" "")))
8649 (use (match_operand 3 "" ""))
8650 (clobber (reg:SI LR_REGNUM))]
8651 "TARGET_THUMB
8652 && GET_CODE (operands[1]) == SYMBOL_REF
8653 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8654 "bl\\t%a1"
8655 [(set_attr "length" "4")
8656 (set_attr "type" "call")]
8657 )
8658
8659 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8660 (define_expand "sibcall"
8661 [(parallel [(call (match_operand 0 "memory_operand" "")
8662 (match_operand 1 "general_operand" ""))
8663 (return)
8664 (use (match_operand 2 "" ""))])]
8665 "TARGET_ARM"
8666 "
8667 {
8668 if (operands[2] == NULL_RTX)
8669 operands[2] = const0_rtx;
8670 }"
8671 )
8672
8673 (define_expand "sibcall_value"
8674 [(parallel [(set (match_operand 0 "" "")
8675 (call (match_operand 1 "memory_operand" "")
8676 (match_operand 2 "general_operand" "")))
8677 (return)
8678 (use (match_operand 3 "" ""))])]
8679 "TARGET_ARM"
8680 "
8681 {
8682 if (operands[3] == NULL_RTX)
8683 operands[3] = const0_rtx;
8684 }"
8685 )
8686
8687 (define_insn "*sibcall_insn"
8688 [(call (mem:SI (match_operand:SI 0 "" "X"))
8689 (match_operand 1 "" ""))
8690 (return)
8691 (use (match_operand 2 "" ""))]
8692 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8693 "*
8694 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8695 "
8696 [(set_attr "type" "call")]
8697 )
8698
8699 (define_insn "*sibcall_value_insn"
8700 [(set (match_operand 0 "" "")
8701 (call (mem:SI (match_operand:SI 1 "" "X"))
8702 (match_operand 2 "" "")))
8703 (return)
8704 (use (match_operand 3 "" ""))]
8705 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8706 "*
8707 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8708 "
8709 [(set_attr "type" "call")]
8710 )
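
;; For illustration (names are arbitrary): with optimization a tail call like
;;	int f (int x) { return g (x); }
;; can go through the sibcall patterns above, so the call is emitted as a
;; plain branch
;;	b	g		@ or b g(PLT) for PIC
;; and the caller's return sequence is reused instead of a bl/return pair.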
8711
8712 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly
8713 (define_insn "return"
8714 [(return)]
8715 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8716 "*
8717 {
8718 if (arm_ccfsm_state == 2)
8719 {
8720 arm_ccfsm_state += 2;
8721 return \"\";
8722 }
8723 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8724 }"
8725 [(set_attr "type" "load1")
8726 (set_attr "length" "12")
8727 (set_attr "predicable" "yes")]
8728 )
8729
8730 (define_insn "*cond_return"
8731 [(set (pc)
8732 (if_then_else (match_operator 0 "arm_comparison_operator"
8733 [(match_operand 1 "cc_register" "") (const_int 0)])
8734 (return)
8735 (pc)))]
8736 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8737 "*
8738 {
8739 if (arm_ccfsm_state == 2)
8740 {
8741 arm_ccfsm_state += 2;
8742 return \"\";
8743 }
8744 return output_return_instruction (operands[0], TRUE, FALSE);
8745 }"
8746 [(set_attr "conds" "use")
8747 (set_attr "length" "12")
8748 (set_attr "type" "load1")]
8749 )
8750
8751 (define_insn "*cond_return_inverted"
8752 [(set (pc)
8753 (if_then_else (match_operator 0 "arm_comparison_operator"
8754 [(match_operand 1 "cc_register" "") (const_int 0)])
8755 (pc)
8756 (return)))]
8757 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8758 "*
8759 {
8760 if (arm_ccfsm_state == 2)
8761 {
8762 arm_ccfsm_state += 2;
8763 return \"\";
8764 }
8765 return output_return_instruction (operands[0], TRUE, TRUE);
8766 }"
8767 [(set_attr "conds" "use")
8768 (set_attr "length" "12")
8769 (set_attr "type" "load1")]
8770 )
8771
8772 ;; Generate a sequence of instructions to determine if the processor is
8773 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8774 ;; mask.
8775
8776 (define_expand "return_addr_mask"
8777 [(set (match_dup 1)
8778 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8779 (const_int 0)))
8780 (set (match_operand:SI 0 "s_register_operand" "")
8781 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8782 (const_int -1)
8783 (const_int 67108860)))] ; 0x03fffffc
8784 "TARGET_ARM"
8785 "
8786 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8787 ")
8788
8789 (define_insn "*check_arch2"
8790 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8791 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8792 (const_int 0)))]
8793 "TARGET_ARM"
8794 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8795 [(set_attr "length" "8")
8796 (set_attr "conds" "set")]
8797 )
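
;; Roughly: the first TEQ sets the Z flag (r0 EOR r0 is zero), guaranteeing a
;; non-zero PSR.  On a 26-bit architecture the second TEQ sees the PSR bits as
;; part of one r15 read but not the other, so the operands differ and the
;; result is NE; on a 32-bit architecture both reads return only the PC and
;; the result is EQ.  return_addr_mask above then selects 0xffffffff (32-bit)
;; or 0x03fffffc (26-bit) as the mask to apply to return addresses.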
8798
8799 ;; Call subroutine returning any type.
8800
8801 (define_expand "untyped_call"
8802 [(parallel [(call (match_operand 0 "" "")
8803 (const_int 0))
8804 (match_operand 1 "" "")
8805 (match_operand 2 "" "")])]
8806 "TARGET_EITHER"
8807 "
8808 {
8809 int i;
8810 rtx par = gen_rtx_PARALLEL (VOIDmode,
8811 rtvec_alloc (XVECLEN (operands[2], 0)));
8812 rtx addr = gen_reg_rtx (Pmode);
8813 rtx mem;
8814 int size = 0;
8815
8816 emit_move_insn (addr, XEXP (operands[1], 0));
8817 mem = change_address (operands[1], BLKmode, addr);
8818
8819 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8820 {
8821 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8822
8823 /* Default code only uses r0 as a return value, but we could
8824 be using anything up to 4 registers. */
8825 if (REGNO (src) == R0_REGNUM)
8826 src = gen_rtx_REG (TImode, R0_REGNUM);
8827
8828 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8829 GEN_INT (size));
8830 size += GET_MODE_SIZE (GET_MODE (src));
8831 }
8832
8833 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8834 const0_rtx));
8835
8836 size = 0;
8837
8838 for (i = 0; i < XVECLEN (par, 0); i++)
8839 {
8840 HOST_WIDE_INT offset = 0;
8841 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8842
8843 if (size != 0)
8844 emit_move_insn (addr, plus_constant (addr, size));
8845
8846 mem = change_address (mem, GET_MODE (reg), NULL);
8847 if (REGNO (reg) == R0_REGNUM)
8848 {
8849 /* On Thumb we have to use a write-back instruction. */
8850 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8851 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8852 size = TARGET_ARM ? 16 : 0;
8853 }
8854 else
8855 {
8856 emit_move_insn (mem, reg);
8857 size = GET_MODE_SIZE (GET_MODE (reg));
8858 }
8859 }
8860
8861 /* The optimizer does not know that the call sets the function value
8862 registers we stored in the result block. We avoid problems by
8863 claiming that all hard registers are used and clobbered at this
8864 point. */
8865 emit_insn (gen_blockage ());
8866
8867 DONE;
8868 }"
8869 )
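
;; For illustration: untyped_call is what __builtin_apply expands to, e.g.
;;	__builtin_apply ((void (*) ()) fn, __builtin_apply_args (), 16)
;; (fn and the size argument are arbitrary here).  The expander calls the
;; function and then stores every register that might carry a return value
;; into the result block given by operand 1, since the caller cannot know
;; which of them the callee actually set.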
8870
8871 (define_expand "untyped_return"
8872 [(match_operand:BLK 0 "memory_operand" "")
8873 (match_operand 1 "" "")]
8874 "TARGET_EITHER"
8875 "
8876 {
8877 int i;
8878 rtx addr = gen_reg_rtx (Pmode);
8879 rtx mem;
8880 int size = 0;
8881
8882 emit_move_insn (addr, XEXP (operands[0], 0));
8883 mem = change_address (operands[0], BLKmode, addr);
8884
8885 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8886 {
8887 HOST_WIDE_INT offset = 0;
8888 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8889
8890 if (size != 0)
8891 emit_move_insn (addr, plus_constant (addr, size));
8892
8893 mem = change_address (mem, GET_MODE (reg), NULL);
8894 if (REGNO (reg) == R0_REGNUM)
8895 {
8896 /* On Thumb we have to use a write-back instruction. */
8897 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8898 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8899 size = TARGET_ARM ? 16 : 0;
8900 }
8901 else
8902 {
8903 emit_move_insn (reg, mem);
8904 size = GET_MODE_SIZE (GET_MODE (reg));
8905 }
8906 }
8907
8908 /* Emit USE insns before the return. */
8909 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8910 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8911
8912 /* Construct the return. */
8913 expand_naked_return ();
8914
8915 DONE;
8916 }"
8917 )
8918
8919 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8920 ;; all of memory. This blocks insns from being moved across this point.
8921
8922 (define_insn "blockage"
8923 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8924 "TARGET_EITHER"
8925 ""
8926 [(set_attr "length" "0")
8927 (set_attr "type" "block")]
8928 )
8929
8930 (define_expand "casesi"
8931 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8932 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8933 (match_operand:SI 2 "const_int_operand" "") ; total range
8934 (match_operand:SI 3 "" "") ; table label
8935 (match_operand:SI 4 "" "")] ; out-of-range label
8936 "TARGET_32BIT"
8937 "
8938 {
8939 rtx reg;
8940 if (operands[1] != const0_rtx)
8941 {
8942 reg = gen_reg_rtx (SImode);
8943
8944 emit_insn (gen_addsi3 (reg, operands[0],
8945 GEN_INT (-INTVAL (operands[1]))));
8946 operands[0] = reg;
8947 }
8948
8949 if (!const_ok_for_arm (INTVAL (operands[2])))
8950 operands[2] = force_reg (SImode, operands[2]);
8951
8952 if (TARGET_ARM)
8953 {
8954 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8955 operands[3], operands[4]));
8956 }
8957 else if (flag_pic)
8958 {
8959 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8960 operands[2], operands[3], operands[4]));
8961 }
8962 else
8963 {
8964 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8965 operands[3], operands[4]));
8966 }
8967 DONE;
8968 }"
8969 )
8970
8971 ;; The USE in this pattern is needed to tell flow analysis that this is
8972 ;; a CASESI insn. It has no other purpose.
8973 (define_insn "arm_casesi_internal"
8974 [(parallel [(set (pc)
8975 (if_then_else
8976 (leu (match_operand:SI 0 "s_register_operand" "r")
8977 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8978 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8979 (label_ref (match_operand 2 "" ""))))
8980 (label_ref (match_operand 3 "" ""))))
8981 (clobber (reg:CC CC_REGNUM))
8982 (use (label_ref (match_dup 2)))])]
8983 "TARGET_ARM"
8984 "*
8985 if (flag_pic)
8986 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8987 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8988 "
8989 [(set_attr "conds" "clob")
8990 (set_attr "length" "12")]
8991 )
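
;; For illustration (register names arbitrary): a dense switch dispatched
;; through the non-PIC template above looks like
;;	cmp	r0, #4			@ index against table range
;;	ldrls	pc, [pc, r0, asl #2]	@ in range: load target from table
;;	b	.Ldefault		@ out of range
;; with the table of code addresses emitted immediately afterwards (the
;; PC-relative load relies on that placement); the PIC form uses
;; addls pc, pc, ... and a table of branch instructions instead.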
8992
8993 (define_expand "indirect_jump"
8994 [(set (pc)
8995 (match_operand:SI 0 "s_register_operand" ""))]
8996 "TARGET_EITHER"
8997 "
8998 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8999 address and use bx. */
9000 if (TARGET_THUMB2)
9001 {
9002 rtx tmp;
9003 tmp = gen_reg_rtx (SImode);
9004 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT (1)));
9005 operands[0] = tmp;
9006 }
9007 "
9008 )
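
;; For illustration: computed jumps such as `goto *p' come through this
;; expander.  On Thumb-2 the ORR above keeps bit 0 of the target set so the
;; BX-style jump emitted for it stays in Thumb state instead of switching
;; to ARM.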
9009
9010 ;; NB Never uses BX.
9011 (define_insn "*arm_indirect_jump"
9012 [(set (pc)
9013 (match_operand:SI 0 "s_register_operand" "r"))]
9014 "TARGET_ARM"
9015 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9016 [(set_attr "predicable" "yes")]
9017 )
9018
9019 (define_insn "*load_indirect_jump"
9020 [(set (pc)
9021 (match_operand:SI 0 "memory_operand" "m"))]
9022 "TARGET_ARM"
9023 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9024 [(set_attr "type" "load1")
9025 (set_attr "pool_range" "4096")
9026 (set_attr "neg_pool_range" "4084")
9027 (set_attr "predicable" "yes")]
9028 )
9029
9030 ;; NB Never uses BX.
9031 (define_insn "*thumb1_indirect_jump"
9032 [(set (pc)
9033 (match_operand:SI 0 "register_operand" "l*r"))]
9034 "TARGET_THUMB1"
9035 "mov\\tpc, %0"
9036 [(set_attr "conds" "clob")
9037 (set_attr "length" "2")]
9038 )
9039
9040 \f
9041 ;; Misc insns
9042
9043 (define_insn "nop"
9044 [(const_int 0)]
9045 "TARGET_EITHER"
9046 "*
9047 if (TARGET_UNIFIED_ASM)
9048 return \"nop\";
9049 if (TARGET_ARM)
9050 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9051 return \"mov\\tr8, r8\";
9052 "
9053 [(set (attr "length")
9054 (if_then_else (eq_attr "is_thumb" "yes")
9055 (const_int 2)
9056 (const_int 4)))]
9057 )
9058
9059 \f
9060 ;; Patterns to allow combination of arithmetic, cond code and shifts
9061
9062 (define_insn "*arith_shiftsi"
9063 [(set (match_operand:SI 0 "s_register_operand" "=r")
9064 (match_operator:SI 1 "shiftable_operator"
9065 [(match_operator:SI 3 "shift_operator"
9066 [(match_operand:SI 4 "s_register_operand" "r")
9067 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9068 (match_operand:SI 2 "s_register_operand" "r")]))]
9069 "TARGET_ARM"
9070 "%i1%?\\t%0, %2, %4%S3"
9071 [(set_attr "predicable" "yes")
9072 (set_attr "shift" "4")
9073 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9074 (const_string "alu_shift")
9075 (const_string "alu_shift_reg")))]
9076 )
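
;; For illustration (registers follow the constraints above and are
;; arbitrary): this pattern is what lets an expression such as
;;	a + (b << 2)
;; be emitted as the single instruction
;;	add	r0, r2, r4, lsl #2
;; the alu_shift/alu_shift_reg type split tells the scheduler whether the
;; shift amount is a constant or a register.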
9077
9078 (define_split
9079 [(set (match_operand:SI 0 "s_register_operand" "")
9080 (match_operator:SI 1 "shiftable_operator"
9081 [(match_operator:SI 2 "shiftable_operator"
9082 [(match_operator:SI 3 "shift_operator"
9083 [(match_operand:SI 4 "s_register_operand" "")
9084 (match_operand:SI 5 "reg_or_int_operand" "")])
9085 (match_operand:SI 6 "s_register_operand" "")])
9086 (match_operand:SI 7 "arm_rhs_operand" "")]))
9087 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9088 "TARGET_ARM"
9089 [(set (match_dup 8)
9090 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9091 (match_dup 6)]))
9092 (set (match_dup 0)
9093 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9094 "")
9095
9096 (define_insn "*arith_shiftsi_compare0"
9097 [(set (reg:CC_NOOV CC_REGNUM)
9098 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9099 [(match_operator:SI 3 "shift_operator"
9100 [(match_operand:SI 4 "s_register_operand" "r")
9101 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9102 (match_operand:SI 2 "s_register_operand" "r")])
9103 (const_int 0)))
9104 (set (match_operand:SI 0 "s_register_operand" "=r")
9105 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9106 (match_dup 2)]))]
9107 "TARGET_ARM"
9108 "%i1%.\\t%0, %2, %4%S3"
9109 [(set_attr "conds" "set")
9110 (set_attr "shift" "4")
9111 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9112 (const_string "alu_shift")
9113 (const_string "alu_shift_reg")))]
9114 )
9115
9116 (define_insn "*arith_shiftsi_compare0_scratch"
9117 [(set (reg:CC_NOOV CC_REGNUM)
9118 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9119 [(match_operator:SI 3 "shift_operator"
9120 [(match_operand:SI 4 "s_register_operand" "r")
9121 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9122 (match_operand:SI 2 "s_register_operand" "r")])
9123 (const_int 0)))
9124 (clobber (match_scratch:SI 0 "=r"))]
9125 "TARGET_ARM"
9126 "%i1%.\\t%0, %2, %4%S3"
9127 [(set_attr "conds" "set")
9128 (set_attr "shift" "4")
9129 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9130 (const_string "alu_shift")
9131 (const_string "alu_shift_reg")))]
9132 )
9133
9134 (define_insn "*sub_shiftsi"
9135 [(set (match_operand:SI 0 "s_register_operand" "=r")
9136 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9137 (match_operator:SI 2 "shift_operator"
9138 [(match_operand:SI 3 "s_register_operand" "r")
9139 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9140 "TARGET_ARM"
9141 "sub%?\\t%0, %1, %3%S2"
9142 [(set_attr "predicable" "yes")
9143 (set_attr "shift" "3")
9144 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9145 (const_string "alu_shift")
9146 (const_string "alu_shift_reg")))]
9147 )
9148
9149 (define_insn "*sub_shiftsi_compare0"
9150 [(set (reg:CC_NOOV CC_REGNUM)
9151 (compare:CC_NOOV
9152 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9153 (match_operator:SI 2 "shift_operator"
9154 [(match_operand:SI 3 "s_register_operand" "r")
9155 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9156 (const_int 0)))
9157 (set (match_operand:SI 0 "s_register_operand" "=r")
9158 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9159 (match_dup 4)])))]
9160 "TARGET_ARM"
9161 "sub%.\\t%0, %1, %3%S2"
9162 [(set_attr "conds" "set")
9163 (set_attr "shift" "3")
9164 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9165 (const_string "alu_shift")
9166 (const_string "alu_shift_reg")))]
9167 )
9168
9169 (define_insn "*sub_shiftsi_compare0_scratch"
9170 [(set (reg:CC_NOOV CC_REGNUM)
9171 (compare:CC_NOOV
9172 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9173 (match_operator:SI 2 "shift_operator"
9174 [(match_operand:SI 3 "s_register_operand" "r")
9175 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9176 (const_int 0)))
9177 (clobber (match_scratch:SI 0 "=r"))]
9178 "TARGET_ARM"
9179 "sub%.\\t%0, %1, %3%S2"
9180 [(set_attr "conds" "set")
9181 (set_attr "shift" "3")
9182 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9183 (const_string "alu_shift")
9184 (const_string "alu_shift_reg")))]
9185 )
9186
9187 \f
9188
9189 (define_insn "*and_scc"
9190 [(set (match_operand:SI 0 "s_register_operand" "=r")
9191 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9192 [(match_operand 3 "cc_register" "") (const_int 0)])
9193 (match_operand:SI 2 "s_register_operand" "r")))]
9194 "TARGET_ARM"
9195 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9196 [(set_attr "conds" "use")
9197 (set_attr "length" "8")]
9198 )
9199
9200 (define_insn "*ior_scc"
9201 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9202 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9203 [(match_operand 3 "cc_register" "") (const_int 0)])
9204 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9205 "TARGET_ARM"
9206 "@
9207 orr%d2\\t%0, %1, #1
9208 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9209 [(set_attr "conds" "use")
9210 (set_attr "length" "4,8")]
9211 )
9212
9213 (define_insn "*compare_scc"
9214 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9215 (match_operator:SI 1 "arm_comparison_operator"
9216 [(match_operand:SI 2 "s_register_operand" "r,r")
9217 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9218 (clobber (reg:CC CC_REGNUM))]
9219 "TARGET_ARM"
9220 "*
9221 if (operands[3] == const0_rtx)
9222 {
9223 if (GET_CODE (operands[1]) == LT)
9224 return \"mov\\t%0, %2, lsr #31\";
9225
9226 if (GET_CODE (operands[1]) == GE)
9227 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9228
9229 if (GET_CODE (operands[1]) == EQ)
9230 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9231 }
9232
9233 if (GET_CODE (operands[1]) == NE)
9234 {
9235 if (which_alternative == 1)
9236 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9237 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9238 }
9239 if (which_alternative == 1)
9240 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9241 else
9242 output_asm_insn (\"cmp\\t%2, %3\", operands);
9243 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9244 "
9245 [(set_attr "conds" "clob")
9246 (set_attr "length" "12")]
9247 )
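
;; For illustration (registers arbitrary): storing a comparison as 0/1, e.g.
;;	int lt0 (int a) { return a < 0; }
;; hits the LT-against-zero special case above and becomes just
;;	mov	r0, r0, lsr #31
;; while a general comparison falls back to a cmp (or cmn) followed by two
;; conditional moves of #0 and #1.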
9248
9249 (define_insn "*cond_move"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9251 (if_then_else:SI (match_operator 3 "equality_operator"
9252 [(match_operator 4 "arm_comparison_operator"
9253 [(match_operand 5 "cc_register" "") (const_int 0)])
9254 (const_int 0)])
9255 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9256 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9257 "TARGET_ARM"
9258 "*
9259 if (GET_CODE (operands[3]) == NE)
9260 {
9261 if (which_alternative != 1)
9262 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9263 if (which_alternative != 0)
9264 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9265 return \"\";
9266 }
9267 if (which_alternative != 0)
9268 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9269 if (which_alternative != 1)
9270 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9271 return \"\";
9272 "
9273 [(set_attr "conds" "use")
9274 (set_attr "length" "4,4,8")]
9275 )
9276
9277 (define_insn "*cond_arith"
9278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9279 (match_operator:SI 5 "shiftable_operator"
9280 [(match_operator:SI 4 "arm_comparison_operator"
9281 [(match_operand:SI 2 "s_register_operand" "r,r")
9282 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9283 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9284 (clobber (reg:CC CC_REGNUM))]
9285 "TARGET_ARM"
9286 "*
9287 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9288 return \"%i5\\t%0, %1, %2, lsr #31\";
9289
9290 output_asm_insn (\"cmp\\t%2, %3\", operands);
9291 if (GET_CODE (operands[5]) == AND)
9292 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9293 else if (GET_CODE (operands[5]) == MINUS)
9294 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9295 else if (which_alternative != 0)
9296 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9297 return \"%i5%d4\\t%0, %1, #1\";
9298 "
9299 [(set_attr "conds" "clob")
9300 (set_attr "length" "12")]
9301 )
9302
9303 (define_insn "*cond_sub"
9304 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9305 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9306 (match_operator:SI 4 "arm_comparison_operator"
9307 [(match_operand:SI 2 "s_register_operand" "r,r")
9308 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9309 (clobber (reg:CC CC_REGNUM))]
9310 "TARGET_ARM"
9311 "*
9312 output_asm_insn (\"cmp\\t%2, %3\", operands);
9313 if (which_alternative != 0)
9314 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9315 return \"sub%d4\\t%0, %1, #1\";
9316 "
9317 [(set_attr "conds" "clob")
9318 (set_attr "length" "8,12")]
9319 )
9320
9321 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9322 (define_insn "*cmp_ite0"
9323 [(set (match_operand 6 "dominant_cc_register" "")
9324 (compare
9325 (if_then_else:SI
9326 (match_operator 4 "arm_comparison_operator"
9327 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9328 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9329 (match_operator:SI 5 "arm_comparison_operator"
9330 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9331 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9332 (const_int 0))
9333 (const_int 0)))]
9334 "TARGET_ARM"
9335 "*
9336 {
9337 static const char * const opcodes[4][2] =
9338 {
9339 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9340 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9341 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9342 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9343 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9344 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9345 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9346 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9347 };
9348 int swap =
9349 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9350
9351 return opcodes[which_alternative][swap];
9352 }"
9353 [(set_attr "conds" "set")
9354 (set_attr "length" "8")]
9355 )
9356
9357 (define_insn "*cmp_ite1"
9358 [(set (match_operand 6 "dominant_cc_register" "")
9359 (compare
9360 (if_then_else:SI
9361 (match_operator 4 "arm_comparison_operator"
9362 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9363 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9364 (match_operator:SI 5 "arm_comparison_operator"
9365 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9366 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9367 (const_int 1))
9368 (const_int 0)))]
9369 "TARGET_ARM"
9370 "*
9371 {
9372 static const char * const opcodes[4][2] =
9373 {
9374 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9375 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9376 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9377 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9378 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9379 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9380 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9381 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9382 };
9383 int swap =
9384 comparison_dominates_p (GET_CODE (operands[5]),
9385 reverse_condition (GET_CODE (operands[4])));
9386
9387 return opcodes[which_alternative][swap];
9388 }"
9389 [(set_attr "conds" "set")
9390 (set_attr "length" "8")]
9391 )
9392
9393 (define_insn "*cmp_and"
9394 [(set (match_operand 6 "dominant_cc_register" "")
9395 (compare
9396 (and:SI
9397 (match_operator 4 "arm_comparison_operator"
9398 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9399 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9400 (match_operator:SI 5 "arm_comparison_operator"
9401 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9402 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9403 (const_int 0)))]
9404 "TARGET_ARM"
9405 "*
9406 {
9407 static const char *const opcodes[4][2] =
9408 {
9409 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9410 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9411 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9412 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9413 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9414 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9415 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9416 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9417 };
9418 int swap =
9419 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9420
9421 return opcodes[which_alternative][swap];
9422 }"
9423 [(set_attr "conds" "set")
9424 (set_attr "predicable" "no")
9425 (set_attr "length" "8")]
9426 )
9427
9428 (define_insn "*cmp_ior"
9429 [(set (match_operand 6 "dominant_cc_register" "")
9430 (compare
9431 (ior:SI
9432 (match_operator 4 "arm_comparison_operator"
9433 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9434 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9435 (match_operator:SI 5 "arm_comparison_operator"
9436 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9437 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9438 (const_int 0)))]
9439 "TARGET_ARM"
9440 "*
9441 {
9442 static const char *const opcodes[4][2] =
9443 {
9444 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9445 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9446 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9447 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9448 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9449 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9450 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9451 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9452 };
9453 int swap =
9454 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9455
9456 return opcodes[which_alternative][swap];
9457 }
9458 "
9459 [(set_attr "conds" "set")
9460 (set_attr "length" "8")]
9461 )
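
;; For illustration: the dominance patterns let a condition such as
;;	(a == 0) || (b == 0)
;; be evaluated with one conditional-compare sequence, e.g.
;;	cmp	a, #0
;;	cmpne	b, #0
;; (operand order may be swapped as above); afterwards the flags read as for
;; a single EQ test, so only one conditional instruction or branch is needed
;; for the whole expression.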
9462
9463 (define_insn_and_split "*ior_scc_scc"
9464 [(set (match_operand:SI 0 "s_register_operand" "=r")
9465 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9466 [(match_operand:SI 1 "s_register_operand" "r")
9467 (match_operand:SI 2 "arm_add_operand" "rIL")])
9468 (match_operator:SI 6 "arm_comparison_operator"
9469 [(match_operand:SI 4 "s_register_operand" "r")
9470 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9471 (clobber (reg:CC CC_REGNUM))]
9472 "TARGET_ARM
9473 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9474 != CCmode)"
9475 "#"
9476 "TARGET_ARM && reload_completed"
9477 [(set (match_dup 7)
9478 (compare
9479 (ior:SI
9480 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9481 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9482 (const_int 0)))
9483 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9484 "operands[7]
9485 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9486 DOM_CC_X_OR_Y),
9487 CC_REGNUM);"
9488 [(set_attr "conds" "clob")
9489 (set_attr "length" "16")])
9490
9491 ; If the above pattern is followed by a CMP insn, then the compare is
9492 ; redundant, since we can rework the conditional instruction that follows.
9493 (define_insn_and_split "*ior_scc_scc_cmp"
9494 [(set (match_operand 0 "dominant_cc_register" "")
9495 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9496 [(match_operand:SI 1 "s_register_operand" "r")
9497 (match_operand:SI 2 "arm_add_operand" "rIL")])
9498 (match_operator:SI 6 "arm_comparison_operator"
9499 [(match_operand:SI 4 "s_register_operand" "r")
9500 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9501 (const_int 0)))
9502 (set (match_operand:SI 7 "s_register_operand" "=r")
9503 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9504 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9505 "TARGET_ARM"
9506 "#"
9507 "TARGET_ARM && reload_completed"
9508 [(set (match_dup 0)
9509 (compare
9510 (ior:SI
9511 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9512 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9513 (const_int 0)))
9514 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9515 ""
9516 [(set_attr "conds" "set")
9517 (set_attr "length" "16")])
9518
9519 (define_insn_and_split "*and_scc_scc"
9520 [(set (match_operand:SI 0 "s_register_operand" "=r")
9521 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9522 [(match_operand:SI 1 "s_register_operand" "r")
9523 (match_operand:SI 2 "arm_add_operand" "rIL")])
9524 (match_operator:SI 6 "arm_comparison_operator"
9525 [(match_operand:SI 4 "s_register_operand" "r")
9526 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9527 (clobber (reg:CC CC_REGNUM))]
9528 "TARGET_ARM
9529 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9530 != CCmode)"
9531 "#"
9532 "TARGET_ARM && reload_completed
9533 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9534 != CCmode)"
9535 [(set (match_dup 7)
9536 (compare
9537 (and:SI
9538 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9539 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9540 (const_int 0)))
9541 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9542 "operands[7]
9543 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9544 DOM_CC_X_AND_Y),
9545 CC_REGNUM);"
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "16")])
9548
9549 ; If the above pattern is followed by a CMP insn, then the compare is
9550 ; redundant, since we can rework the conditional instruction that follows.
9551 (define_insn_and_split "*and_scc_scc_cmp"
9552 [(set (match_operand 0 "dominant_cc_register" "")
9553 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9554 [(match_operand:SI 1 "s_register_operand" "r")
9555 (match_operand:SI 2 "arm_add_operand" "rIL")])
9556 (match_operator:SI 6 "arm_comparison_operator"
9557 [(match_operand:SI 4 "s_register_operand" "r")
9558 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9559 (const_int 0)))
9560 (set (match_operand:SI 7 "s_register_operand" "=r")
9561 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9562 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9563 "TARGET_ARM"
9564 "#"
9565 "TARGET_ARM && reload_completed"
9566 [(set (match_dup 0)
9567 (compare
9568 (and:SI
9569 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9570 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9571 (const_int 0)))
9572 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9573 ""
9574 [(set_attr "conds" "set")
9575 (set_attr "length" "16")])
9576
9577 ;; If there is no dominance in the comparison, then we can still save an
9578 ;; instruction in the AND case, since we know that the second compare
9579 ;; need only zero the value if false (if true, then the value is already
9580 ;; correct).
9581 (define_insn_and_split "*and_scc_scc_nodom"
9582 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9583 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9584 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9585 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9586 (match_operator:SI 6 "arm_comparison_operator"
9587 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9588 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9589 (clobber (reg:CC CC_REGNUM))]
9590 "TARGET_ARM
9591 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9592 == CCmode)"
9593 "#"
9594 "TARGET_ARM && reload_completed"
9595 [(parallel [(set (match_dup 0)
9596 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9597 (clobber (reg:CC CC_REGNUM))])
9598 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9599 (set (match_dup 0)
9600 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9601 (match_dup 0)
9602 (const_int 0)))]
9603 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9604 operands[4], operands[5]),
9605 CC_REGNUM);
9606 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9607 operands[5]);"
9608 [(set_attr "conds" "clob")
9609 (set_attr "length" "20")])
9610
9611 (define_split
9612 [(set (reg:CC_NOOV CC_REGNUM)
9613 (compare:CC_NOOV (ior:SI
9614 (and:SI (match_operand:SI 0 "s_register_operand" "")
9615 (const_int 1))
9616 (match_operator:SI 1 "comparison_operator"
9617 [(match_operand:SI 2 "s_register_operand" "")
9618 (match_operand:SI 3 "arm_add_operand" "")]))
9619 (const_int 0)))
9620 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9621 "TARGET_ARM"
9622 [(set (match_dup 4)
9623 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9624 (match_dup 0)))
9625 (set (reg:CC_NOOV CC_REGNUM)
9626 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9627 (const_int 0)))]
9628 "")
9629
9630 (define_split
9631 [(set (reg:CC_NOOV CC_REGNUM)
9632 (compare:CC_NOOV (ior:SI
9633 (match_operator:SI 1 "comparison_operator"
9634 [(match_operand:SI 2 "s_register_operand" "")
9635 (match_operand:SI 3 "arm_add_operand" "")])
9636 (and:SI (match_operand:SI 0 "s_register_operand" "")
9637 (const_int 1)))
9638 (const_int 0)))
9639 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9640 "TARGET_ARM"
9641 [(set (match_dup 4)
9642 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9643 (match_dup 0)))
9644 (set (reg:CC_NOOV CC_REGNUM)
9645 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9646 (const_int 0)))]
9647 "")
9648 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness.
9649
9650 (define_insn "*negscc"
9651 [(set (match_operand:SI 0 "s_register_operand" "=r")
9652 (neg:SI (match_operator 3 "arm_comparison_operator"
9653 [(match_operand:SI 1 "s_register_operand" "r")
9654 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9655 (clobber (reg:CC CC_REGNUM))]
9656 "TARGET_ARM"
9657 "*
9658 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9659 return \"mov\\t%0, %1, asr #31\";
9660
9661 if (GET_CODE (operands[3]) == NE)
9662 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9663
9664 output_asm_insn (\"cmp\\t%1, %2\", operands);
9665 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9666 return \"mvn%d3\\t%0, #0\";
9667 "
9668 [(set_attr "conds" "clob")
9669 (set_attr "length" "12")]
9670 )
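
;; For illustration (registers arbitrary): the LT-against-zero case above
;; means that
;;	r = -(a < 0);
;; (all ones when a is negative, zero otherwise) is emitted as just
;;	mov	r0, r1, asr #31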
9671
9672 (define_insn "movcond"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9674 (if_then_else:SI
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9677 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9678 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9679 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9680 (clobber (reg:CC CC_REGNUM))]
9681 "TARGET_ARM"
9682 "*
9683 if (GET_CODE (operands[5]) == LT
9684 && (operands[4] == const0_rtx))
9685 {
9686 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9687 {
9688 if (operands[2] == const0_rtx)
9689 return \"and\\t%0, %1, %3, asr #31\";
9690 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9691 }
9692 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9693 {
9694 if (operands[1] == const0_rtx)
9695 return \"bic\\t%0, %2, %3, asr #31\";
9696 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9697 }
9698 /* The only case that falls through to here is when both ops 1 & 2
9699 are constants. */
9700 }
9701
9702 if (GET_CODE (operands[5]) == GE
9703 && (operands[4] == const0_rtx))
9704 {
9705 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9706 {
9707 if (operands[2] == const0_rtx)
9708 return \"bic\\t%0, %1, %3, asr #31\";
9709 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9710 }
9711 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9712 {
9713 if (operands[1] == const0_rtx)
9714 return \"and\\t%0, %2, %3, asr #31\";
9715 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9716 }
9717 /* The only case that falls through to here is when both ops 1 & 2
9718 are constants. */
9719 }
9720 if (GET_CODE (operands[4]) == CONST_INT
9721 && !const_ok_for_arm (INTVAL (operands[4])))
9722 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9723 else
9724 output_asm_insn (\"cmp\\t%3, %4\", operands);
9725 if (which_alternative != 0)
9726 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9727 if (which_alternative != 1)
9728 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9729 return \"\";
9730 "
9731 [(set_attr "conds" "clob")
9732 (set_attr "length" "8,8,12")]
9733 )
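
;; For illustration: with operand 5 == LT and operand 4 == 0 the first
;; special case above turns
;;	r = (c < 0) ? a : 0;
;; into the single instruction
;;	and	%0, %1, %3, asr #31
;; because %3 (c) shifted asr #31 is all ones exactly when c is negative
;; and zero otherwise.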
9734
9735 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9736
9737 (define_insn "*ifcompare_plus_move"
9738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9739 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9740 [(match_operand:SI 4 "s_register_operand" "r,r")
9741 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9742 (plus:SI
9743 (match_operand:SI 2 "s_register_operand" "r,r")
9744 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9745 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9746 (clobber (reg:CC CC_REGNUM))]
9747 "TARGET_ARM"
9748 "#"
9749 [(set_attr "conds" "clob")
9750 (set_attr "length" "8,12")]
9751 )
9752
9753 (define_insn "*if_plus_move"
9754 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9755 (if_then_else:SI
9756 (match_operator 4 "arm_comparison_operator"
9757 [(match_operand 5 "cc_register" "") (const_int 0)])
9758 (plus:SI
9759 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9760 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9761 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9762 "TARGET_ARM"
9763 "@
9764 add%d4\\t%0, %2, %3
9765 sub%d4\\t%0, %2, #%n3
9766 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9767 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9768 [(set_attr "conds" "use")
9769 (set_attr "length" "4,4,8,8")
9770 (set_attr "type" "*,*,*,*")]
9771 )
9772
9773 (define_insn "*ifcompare_move_plus"
9774 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9775 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9776 [(match_operand:SI 4 "s_register_operand" "r,r")
9777 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9778 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9779 (plus:SI
9780 (match_operand:SI 2 "s_register_operand" "r,r")
9781 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9782 (clobber (reg:CC CC_REGNUM))]
9783 "TARGET_ARM"
9784 "#"
9785 [(set_attr "conds" "clob")
9786 (set_attr "length" "8,12")]
9787 )
9788
9789 (define_insn "*if_move_plus"
9790 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9791 (if_then_else:SI
9792 (match_operator 4 "arm_comparison_operator"
9793 [(match_operand 5 "cc_register" "") (const_int 0)])
9794 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9795 (plus:SI
9796 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9797 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9798 "TARGET_ARM"
9799 "@
9800 add%D4\\t%0, %2, %3
9801 sub%D4\\t%0, %2, #%n3
9802 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9803 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9804 [(set_attr "conds" "use")
9805 (set_attr "length" "4,4,8,8")
9806 (set_attr "type" "*,*,*,*")]
9807 )
9808
9809 (define_insn "*ifcompare_arith_arith"
9810 [(set (match_operand:SI 0 "s_register_operand" "=r")
9811 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9812 [(match_operand:SI 5 "s_register_operand" "r")
9813 (match_operand:SI 6 "arm_add_operand" "rIL")])
9814 (match_operator:SI 8 "shiftable_operator"
9815 [(match_operand:SI 1 "s_register_operand" "r")
9816 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9817 (match_operator:SI 7 "shiftable_operator"
9818 [(match_operand:SI 3 "s_register_operand" "r")
9819 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9820 (clobber (reg:CC CC_REGNUM))]
9821 "TARGET_ARM"
9822 "#"
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "12")]
9825 )
9826
9827 (define_insn "*if_arith_arith"
9828 [(set (match_operand:SI 0 "s_register_operand" "=r")
9829 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9830 [(match_operand 8 "cc_register" "") (const_int 0)])
9831 (match_operator:SI 6 "shiftable_operator"
9832 [(match_operand:SI 1 "s_register_operand" "r")
9833 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9834 (match_operator:SI 7 "shiftable_operator"
9835 [(match_operand:SI 3 "s_register_operand" "r")
9836 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9837 "TARGET_ARM"
9838 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9839 [(set_attr "conds" "use")
9840 (set_attr "length" "8")]
9841 )
9842
9843 (define_insn "*ifcompare_arith_move"
9844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9845 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9846 [(match_operand:SI 2 "s_register_operand" "r,r")
9847 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9848 (match_operator:SI 7 "shiftable_operator"
9849 [(match_operand:SI 4 "s_register_operand" "r,r")
9850 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9851 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9852 (clobber (reg:CC CC_REGNUM))]
9853 "TARGET_ARM"
9854 "*
9855 /* If we have an operation where (op x 0) is the identity operation, the
9856 conditional operator is LT or GE, we are comparing against zero, and
9857 everything is in registers, then we can do this in two instructions. */
9858 if (operands[3] == const0_rtx
9859 && GET_CODE (operands[7]) != AND
9860 && GET_CODE (operands[5]) == REG
9861 && GET_CODE (operands[1]) == REG
9862 && REGNO (operands[1]) == REGNO (operands[4])
9863 && REGNO (operands[4]) != REGNO (operands[0]))
9864 {
9865 if (GET_CODE (operands[6]) == LT)
9866 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9867 else if (GET_CODE (operands[6]) == GE)
9868 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9869 }
9870 if (GET_CODE (operands[3]) == CONST_INT
9871 && !const_ok_for_arm (INTVAL (operands[3])))
9872 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9873 else
9874 output_asm_insn (\"cmp\\t%2, %3\", operands);
9875 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9876 if (which_alternative != 0)
9877 return \"mov%D6\\t%0, %1\";
9878 return \"\";
9879 "
9880 [(set_attr "conds" "clob")
9881 (set_attr "length" "8,12")]
9882 )
9883
9884 (define_insn "*if_arith_move"
9885 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9886 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9887 [(match_operand 6 "cc_register" "") (const_int 0)])
9888 (match_operator:SI 5 "shiftable_operator"
9889 [(match_operand:SI 2 "s_register_operand" "r,r")
9890 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9891 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9892 "TARGET_ARM"
9893 "@
9894 %I5%d4\\t%0, %2, %3
9895 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9896 [(set_attr "conds" "use")
9897 (set_attr "length" "4,8")
9898 (set_attr "type" "*,*")]
9899 )
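
;; For illustration (condition and registers arbitrary): once the flags are
;; already set, a guarded update such as
;;	if (flag) x = y + z;
;; matches the first alternative above (destination tied to operand 1) and
;; is emitted as the single predicated instruction
;;	addne	r0, r1, r2
;; the second alternative adds a mov%D4 to load the other value when the
;; destination is not tied.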
9900
9901 (define_insn "*ifcompare_move_arith"
9902 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9903 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9904 [(match_operand:SI 4 "s_register_operand" "r,r")
9905 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9906 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9907 (match_operator:SI 7 "shiftable_operator"
9908 [(match_operand:SI 2 "s_register_operand" "r,r")
9909 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9910 (clobber (reg:CC CC_REGNUM))]
9911 "TARGET_ARM"
9912 "*
9913 /* If we have an operation where (op x 0) is the identity operation, the
9914 conditional operator is LT or GE, we are comparing against zero, and
9915 everything is in registers, then we can do this in two instructions. */
9916 if (operands[5] == const0_rtx
9917 && GET_CODE (operands[7]) != AND
9918 && GET_CODE (operands[3]) == REG
9919 && GET_CODE (operands[1]) == REG
9920 && REGNO (operands[1]) == REGNO (operands[2])
9921 && REGNO (operands[2]) != REGNO (operands[0]))
9922 {
9923 if (GET_CODE (operands[6]) == GE)
9924 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9925 else if (GET_CODE (operands[6]) == LT)
9926 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9927 }
9928
9929 if (GET_CODE (operands[5]) == CONST_INT
9930 && !const_ok_for_arm (INTVAL (operands[5])))
9931 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9932 else
9933 output_asm_insn (\"cmp\\t%4, %5\", operands);
9934
9935 if (which_alternative != 0)
9936 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9937 return \"%I7%D6\\t%0, %2, %3\";
9938 "
9939 [(set_attr "conds" "clob")
9940 (set_attr "length" "8,12")]
9941 )
9942
9943 (define_insn "*if_move_arith"
9944 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9945 (if_then_else:SI
9946 (match_operator 4 "arm_comparison_operator"
9947 [(match_operand 6 "cc_register" "") (const_int 0)])
9948 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9949 (match_operator:SI 5 "shiftable_operator"
9950 [(match_operand:SI 2 "s_register_operand" "r,r")
9951 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9952 "TARGET_ARM"
9953 "@
9954 %I5%D4\\t%0, %2, %3
9955 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9956 [(set_attr "conds" "use")
9957 (set_attr "length" "4,8")
9958 (set_attr "type" "*,*")]
9959 )
9960
9961 (define_insn "*ifcompare_move_not"
9962 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9963 (if_then_else:SI
9964 (match_operator 5 "arm_comparison_operator"
9965 [(match_operand:SI 3 "s_register_operand" "r,r")
9966 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9967 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9968 (not:SI
9969 (match_operand:SI 2 "s_register_operand" "r,r"))))
9970 (clobber (reg:CC CC_REGNUM))]
9971 "TARGET_ARM"
9972 "#"
9973 [(set_attr "conds" "clob")
9974 (set_attr "length" "8,12")]
9975 )
9976
9977 (define_insn "*if_move_not"
9978 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9979 (if_then_else:SI
9980 (match_operator 4 "arm_comparison_operator"
9981 [(match_operand 3 "cc_register" "") (const_int 0)])
9982 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9983 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9984 "TARGET_ARM"
9985 "@
9986 mvn%D4\\t%0, %2
9987 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9988 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9989 [(set_attr "conds" "use")
9990 (set_attr "length" "4,8,8")]
9991 )
9992
9993 (define_insn "*ifcompare_not_move"
9994 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9995 (if_then_else:SI
9996 (match_operator 5 "arm_comparison_operator"
9997 [(match_operand:SI 3 "s_register_operand" "r,r")
9998 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9999 (not:SI
10000 (match_operand:SI 2 "s_register_operand" "r,r"))
10001 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10002 (clobber (reg:CC CC_REGNUM))]
10003 "TARGET_ARM"
10004 "#"
10005 [(set_attr "conds" "clob")
10006 (set_attr "length" "8,12")]
10007 )
10008
10009 (define_insn "*if_not_move"
10010 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10011 (if_then_else:SI
10012 (match_operator 4 "arm_comparison_operator"
10013 [(match_operand 3 "cc_register" "") (const_int 0)])
10014 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10015 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10016 "TARGET_ARM"
10017 "@
10018 mvn%d4\\t%0, %2
10019 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10020 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10021 [(set_attr "conds" "use")
10022 (set_attr "length" "4,8,8")]
10023 )
10024
10025 (define_insn "*ifcompare_shift_move"
10026 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10027 (if_then_else:SI
10028 (match_operator 6 "arm_comparison_operator"
10029 [(match_operand:SI 4 "s_register_operand" "r,r")
10030 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10031 (match_operator:SI 7 "shift_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r,r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10034 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10035 (clobber (reg:CC CC_REGNUM))]
10036 "TARGET_ARM"
10037 "#"
10038 [(set_attr "conds" "clob")
10039 (set_attr "length" "8,12")]
10040 )
10041
10042 (define_insn "*if_shift_move"
10043 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10044 (if_then_else:SI
10045 (match_operator 5 "arm_comparison_operator"
10046 [(match_operand 6 "cc_register" "") (const_int 0)])
10047 (match_operator:SI 4 "shift_operator"
10048 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10049 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10050 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10051 "TARGET_ARM"
10052 "@
10053 mov%d5\\t%0, %2%S4
10054 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10055 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10056 [(set_attr "conds" "use")
10057 (set_attr "shift" "2")
10058 (set_attr "length" "4,8,8")
10059 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10060 (const_string "alu_shift")
10061 (const_string "alu_shift_reg")))]
10062 )
10063
10064 (define_insn "*ifcompare_move_shift"
10065 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10066 (if_then_else:SI
10067 (match_operator 6 "arm_comparison_operator"
10068 [(match_operand:SI 4 "s_register_operand" "r,r")
10069 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10070 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10071 (match_operator:SI 7 "shift_operator"
10072 [(match_operand:SI 2 "s_register_operand" "r,r")
10073 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10074 (clobber (reg:CC CC_REGNUM))]
10075 "TARGET_ARM"
10076 "#"
10077 [(set_attr "conds" "clob")
10078 (set_attr "length" "8,12")]
10079 )
10080
10081 (define_insn "*if_move_shift"
10082 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10083 (if_then_else:SI
10084 (match_operator 5 "arm_comparison_operator"
10085 [(match_operand 6 "cc_register" "") (const_int 0)])
10086 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10087 (match_operator:SI 4 "shift_operator"
10088 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10089 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10090 "TARGET_ARM"
10091 "@
10092 mov%D5\\t%0, %2%S4
10093 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10094 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10095 [(set_attr "conds" "use")
10096 (set_attr "shift" "2")
10097 (set_attr "length" "4,8,8")
10098 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10099 (const_string "alu_shift")
10100 (const_string "alu_shift_reg")))]
10101 )
10102
10103 (define_insn "*ifcompare_shift_shift"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r")
10105 (if_then_else:SI
10106 (match_operator 7 "arm_comparison_operator"
10107 [(match_operand:SI 5 "s_register_operand" "r")
10108 (match_operand:SI 6 "arm_add_operand" "rIL")])
10109 (match_operator:SI 8 "shift_operator"
10110 [(match_operand:SI 1 "s_register_operand" "r")
10111 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10112 (match_operator:SI 9 "shift_operator"
10113 [(match_operand:SI 3 "s_register_operand" "r")
10114 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10115 (clobber (reg:CC CC_REGNUM))]
10116 "TARGET_ARM"
10117 "#"
10118 [(set_attr "conds" "clob")
10119 (set_attr "length" "12")]
10120 )
10121
10122 (define_insn "*if_shift_shift"
10123 [(set (match_operand:SI 0 "s_register_operand" "=r")
10124 (if_then_else:SI
10125 (match_operator 5 "arm_comparison_operator"
10126 [(match_operand 8 "cc_register" "") (const_int 0)])
10127 (match_operator:SI 6 "shift_operator"
10128 [(match_operand:SI 1 "s_register_operand" "r")
10129 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10130 (match_operator:SI 7 "shift_operator"
10131 [(match_operand:SI 3 "s_register_operand" "r")
10132 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10133 "TARGET_ARM"
10134 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10135 [(set_attr "conds" "use")
10136 (set_attr "shift" "1")
10137 (set_attr "length" "8")
10138 (set (attr "type") (if_then_else
10139 (and (match_operand 2 "const_int_operand" "")
10140 (match_operand 4 "const_int_operand" ""))
10141 (const_string "alu_shift")
10142 (const_string "alu_shift_reg")))]
10143 )
10144
10145 (define_insn "*ifcompare_not_arith"
10146 [(set (match_operand:SI 0 "s_register_operand" "=r")
10147 (if_then_else:SI
10148 (match_operator 6 "arm_comparison_operator"
10149 [(match_operand:SI 4 "s_register_operand" "r")
10150 (match_operand:SI 5 "arm_add_operand" "rIL")])
10151 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10152 (match_operator:SI 7 "shiftable_operator"
10153 [(match_operand:SI 2 "s_register_operand" "r")
10154 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10155 (clobber (reg:CC CC_REGNUM))]
10156 "TARGET_ARM"
10157 "#"
10158 [(set_attr "conds" "clob")
10159 (set_attr "length" "12")]
10160 )
10161
10162 (define_insn "*if_not_arith"
10163 [(set (match_operand:SI 0 "s_register_operand" "=r")
10164 (if_then_else:SI
10165 (match_operator 5 "arm_comparison_operator"
10166 [(match_operand 4 "cc_register" "") (const_int 0)])
10167 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10168 (match_operator:SI 6 "shiftable_operator"
10169 [(match_operand:SI 2 "s_register_operand" "r")
10170 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10171 "TARGET_ARM"
10172 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10173 [(set_attr "conds" "use")
10174 (set_attr "length" "8")]
10175 )
10176
10177 (define_insn "*ifcompare_arith_not"
10178 [(set (match_operand:SI 0 "s_register_operand" "=r")
10179 (if_then_else:SI
10180 (match_operator 6 "arm_comparison_operator"
10181 [(match_operand:SI 4 "s_register_operand" "r")
10182 (match_operand:SI 5 "arm_add_operand" "rIL")])
10183 (match_operator:SI 7 "shiftable_operator"
10184 [(match_operand:SI 2 "s_register_operand" "r")
10185 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10186 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10187 (clobber (reg:CC CC_REGNUM))]
10188 "TARGET_ARM"
10189 "#"
10190 [(set_attr "conds" "clob")
10191 (set_attr "length" "12")]
10192 )
10193
10194 (define_insn "*if_arith_not"
10195 [(set (match_operand:SI 0 "s_register_operand" "=r")
10196 (if_then_else:SI
10197 (match_operator 5 "arm_comparison_operator"
10198 [(match_operand 4 "cc_register" "") (const_int 0)])
10199 (match_operator:SI 6 "shiftable_operator"
10200 [(match_operand:SI 2 "s_register_operand" "r")
10201 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10202 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10203 "TARGET_ARM"
10204 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10205 [(set_attr "conds" "use")
10206 (set_attr "length" "8")]
10207 )
10208
10209 (define_insn "*ifcompare_neg_move"
10210 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10211 (if_then_else:SI
10212 (match_operator 5 "arm_comparison_operator"
10213 [(match_operand:SI 3 "s_register_operand" "r,r")
10214 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10215 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10216 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10217 (clobber (reg:CC CC_REGNUM))]
10218 "TARGET_ARM"
10219 "#"
10220 [(set_attr "conds" "clob")
10221 (set_attr "length" "8,12")]
10222 )
10223
10224 (define_insn "*if_neg_move"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10226 (if_then_else:SI
10227 (match_operator 4 "arm_comparison_operator"
10228 [(match_operand 3 "cc_register" "") (const_int 0)])
10229 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10230 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10231 "TARGET_ARM"
10232 "@
10233 rsb%d4\\t%0, %2, #0
10234 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10235 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10236 [(set_attr "conds" "use")
10237 (set_attr "length" "4,8,8")]
10238 )
10239
10240 (define_insn "*ifcompare_move_neg"
10241 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10242 (if_then_else:SI
10243 (match_operator 5 "arm_comparison_operator"
10244 [(match_operand:SI 3 "s_register_operand" "r,r")
10245 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10246 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10247 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10248 (clobber (reg:CC CC_REGNUM))]
10249 "TARGET_ARM"
10250 "#"
10251 [(set_attr "conds" "clob")
10252 (set_attr "length" "8,12")]
10253 )
10254
10255 (define_insn "*if_move_neg"
10256 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10257 (if_then_else:SI
10258 (match_operator 4 "arm_comparison_operator"
10259 [(match_operand 3 "cc_register" "") (const_int 0)])
10260 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10261 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10262 "TARGET_ARM"
10263 "@
10264 rsb%D4\\t%0, %2, #0
10265 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10266 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10267 [(set_attr "conds" "use")
10268 (set_attr "length" "4,8,8")]
10269 )
10270
10271 (define_insn "*arith_adjacentmem"
10272 [(set (match_operand:SI 0 "s_register_operand" "=r")
10273 (match_operator:SI 1 "shiftable_operator"
10274 [(match_operand:SI 2 "memory_operand" "m")
10275 (match_operand:SI 3 "memory_operand" "m")]))
10276 (clobber (match_scratch:SI 4 "=r"))]
10277 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10278 "*
10279 {
10280 rtx ldm[3];
10281 rtx arith[4];
10282 rtx base_reg;
10283 HOST_WIDE_INT val1 = 0, val2 = 0;
10284
10285 if (REGNO (operands[0]) > REGNO (operands[4]))
10286 {
10287 ldm[1] = operands[4];
10288 ldm[2] = operands[0];
10289 }
10290 else
10291 {
10292 ldm[1] = operands[0];
10293 ldm[2] = operands[4];
10294 }
10295
10296 base_reg = XEXP (operands[2], 0);
10297
10298 if (!REG_P (base_reg))
10299 {
10300 val1 = INTVAL (XEXP (base_reg, 1));
10301 base_reg = XEXP (base_reg, 0);
10302 }
10303
10304 if (!REG_P (XEXP (operands[3], 0)))
10305 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10306
10307 arith[0] = operands[0];
10308 arith[3] = operands[1];
10309
10310 if (val1 < val2)
10311 {
10312 arith[1] = ldm[1];
10313 arith[2] = ldm[2];
10314 }
10315 else
10316 {
10317 arith[1] = ldm[2];
10318 arith[2] = ldm[1];
10319 }
10320
10321 ldm[0] = base_reg;
10322 if (val1 != 0 && val2 != 0)
10323 {
10324 rtx ops[3];
10325
10326 if (val1 == 4 || val2 == 4)
10327 /* Other val must be 8, since we know they are adjacent and neither
10328 is zero. */
10329 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10330 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10331 {
10332 ldm[0] = ops[0] = operands[4];
10333 ops[1] = base_reg;
10334 ops[2] = GEN_INT (val1);
10335 output_add_immediate (ops);
10336 if (val1 < val2)
10337 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10338 else
10339 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10340 }
10341 else
10342 {
10343 /* Offset is out of range for a single add, so use two ldr. */
10344 ops[0] = ldm[1];
10345 ops[1] = base_reg;
10346 ops[2] = GEN_INT (val1);
10347 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10348 ops[0] = ldm[2];
10349 ops[2] = GEN_INT (val2);
10350 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10351 }
10352 }
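/* Only one offset is non-zero here (val2 == 0), so one word sits at the
   base address and the other at base +/- 4; a single ldmia or ldmda from
   the base register covers both.  */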
10353 else if (val1 != 0)
10354 {
10355 if (val1 < val2)
10356 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10357 else
10358 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10359 }
10360 else
10361 {
10362 if (val1 < val2)
10363 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10364 else
10365 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10366 }
10367 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10368 return \"\";
10369 }"
10370 [(set_attr "length" "12")
10371 (set_attr "predicable" "yes")
10372 (set_attr "type" "load1")]
10373 )
10374
10375 ; This pattern is never tried by combine, so do it as a peephole
10376
10377 (define_peephole2
10378 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10379 (match_operand:SI 1 "arm_general_register_operand" ""))
10380 (set (reg:CC CC_REGNUM)
10381 (compare:CC (match_dup 1) (const_int 0)))]
10382 "TARGET_ARM"
10383 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10384 (set (match_dup 0) (match_dup 1))])]
10385 ""
10386 )
10387
10388 ; Peepholes to spot possible load- and store-multiples.  If the ordering is
10389 ; reversed, check that the memory references aren't volatile.
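;
; As a hedged illustration (the registers and offsets below are made up, not
; taken from the patterns), a run of consecutive word loads such as
;
;       ldr     r0, [r4, #0]
;       ldr     r1, [r4, #4]
;       ldr     r2, [r4, #8]
;       ldr     r3, [r4, #12]
;
; may be combined into a single load-multiple:
;
;       ldmia   r4, {r0, r1, r2, r3}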
10390
10391 (define_peephole
10392 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10393 (match_operand:SI 4 "memory_operand" "m"))
10394 (set (match_operand:SI 1 "s_register_operand" "=rk")
10395 (match_operand:SI 5 "memory_operand" "m"))
10396 (set (match_operand:SI 2 "s_register_operand" "=rk")
10397 (match_operand:SI 6 "memory_operand" "m"))
10398 (set (match_operand:SI 3 "s_register_operand" "=rk")
10399 (match_operand:SI 7 "memory_operand" "m"))]
10400 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10401 "*
10402 return emit_ldm_seq (operands, 4);
10403 "
10404 )
10405
10406 (define_peephole
10407 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10408 (match_operand:SI 3 "memory_operand" "m"))
10409 (set (match_operand:SI 1 "s_register_operand" "=rk")
10410 (match_operand:SI 4 "memory_operand" "m"))
10411 (set (match_operand:SI 2 "s_register_operand" "=rk")
10412 (match_operand:SI 5 "memory_operand" "m"))]
10413 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10414 "*
10415 return emit_ldm_seq (operands, 3);
10416 "
10417 )
10418
10419 (define_peephole
10420 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10421 (match_operand:SI 2 "memory_operand" "m"))
10422 (set (match_operand:SI 1 "s_register_operand" "=rk")
10423 (match_operand:SI 3 "memory_operand" "m"))]
10424 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10425 "*
10426 return emit_ldm_seq (operands, 2);
10427 "
10428 )
10429
10430 (define_peephole
10431 [(set (match_operand:SI 4 "memory_operand" "=m")
10432 (match_operand:SI 0 "s_register_operand" "rk"))
10433 (set (match_operand:SI 5 "memory_operand" "=m")
10434 (match_operand:SI 1 "s_register_operand" "rk"))
10435 (set (match_operand:SI 6 "memory_operand" "=m")
10436 (match_operand:SI 2 "s_register_operand" "rk"))
10437 (set (match_operand:SI 7 "memory_operand" "=m")
10438 (match_operand:SI 3 "s_register_operand" "rk"))]
10439 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10440 "*
10441 return emit_stm_seq (operands, 4);
10442 "
10443 )
10444
10445 (define_peephole
10446 [(set (match_operand:SI 3 "memory_operand" "=m")
10447 (match_operand:SI 0 "s_register_operand" "rk"))
10448 (set (match_operand:SI 4 "memory_operand" "=m")
10449 (match_operand:SI 1 "s_register_operand" "rk"))
10450 (set (match_operand:SI 5 "memory_operand" "=m")
10451 (match_operand:SI 2 "s_register_operand" "rk"))]
10452 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10453 "*
10454 return emit_stm_seq (operands, 3);
10455 "
10456 )
10457
10458 (define_peephole
10459 [(set (match_operand:SI 2 "memory_operand" "=m")
10460 (match_operand:SI 0 "s_register_operand" "rk"))
10461 (set (match_operand:SI 3 "memory_operand" "=m")
10462 (match_operand:SI 1 "s_register_operand" "rk"))]
10463 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10464 "*
10465 return emit_stm_seq (operands, 2);
10466 "
10467 )
10468
10469 (define_split
10470 [(set (match_operand:SI 0 "s_register_operand" "")
10471 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10472 (const_int 0))
10473 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10474 [(match_operand:SI 3 "s_register_operand" "")
10475 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10476 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10477 "TARGET_ARM"
10478 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10479 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10480 (match_dup 5)))]
10481 ""
10482 )
10483
10484 ;; This split can be used because CC_Z mode implies that the following
10485 ;; branch will be an equality, or an unsigned inequality, so the sign
10486 ;; extension is not needed.
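;;
;; For example, a test such as ((x << 24) == 0x2a000000), where x was loaded
;; as a byte, can instead compare the zero-extended byte against 0x2a
;; (0x2a000000 >> 24), which is the rewrite this split performs.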
10487
10488 (define_split
10489 [(set (reg:CC_Z CC_REGNUM)
10490 (compare:CC_Z
10491 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10492 (const_int 24))
10493 (match_operand 1 "const_int_operand" "")))
10494 (clobber (match_scratch:SI 2 ""))]
10495 "TARGET_ARM
10496 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10497 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10498 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10499 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10500 "
10501 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10502 "
10503 )
10504 ;; ??? Check the patterns above for Thumb-2 usefulness
10505
10506 (define_expand "prologue"
10507 [(clobber (const_int 0))]
10508 "TARGET_EITHER"
10509 "if (TARGET_32BIT)
10510 arm_expand_prologue ();
10511 else
10512 thumb1_expand_prologue ();
10513 DONE;
10514 "
10515 )
10516
10517 (define_expand "epilogue"
10518 [(clobber (const_int 0))]
10519 "TARGET_EITHER"
10520 "
10521 if (crtl->calls_eh_return)
10522 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10523 if (TARGET_THUMB1)
10524 thumb1_expand_epilogue ();
10525 else if (USE_RETURN_INSN (FALSE))
10526 {
10527 emit_jump_insn (gen_return ());
10528 DONE;
10529 }
10530 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10531 gen_rtvec (1,
10532 gen_rtx_RETURN (VOIDmode)),
10533 VUNSPEC_EPILOGUE));
10534 DONE;
10535 "
10536 )
10537
10538 ;; Note - although unspec_volatiles USE all hard registers,
10539 ;; USEs are ignored after reload has completed.  Thus we need
10540 ;; to add an unspec of the link register to ensure that flow
10541 ;; does not think that it is unused by the sibcall branch that
10542 ;; will replace the standard function epilogue.
10543 (define_insn "sibcall_epilogue"
10544 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10545 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10546 "TARGET_32BIT"
10547 "*
10548 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10549 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10550 return arm_output_epilogue (next_nonnote_insn (insn));
10551 "
10552 ;; Length is absolute worst case
10553 [(set_attr "length" "44")
10554 (set_attr "type" "block")
10555 ;; We don't clobber the conditions, but the potential length of this
10556 ;; operation is sufficient to make conditionalizing the sequence
10557 ;; unlikely to be profitable.
10558 (set_attr "conds" "clob")]
10559 )
10560
10561 (define_insn "*epilogue_insns"
10562 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10563 "TARGET_EITHER"
10564 "*
10565 if (TARGET_32BIT)
10566 return arm_output_epilogue (NULL);
10567 else /* TARGET_THUMB1 */
10568 return thumb_unexpanded_epilogue ();
10569 "
10570 ; Length is absolute worst case
10571 [(set_attr "length" "44")
10572 (set_attr "type" "block")
10573 ;; We don't clobber the conditions, but the potential length of this
10574 ;; operation is sufficient to make conditionalizing the sequence
10575 ;; unlikely to be profitable.
10576 (set_attr "conds" "clob")]
10577 )
10578
10579 (define_expand "eh_epilogue"
10580 [(use (match_operand:SI 0 "register_operand" ""))
10581 (use (match_operand:SI 1 "register_operand" ""))
10582 (use (match_operand:SI 2 "register_operand" ""))]
10583 "TARGET_EITHER"
10584 "
10585 {
10586 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10587 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10588 {
10589 rtx ra = gen_rtx_REG (Pmode, 2);
10590
10591 emit_move_insn (ra, operands[2]);
10592 operands[2] = ra;
10593 }
10594 /* This is a hack -- we may have crystallized the function type too
10595 early. */
10596 cfun->machine->func_type = 0;
10597 }"
10598 )
10599
10600 ;; This split is only used during output to reduce the number of patterns
10601 ;; that need assembler instructions added to them.  We allowed the setting
10602 ;; of the conditions to be implicit during rtl generation so that
10603 ;; the conditional compare patterns would work.  However, this conflicts to
10604 ;; some extent with the conditional data operations, so we have to split them
10605 ;; up again here.
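;;
;; As a rough sketch with hypothetical registers: for a conditional move
;; whose THEN arm simply reuses the destination (the first split below),
;; say "r0 = (r1 < 0) ? r0 : r2", the split emits an explicit compare and
;; a move predicated on the reversed condition, printed as something like:
;;
;;   cmp   r1, #0
;;   movge r0, r2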
10606
10607 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10608 ;; conditional execution sufficient?
10609
10610 (define_split
10611 [(set (match_operand:SI 0 "s_register_operand" "")
10612 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10613 [(match_operand 2 "" "") (match_operand 3 "" "")])
10614 (match_dup 0)
10615 (match_operand 4 "" "")))
10616 (clobber (reg:CC CC_REGNUM))]
10617 "TARGET_ARM && reload_completed"
10618 [(set (match_dup 5) (match_dup 6))
10619 (cond_exec (match_dup 7)
10620 (set (match_dup 0) (match_dup 4)))]
10621 "
10622 {
10623 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10624 operands[2], operands[3]);
10625 enum rtx_code rc = GET_CODE (operands[1]);
10626
10627 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10628 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10629 if (mode == CCFPmode || mode == CCFPEmode)
10630 rc = reverse_condition_maybe_unordered (rc);
10631 else
10632 rc = reverse_condition (rc);
10633
10634 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10635 }"
10636 )
10637
10638 (define_split
10639 [(set (match_operand:SI 0 "s_register_operand" "")
10640 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10641 [(match_operand 2 "" "") (match_operand 3 "" "")])
10642 (match_operand 4 "" "")
10643 (match_dup 0)))
10644 (clobber (reg:CC CC_REGNUM))]
10645 "TARGET_ARM && reload_completed"
10646 [(set (match_dup 5) (match_dup 6))
10647 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10648 (set (match_dup 0) (match_dup 4)))]
10649 "
10650 {
10651 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10652 operands[2], operands[3]);
10653
10654 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10655 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10656 }"
10657 )
10658
10659 (define_split
10660 [(set (match_operand:SI 0 "s_register_operand" "")
10661 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10662 [(match_operand 2 "" "") (match_operand 3 "" "")])
10663 (match_operand 4 "" "")
10664 (match_operand 5 "" "")))
10665 (clobber (reg:CC CC_REGNUM))]
10666 "TARGET_ARM && reload_completed"
10667 [(set (match_dup 6) (match_dup 7))
10668 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10669 (set (match_dup 0) (match_dup 4)))
10670 (cond_exec (match_dup 8)
10671 (set (match_dup 0) (match_dup 5)))]
10672 "
10673 {
10674 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10675 operands[2], operands[3]);
10676 enum rtx_code rc = GET_CODE (operands[1]);
10677
10678 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10679 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10680 if (mode == CCFPmode || mode == CCFPEmode)
10681 rc = reverse_condition_maybe_unordered (rc);
10682 else
10683 rc = reverse_condition (rc);
10684
10685 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10686 }"
10687 )
10688
10689 (define_split
10690 [(set (match_operand:SI 0 "s_register_operand" "")
10691 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10692 [(match_operand:SI 2 "s_register_operand" "")
10693 (match_operand:SI 3 "arm_add_operand" "")])
10694 (match_operand:SI 4 "arm_rhs_operand" "")
10695 (not:SI
10696 (match_operand:SI 5 "s_register_operand" ""))))
10697 (clobber (reg:CC CC_REGNUM))]
10698 "TARGET_ARM && reload_completed"
10699 [(set (match_dup 6) (match_dup 7))
10700 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10701 (set (match_dup 0) (match_dup 4)))
10702 (cond_exec (match_dup 8)
10703 (set (match_dup 0) (not:SI (match_dup 5))))]
10704 "
10705 {
10706 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10707 operands[2], operands[3]);
10708 enum rtx_code rc = GET_CODE (operands[1]);
10709
10710 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10711 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10712 if (mode == CCFPmode || mode == CCFPEmode)
10713 rc = reverse_condition_maybe_unordered (rc);
10714 else
10715 rc = reverse_condition (rc);
10716
10717 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10718 }"
10719 )
10720
10721 (define_insn "*cond_move_not"
10722 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10723 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10724 [(match_operand 3 "cc_register" "") (const_int 0)])
10725 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10726 (not:SI
10727 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10728 "TARGET_ARM"
10729 "@
10730 mvn%D4\\t%0, %2
10731 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10732 [(set_attr "conds" "use")
10733 (set_attr "length" "4,8")]
10734 )
10735
10736 ;; The next two patterns occur when an AND operation is followed by a
10737 ;; scc insn sequence
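;;
;; For instance (the operands are hypothetical), sign-extracting bit 2 of
;; r1 into r0 with the first pattern below comes out roughly as:
;;
;;   ands  r0, r1, #4
;;   mvnne r0, #0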
10738
10739 (define_insn "*sign_extract_onebit"
10740 [(set (match_operand:SI 0 "s_register_operand" "=r")
10741 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10742 (const_int 1)
10743 (match_operand:SI 2 "const_int_operand" "n")))
10744 (clobber (reg:CC CC_REGNUM))]
10745 "TARGET_ARM"
10746 "*
10747 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10748 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10749 return \"mvnne\\t%0, #0\";
10750 "
10751 [(set_attr "conds" "clob")
10752 (set_attr "length" "8")]
10753 )
10754
10755 (define_insn "*not_signextract_onebit"
10756 [(set (match_operand:SI 0 "s_register_operand" "=r")
10757 (not:SI
10758 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10759 (const_int 1)
10760 (match_operand:SI 2 "const_int_operand" "n"))))
10761 (clobber (reg:CC CC_REGNUM))]
10762 "TARGET_ARM"
10763 "*
10764 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10765 output_asm_insn (\"tst\\t%1, %2\", operands);
10766 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10767 return \"movne\\t%0, #0\";
10768 "
10769 [(set_attr "conds" "clob")
10770 (set_attr "length" "12")]
10771 )
10772 ;; ??? The above patterns need auditing for Thumb-2
10773
10774 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10775 ;; expressions. For simplicity, the first register is also in the unspec
10776 ;; part.
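;;
;; As an illustration (the register set is hypothetical), pushing r4, r5 and
;; lr in ARM state is printed as
;;
;;   stmfd   sp!, {r4, r5, lr}
;;
;; while the Thumb-2 form of the same pattern prints "push {r4, r5, lr}".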
10777 (define_insn "*push_multi"
10778 [(match_parallel 2 "multi_register_push"
10779 [(set (match_operand:BLK 0 "memory_operand" "=m")
10780 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10781 UNSPEC_PUSH_MULT))])]
10782 "TARGET_32BIT"
10783 "*
10784 {
10785 int num_saves = XVECLEN (operands[2], 0);
10786
10787 /* For the StrongARM at least it is faster to
10788 use STR to store only a single register.
10789 In Thumb mode always use push, and the assembler will pick
10790 something appropriate. */
10791 if (num_saves == 1 && TARGET_ARM)
10792 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10793 else
10794 {
10795 int i;
10796 char pattern[100];
10797
10798 if (TARGET_ARM)
10799 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10800 else
10801 strcpy (pattern, \"push\\t{%1\");
10802
10803 for (i = 1; i < num_saves; i++)
10804 {
10805 strcat (pattern, \", %|\");
10806 strcat (pattern,
10807 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10808 }
10809
10810 strcat (pattern, \"}\");
10811 output_asm_insn (pattern, operands);
10812 }
10813
10814 return \"\";
10815 }"
10816 [(set_attr "type" "store4")]
10817 )
10818
10819 (define_insn "stack_tie"
10820 [(set (mem:BLK (scratch))
10821 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10822 (match_operand:SI 1 "s_register_operand" "rk")]
10823 UNSPEC_PRLG_STK))]
10824 ""
10825 ""
10826 [(set_attr "length" "0")]
10827 )
10828
10829 ;; Similarly for the floating point registers
10830 (define_insn "*push_fp_multi"
10831 [(match_parallel 2 "multi_register_push"
10832 [(set (match_operand:BLK 0 "memory_operand" "=m")
10833 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10834 UNSPEC_PUSH_MULT))])]
10835 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10836 "*
10837 {
10838 char pattern[100];
10839
10840 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10841 output_asm_insn (pattern, operands);
10842 return \"\";
10843 }"
10844 [(set_attr "type" "f_store")]
10845 )
10846
10847 ;; Special patterns for dealing with the constant pool
10848
10849 (define_insn "align_4"
10850 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10851 "TARGET_EITHER"
10852 "*
10853 assemble_align (32);
10854 return \"\";
10855 "
10856 )
10857
10858 (define_insn "align_8"
10859 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10860 "TARGET_EITHER"
10861 "*
10862 assemble_align (64);
10863 return \"\";
10864 "
10865 )
10866
10867 (define_insn "consttable_end"
10868 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10869 "TARGET_EITHER"
10870 "*
10871 making_const_table = FALSE;
10872 return \"\";
10873 "
10874 )
10875
10876 (define_insn "consttable_1"
10877 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10878 "TARGET_THUMB1"
10879 "*
10880 making_const_table = TRUE;
10881 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10882 assemble_zeros (3);
10883 return \"\";
10884 "
10885 [(set_attr "length" "4")]
10886 )
10887
10888 (define_insn "consttable_2"
10889 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10890 "TARGET_THUMB1"
10891 "*
10892 making_const_table = TRUE;
10893 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10894 assemble_zeros (2);
10895 return \"\";
10896 "
10897 [(set_attr "length" "4")]
10898 )
10899
10900 (define_insn "consttable_4"
10901 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10902 "TARGET_EITHER"
10903 "*
10904 {
10905 making_const_table = TRUE;
10906 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10907 {
10908 case MODE_FLOAT:
10909 {
10910 REAL_VALUE_TYPE r;
10911 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10912 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10913 break;
10914 }
10915 default:
10916 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10917 mark_symbol_refs_as_used (operands[0]);
10918 break;
10919 }
10920 return \"\";
10921 }"
10922 [(set_attr "length" "4")]
10923 )
10924
10925 (define_insn "consttable_8"
10926 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10927 "TARGET_EITHER"
10928 "*
10929 {
10930 making_const_table = TRUE;
10931 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10932 {
10933 case MODE_FLOAT:
10934 {
10935 REAL_VALUE_TYPE r;
10936 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10937 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10938 break;
10939 }
10940 default:
10941 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10942 break;
10943 }
10944 return \"\";
10945 }"
10946 [(set_attr "length" "8")]
10947 )
10948
10949 (define_insn "consttable_16"
10950 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10951 "TARGET_EITHER"
10952 "*
10953 {
10954 making_const_table = TRUE;
10955 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10956 {
10957 case MODE_FLOAT:
10958 {
10959 REAL_VALUE_TYPE r;
10960 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10961 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10962 break;
10963 }
10964 default:
10965 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10966 break;
10967 }
10968 return \"\";
10969 }"
10970 [(set_attr "length" "16")]
10971 )
10972
10973 ;; Miscellaneous Thumb patterns
10974
10975 (define_expand "tablejump"
10976 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10977 (use (label_ref (match_operand 1 "" "")))])]
10978 "TARGET_THUMB1"
10979 "
10980 if (flag_pic)
10981 {
10982 /* Hopefully, CSE will eliminate this copy. */
10983 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10984 rtx reg2 = gen_reg_rtx (SImode);
10985
10986 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10987 operands[0] = reg2;
10988 }
10989 "
10990 )
10991
10992 ;; NB never uses BX.
10993 (define_insn "*thumb1_tablejump"
10994 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10995 (use (label_ref (match_operand 1 "" "")))]
10996 "TARGET_THUMB1"
10997 "mov\\t%|pc, %0"
10998 [(set_attr "length" "2")]
10999 )
11000
11001 ;; V5 instructions.
11002
11003 (define_insn "clzsi2"
11004 [(set (match_operand:SI 0 "s_register_operand" "=r")
11005 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11006 "TARGET_32BIT && arm_arch5"
11007 "clz%?\\t%0, %1"
11008 [(set_attr "predicable" "yes")
11009 (set_attr "insn" "clz")])
11010
11011 ;; V5E instructions.
11012
11013 (define_insn "prefetch"
11014 [(prefetch (match_operand:SI 0 "address_operand" "p")
11015 (match_operand:SI 1 "" "")
11016 (match_operand:SI 2 "" ""))]
11017 "TARGET_32BIT && arm_arch5e"
11018 "pld\\t%a0")
11019
11020 ;; General predication pattern
11021
11022 (define_cond_exec
11023 [(match_operator 0 "arm_comparison_operator"
11024 [(match_operand 1 "cc_register" "")
11025 (const_int 0)])]
11026 "TARGET_32BIT"
11027 ""
11028 )
11029
11030 (define_insn "prologue_use"
11031 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11032 ""
11033 "%@ %0 needed for prologue"
11034 )
11035
11036
11037 ;; Patterns for exception handling
11038
11039 (define_expand "eh_return"
11040 [(use (match_operand 0 "general_operand" ""))]
11041 "TARGET_EITHER"
11042 "
11043 {
11044 if (TARGET_32BIT)
11045 emit_insn (gen_arm_eh_return (operands[0]));
11046 else
11047 emit_insn (gen_thumb_eh_return (operands[0]));
11048 DONE;
11049 }"
11050 )
11051
11052 ;; We can't expand this before we know where the link register is stored.
11053 (define_insn_and_split "arm_eh_return"
11054 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11055 VUNSPEC_EH_RETURN)
11056 (clobber (match_scratch:SI 1 "=&r"))]
11057 "TARGET_ARM"
11058 "#"
11059 "&& reload_completed"
11060 [(const_int 0)]
11061 "
11062 {
11063 arm_set_return_address (operands[0], operands[1]);
11064 DONE;
11065 }"
11066 )
11067
11068 (define_insn_and_split "thumb_eh_return"
11069 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11070 VUNSPEC_EH_RETURN)
11071 (clobber (match_scratch:SI 1 "=&l"))]
11072 "TARGET_THUMB1"
11073 "#"
11074 "&& reload_completed"
11075 [(const_int 0)]
11076 "
11077 {
11078 thumb_set_return_address (operands[0], operands[1]);
11079 DONE;
11080 }"
11081 )
11082
11083 \f
11084 ;; TLS support
11085
11086 (define_insn "load_tp_hard"
11087 [(set (match_operand:SI 0 "register_operand" "=r")
11088 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11089 "TARGET_HARD_TP"
11090 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11091 [(set_attr "predicable" "yes")]
11092 )
11093
11094 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11095 (define_insn "load_tp_soft"
11096 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11097 (clobber (reg:SI LR_REGNUM))
11098 (clobber (reg:SI IP_REGNUM))
11099 (clobber (reg:CC CC_REGNUM))]
11100 "TARGET_SOFT_TP"
11101 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11102 [(set_attr "conds" "clob")]
11103 )
11104
11105 ;; Load the FPA co-processor patterns
11106 (include "fpa.md")
11107 ;; Load the Maverick co-processor patterns
11108 (include "cirrus.md")
11109 ;; Vector bits common to IWMMXT and Neon
11110 (include "vec-common.md")
11111 ;; Load the Intel Wireless Multimedia Extension patterns
11112 (include "iwmmxt.md")
11113 ;; Load the VFP co-processor patterns
11114 (include "vfp.md")
11115 ;; Thumb-2 patterns
11116 (include "thumb2.md")
11117 ;; Neon patterns
11118 (include "neon.md")
11119