1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together.
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
79 ; register to "use".
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
95 ; instruction stream.
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 ]
105 )
106
107 ;; UNSPEC_VOLATILE Usage:
108
109 (define_constants
110 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
111 ; insn in the code.
112 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
113 ; instruction epilogue sequence that isn't expanded
114 ; into normal RTL. Used for both normal and sibcall
115 ; epilogues.
116 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
117 ; for inlined constants.
118 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
119 ; table.
120 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
121 ; an 8-bit object.
122 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
123 ; a 16-bit object.
124 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
125 ; a 32-bit object.
126 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
127 ; a 64-bit object.
128 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
129 ; a 128-bit object.
130 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
131 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
132 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
133 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
134 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
135 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
136 (VUNSPEC_EH_RETURN 20); Used to override the return address for exception
137 ; handling.
138 ]
139 )
140 \f
141 ;;---------------------------------------------------------------------------
142 ;; Attributes
143
144 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
145 ; generating ARM code. This is used to control the length of some insn
146 ; patterns that share the same RTL in both ARM and Thumb code.
147 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
148
149 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM; it affects
150 ; scheduling decisions for the load unit and the multiplier.
151 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
152
153 ; IS_XSCALE is set to 'yes' when compiling for XScale.
154 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
155
156 ;; Operand number of an input operand that is shifted. Zero if the
157 ;; given instruction does not shift one of its input operands.
158 (define_attr "shift" "" (const_int 0))
159
160 ; Floating Point Unit. If we only have floating point emulation, then there
161 ; is no point in scheduling the floating point insns. (Well, for best
162 ; performance we should try and group them together).
163 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp,vfpv3d16,vfpv3,neon,neon_fp16"
164 (const (symbol_ref "arm_fpu_attr")))
165
166 ; LENGTH of an instruction (in bytes)
167 (define_attr "length" "" (const_int 4))
168
169 ; POOL_RANGE is how far away from a constant pool entry this insn
170 ; can be placed. If the distance is zero, then this insn will never
171 ; reference the pool.
172 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
173 ; before its address.
174 (define_attr "pool_range" "" (const_int 0))
175 (define_attr "neg_pool_range" "" (const_int 0))
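; As an illustration only (not part of the machine description): a constant
; that cannot be encoded as an immediate is typically loaded with a
; PC-relative load from a nearby literal pool, and POOL_RANGE bounds how far
; that pool entry may sit from the load.  A C sketch (names are illustrative):
;
;   unsigned int
;   get_magic (void)
;   {
;     return 0xdeadbeefu;   /* typically "ldr r0, <pool entry>" */
;   }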
176
177 ; An assembler sequence may clobber the condition codes without us knowing.
178 ; If such an insn references the pool, then we have no way of knowing how,
179 ; so use the most conservative value for pool_range.
180 (define_asm_attributes
181 [(set_attr "conds" "clob")
182 (set_attr "length" "4")
183 (set_attr "pool_range" "250")])
184
185 ;; The instruction used to implement a particular pattern. This
186 ;; information is used by pipeline descriptions to provide accurate
187 ;; scheduling information.
188
189 (define_attr "insn"
190 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
191 (const_string "other"))
192
193 ; The TYPE attribute is used to detect floating point instructions which, if
194 ; running on a co-processor, can run in parallel with other, basic instructions.
195 ; If write-buffer scheduling is enabled then it can also be used in the
196 ; scheduling of writes.
197
198 ; Classification of each insn
199 ; Note: vfp.md has different meanings for some of these, and some further
200 ; types as well. See that file for details.
201 ; alu any alu instruction that doesn't hit memory or fp
202 ; regs or have a shifted source operand
203 ; alu_shift any data instruction that doesn't hit memory or fp
204 ; regs, but has a source operand shifted by a constant
205 ; alu_shift_reg any data instruction that doesn't hit memory or fp
206 ; regs, but has a source operand shifted by a register value
207 ; mult a multiply instruction
208 ; block blockage insn, this blocks all functional units
209 ; float a floating point arithmetic operation (subject to expansion)
210 ; fdivd DFmode floating point division
211 ; fdivs SFmode floating point division
212 ; fmul Floating point multiply
213 ; ffmul Fast floating point multiply
214 ; farith Floating point arithmetic (4 cycle)
215 ; ffarith Fast floating point arithmetic (2 cycle)
216 ; float_em a floating point arithmetic operation that is normally emulated
217 ; even on a machine with an fpa.
218 ; f_load a floating point load from memory
219 ; f_store a floating point store to memory
220 ; f_load[sd] single/double load from memory
221 ; f_store[sd] single/double store to memory
222 ; f_flag a transfer of co-processor flags to the CPSR
223 ; f_mem_r a transfer of a floating point register to a real reg via mem
224 ; r_mem_f the reverse of f_mem_r
225 ; f_2_r fast transfer float to arm (no memory needed)
226 ; r_2_f fast transfer arm to float
227 ; f_cvt convert floating<->integral
228 ; branch a branch
229 ; call a subroutine call
230 ; load_byte load byte(s) from memory to arm registers
231 ; load1 load 1 word from memory to arm registers
232 ; load2 load 2 words from memory to arm registers
233 ; load3 load 3 words from memory to arm registers
234 ; load4 load 4 words from memory to arm registers
235 ; store1 store 1 word to memory from arm registers
236 ; store2 store 2 words
237 ; store3 store 3 words
238 ; store4 store 4 (or more) words
239 ; Additions for Cirrus Maverick co-processor:
240 ; mav_farith Floating point arithmetic (4 cycle)
241 ; mav_dmult Double multiplies (7 cycle)
242 ;
243
244 (define_attr "type"
245 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
246 (if_then_else
247 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
248 (const_string "mult")
249 (const_string "alu")))
250
251 ; Load scheduling, set from the arm_ld_sched variable
252 ; initialized by arm_override_options()
253 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
254
255 ;; Classification of NEON instructions for scheduling purposes.
256 ;; Do not set this attribute and the "type" attribute together in
257 ;; any one instruction pattern.
258 (define_attr "neon_type"
259 "neon_int_1,\
260 neon_int_2,\
261 neon_int_3,\
262 neon_int_4,\
263 neon_int_5,\
264 neon_vqneg_vqabs,\
265 neon_vmov,\
266 neon_vaba,\
267 neon_vsma,\
268 neon_vaba_qqq,\
269 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
270 neon_mul_qqq_8_16_32_ddd_32,\
271 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
272 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
273 neon_mla_qqq_8_16,\
274 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
275 neon_mla_qqq_32_qqd_32_scalar,\
276 neon_mul_ddd_16_scalar_32_16_long_scalar,\
277 neon_mul_qqd_32_scalar,\
278 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
279 neon_shift_1,\
280 neon_shift_2,\
281 neon_shift_3,\
282 neon_vshl_ddd,\
283 neon_vqshl_vrshl_vqrshl_qqq,\
284 neon_vsra_vrsra,\
285 neon_fp_vadd_ddd_vabs_dd,\
286 neon_fp_vadd_qqq_vabs_qq,\
287 neon_fp_vsum,\
288 neon_fp_vmul_ddd,\
289 neon_fp_vmul_qqd,\
290 neon_fp_vmla_ddd,\
291 neon_fp_vmla_qqq,\
292 neon_fp_vmla_ddd_scalar,\
293 neon_fp_vmla_qqq_scalar,\
294 neon_fp_vrecps_vrsqrts_ddd,\
295 neon_fp_vrecps_vrsqrts_qqq,\
296 neon_bp_simple,\
297 neon_bp_2cycle,\
298 neon_bp_3cycle,\
299 neon_ldr,\
300 neon_str,\
301 neon_vld1_1_2_regs,\
302 neon_vld1_3_4_regs,\
303 neon_vld2_2_regs_vld1_vld2_all_lanes,\
304 neon_vld2_4_regs,\
305 neon_vld3_vld4,\
306 neon_vst1_1_2_regs_vst2_2_regs,\
307 neon_vst1_3_4_regs,\
308 neon_vst2_4_regs_vst3_vst4,\
309 neon_vst3_vst4,\
310 neon_vld1_vld2_lane,\
311 neon_vld3_vld4_lane,\
312 neon_vst1_vst2_lane,\
313 neon_vst3_vst4_lane,\
314 neon_vld3_vld4_all_lanes,\
315 neon_mcr,\
316 neon_mcr_2_mcrr,\
317 neon_mrc,\
318 neon_mrrc,\
319 neon_ldm_2,\
320 neon_stm_2,\
321 none"
322 (const_string "none"))
323
324 ; condition codes: this one is used by final_prescan_insn to speed up
325 ; conditionalizing instructions. It saves having to scan the rtl to see if
326 ; it uses or alters the condition codes.
327 ;
328 ; USE means that the condition codes are used by the insn in the process of
329 ; outputting code; this means (at present) that we can't use the insn in
330 ; inlined branches.
331 ;
332 ; SET means that the purpose of the insn is to set the condition codes in a
333 ; well defined manner.
334 ;
335 ; CLOB means that the condition codes are altered in an undefined manner, if
336 ; they are altered at all.
337 ;
338 ; UNCONDITIONAL means the instruction cannot be conditionally executed.
339 ;
340 ; NOCOND means that the condition codes are neither altered by, nor affect,
341 ; the output of this insn.
342
343 (define_attr "conds" "use,set,clob,unconditional,nocond"
344 (if_then_else (eq_attr "type" "call")
345 (const_string "clob")
346 (if_then_else (eq_attr "neon_type" "none")
347 (const_string "nocond")
348 (const_string "unconditional"))))
349
350 ; Predicable means that the insn can be conditionally executed based on
351 ; an automatically added predicate (additional patterns are generated by
352 ; gen...). We default to 'no' because no Thumb patterns match this rule
353 ; and not all ARM patterns do.
354 (define_attr "predicable" "no,yes" (const_string "no"))
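; A hedged illustration of what predication enables: a small conditional
; update like the one below can often be emitted as a single conditionally
; executed instruction (e.g. an ADD with a condition suffix) instead of a
; branch around it.  C sketch, names illustrative:
;
;   int
;   cond_add (int x, int y, int c)
;   {
;     if (c)        /* e.g. "cmp c, #0" then "addne x, x, y" */
;       x += y;
;     return x;
;   }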
355
356 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
357 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
358 ; suffer blockages enough to warrant modelling this (and it can adversely
359 ; affect the schedule).
360 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
361
362 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
363 ; to stall the processor. Used with model_wbuf above.
364 (define_attr "write_conflict" "no,yes"
365 (if_then_else (eq_attr "type"
366 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
367 (const_string "yes")
368 (const_string "no")))
369
370 ; Classify the insns into those that take one cycle and those that take more
371 ; than one on the main cpu execution unit.
372 (define_attr "core_cycles" "single,multi"
373 (if_then_else (eq_attr "type"
374 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
375 (const_string "single")
376 (const_string "multi")))
377
378 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
379 ;; distant label. Only applicable to Thumb code.
380 (define_attr "far_jump" "yes,no" (const_string "no"))
381
382
383 ;; The number of machine instructions this pattern expands to.
384 ;; Used for Thumb-2 conditional execution.
385 (define_attr "ce_count" "" (const_int 1))
386
387 ;;---------------------------------------------------------------------------
388 ;; Mode iterators
389
390 ; A list of modes that are exactly 64 bits in size. We use this to expand
391 ; some splits that are the same for all modes when operating on ARM
392 ; registers.
393 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
394
395 ;;---------------------------------------------------------------------------
396 ;; Predicates
397
398 (include "predicates.md")
399 (include "constraints.md")
400
401 ;;---------------------------------------------------------------------------
402 ;; Pipeline descriptions
403
404 ;; Processor type. This is created automatically from arm-cores.def.
405 (include "arm-tune.md")
406
407 (define_attr "tune_cortexr4" "yes,no"
408 (const (if_then_else
409 (eq_attr "tune" "cortexr4,cortexr4f")
410 (const_string "yes")
411 (const_string "no"))))
412
413 ;; True if the generic scheduling description should be used.
414
415 (define_attr "generic_sched" "yes,no"
416 (const (if_then_else
417 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
418 (eq_attr "tune_cortexr4" "yes"))
419 (const_string "no")
420 (const_string "yes"))))
421
422 (define_attr "generic_vfp" "yes,no"
423 (const (if_then_else
424 (and (eq_attr "fpu" "vfp")
425 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
426 (eq_attr "tune_cortexr4" "no"))
427 (const_string "yes")
428 (const_string "no"))))
429
430 (include "arm-generic.md")
431 (include "arm926ejs.md")
432 (include "arm1020e.md")
433 (include "arm1026ejs.md")
434 (include "arm1136jfs.md")
435 (include "cortex-a8.md")
436 (include "cortex-a9.md")
437 (include "cortex-r4.md")
438 (include "cortex-r4f.md")
439 (include "vfp11.md")
440
441 \f
442 ;;---------------------------------------------------------------------------
443 ;; Insn patterns
444 ;;
445 ;; Addition insns.
446
447 ;; Note: For DImode insns, there is normally no reason why operands should
448 ;; not be in the same register; what we don't want is for something being
449 ;; written to partially overlap something that is an input.
450 ;; Cirrus 64-bit additions should not be split, because we have native
451 ;; 64-bit addition instructions.
452
453 (define_expand "adddi3"
454 [(parallel
455 [(set (match_operand:DI 0 "s_register_operand" "")
456 (plus:DI (match_operand:DI 1 "s_register_operand" "")
457 (match_operand:DI 2 "s_register_operand" "")))
458 (clobber (reg:CC CC_REGNUM))])]
459 "TARGET_EITHER"
460 "
461 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
462 {
463 if (!cirrus_fp_register (operands[0], DImode))
464 operands[0] = force_reg (DImode, operands[0]);
465 if (!cirrus_fp_register (operands[1], DImode))
466 operands[1] = force_reg (DImode, operands[1]);
467 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
468 DONE;
469 }
470
471 if (TARGET_THUMB1)
472 {
473 if (GET_CODE (operands[1]) != REG)
474 operands[1] = force_reg (DImode, operands[1]);
475 if (GET_CODE (operands[2]) != REG)
476 operands[2] = force_reg (DImode, operands[2]);
477 }
478 "
479 )
480
481 (define_insn "*thumb1_adddi3"
482 [(set (match_operand:DI 0 "register_operand" "=l")
483 (plus:DI (match_operand:DI 1 "register_operand" "%0")
484 (match_operand:DI 2 "register_operand" "l")))
485 (clobber (reg:CC CC_REGNUM))
486 ]
487 "TARGET_THUMB1"
488 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
489 [(set_attr "length" "4")]
490 )
491
492 (define_insn_and_split "*arm_adddi3"
493 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
494 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
495 (match_operand:DI 2 "s_register_operand" "r, 0")))
496 (clobber (reg:CC CC_REGNUM))]
497 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
498 "#"
499 "TARGET_32BIT && reload_completed"
500 [(parallel [(set (reg:CC_C CC_REGNUM)
501 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
502 (match_dup 1)))
503 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
504 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
505 (plus:SI (match_dup 4) (match_dup 5))))]
506 "
507 {
508 operands[3] = gen_highpart (SImode, operands[0]);
509 operands[0] = gen_lowpart (SImode, operands[0]);
510 operands[4] = gen_highpart (SImode, operands[1]);
511 operands[1] = gen_lowpart (SImode, operands[1]);
512 operands[5] = gen_highpart (SImode, operands[2]);
513 operands[2] = gen_lowpart (SImode, operands[2]);
514 }"
515 [(set_attr "conds" "clob")
516 (set_attr "length" "8")]
517 )
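;; Illustration only: a C sketch of the source the DImode addition patterns
;; cover.  After the split above it is normally emitted as an ADDS on the
;; low words followed by an ADC on the high words.
;;
;;   unsigned long long
;;   add64 (unsigned long long a, unsigned long long b)
;;   {
;;     return a + b;   /* adds lo, lo_a, lo_b ; adc hi, hi_a, hi_b */
;;   }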
518
519 (define_insn_and_split "*adddi_sesidi_di"
520 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
521 (plus:DI (sign_extend:DI
522 (match_operand:SI 2 "s_register_operand" "r,r"))
523 (match_operand:DI 1 "s_register_operand" "r,0")))
524 (clobber (reg:CC CC_REGNUM))]
525 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
526 "#"
527 "TARGET_32BIT && reload_completed"
528 [(parallel [(set (reg:CC_C CC_REGNUM)
529 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
530 (match_dup 1)))
531 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
532 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
533 (plus:SI (ashiftrt:SI (match_dup 2)
534 (const_int 31))
535 (match_dup 4))))]
536 "
537 {
538 operands[3] = gen_highpart (SImode, operands[0]);
539 operands[0] = gen_lowpart (SImode, operands[0]);
540 operands[4] = gen_highpart (SImode, operands[1]);
541 operands[1] = gen_lowpart (SImode, operands[1]);
542 operands[2] = gen_lowpart (SImode, operands[2]);
543 }"
544 [(set_attr "conds" "clob")
545 (set_attr "length" "8")]
546 )
547
548 (define_insn_and_split "*adddi_zesidi_di"
549 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
550 (plus:DI (zero_extend:DI
551 (match_operand:SI 2 "s_register_operand" "r,r"))
552 (match_operand:DI 1 "s_register_operand" "r,0")))
553 (clobber (reg:CC CC_REGNUM))]
554 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
555 "#"
556 "TARGET_32BIT && reload_completed"
557 [(parallel [(set (reg:CC_C CC_REGNUM)
558 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
559 (match_dup 1)))
560 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
561 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
562 (plus:SI (match_dup 4) (const_int 0))))]
563 "
564 {
565 operands[3] = gen_highpart (SImode, operands[0]);
566 operands[0] = gen_lowpart (SImode, operands[0]);
567 operands[4] = gen_highpart (SImode, operands[1]);
568 operands[1] = gen_lowpart (SImode, operands[1]);
569 operands[2] = gen_lowpart (SImode, operands[2]);
570 }"
571 [(set_attr "conds" "clob")
572 (set_attr "length" "8")]
573 )
574
575 (define_expand "addsi3"
576 [(set (match_operand:SI 0 "s_register_operand" "")
577 (plus:SI (match_operand:SI 1 "s_register_operand" "")
578 (match_operand:SI 2 "reg_or_int_operand" "")))]
579 "TARGET_EITHER"
580 "
581 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
582 {
583 arm_split_constant (PLUS, SImode, NULL_RTX,
584 INTVAL (operands[2]), operands[0], operands[1],
585 optimize && can_create_pseudo_p ());
586 DONE;
587 }
588 "
589 )
590
591 ; If there is a scratch available, this will be faster than synthesizing the
592 ; addition.
593 (define_peephole2
594 [(match_scratch:SI 3 "r")
595 (set (match_operand:SI 0 "arm_general_register_operand" "")
596 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
597 (match_operand:SI 2 "const_int_operand" "")))]
598 "TARGET_32BIT &&
599 !(const_ok_for_arm (INTVAL (operands[2]))
600 || const_ok_for_arm (-INTVAL (operands[2])))
601 && const_ok_for_arm (~INTVAL (operands[2]))"
602 [(set (match_dup 3) (match_dup 2))
603 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
604 ""
605 )
606
607 ;; The r/r/k alternative is required when reloading the address
608 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
609 ;; put the duplicated register first, and not try the commutative version.
610 (define_insn_and_split "*arm_addsi3"
611 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
612 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
613 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
614 "TARGET_32BIT"
615 "@
616 add%?\\t%0, %1, %2
617 add%?\\t%0, %1, %2
618 add%?\\t%0, %2, %1
619 sub%?\\t%0, %1, #%n2
620 sub%?\\t%0, %1, #%n2
621 #"
622 "TARGET_32BIT
623 && GET_CODE (operands[2]) == CONST_INT
624 && !(const_ok_for_arm (INTVAL (operands[2]))
625 || const_ok_for_arm (-INTVAL (operands[2])))
626 && (reload_completed || !arm_eliminable_register (operands[1]))"
627 [(clobber (const_int 0))]
628 "
629 arm_split_constant (PLUS, SImode, curr_insn,
630 INTVAL (operands[2]), operands[0],
631 operands[1], 0);
632 DONE;
633 "
634 [(set_attr "length" "4,4,4,4,4,16")
635 (set_attr "predicable" "yes")]
636 )
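;; An assumed example (illustration only): an addend that is not a valid ARM
;; immediate is rebuilt by arm_split_constant as a short sequence of
;; additions of encodable pieces.
;;
;;   int
;;   add_const (int x)
;;   {
;;     return x + 0x10001;   /* e.g. add x, x, #65536 ; add x, x, #1 */
;;   }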
637
638 ;; Register group 'k' is a single register group containing only the stack
639 ;; register. Trying to reload it will always fail catastrophically,
640 ;; so never allow those alternatives to match if reloading is needed.
641
642 (define_insn_and_split "*thumb1_addsi3"
643 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
644 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
645 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
646 "TARGET_THUMB1"
647 "*
648 static const char * const asms[] =
649 {
650 \"add\\t%0, %0, %2\",
651 \"sub\\t%0, %0, #%n2\",
652 \"add\\t%0, %1, %2\",
653 \"add\\t%0, %0, %2\",
654 \"add\\t%0, %0, %2\",
655 \"add\\t%0, %1, %2\",
656 \"add\\t%0, %1, %2\",
657 \"#\",
658 \"#\"
659 };
660 if ((which_alternative == 2 || which_alternative == 6)
661 && GET_CODE (operands[2]) == CONST_INT
662 && INTVAL (operands[2]) < 0)
663 return \"sub\\t%0, %1, #%n2\";
664 return asms[which_alternative];
665 "
666 "&& reload_completed && CONST_INT_P (operands[2])
667 && operands[1] != stack_pointer_rtx
668 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
669 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
670 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
671 {
672 HOST_WIDE_INT offset = INTVAL (operands[2]);
673 if (offset > 255)
674 offset = 255;
675 else if (offset < -255)
676 offset = -255;
677
678 operands[3] = GEN_INT (offset);
679 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
680 }
681 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
682 )
683
684 ;; Reloading and elimination of the frame pointer can
685 ;; sometimes cause this optimization to be missed.
686 (define_peephole2
687 [(set (match_operand:SI 0 "arm_general_register_operand" "")
688 (match_operand:SI 1 "const_int_operand" ""))
689 (set (match_dup 0)
690 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
691 "TARGET_THUMB1
692 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
693 && (INTVAL (operands[1]) & 3) == 0"
694 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
695 ""
696 )
697
698 ;; ??? Make Thumb-2 variants which prefer low regs
699 (define_insn "*addsi3_compare0"
700 [(set (reg:CC_NOOV CC_REGNUM)
701 (compare:CC_NOOV
702 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
703 (match_operand:SI 2 "arm_add_operand" "rI,L"))
704 (const_int 0)))
705 (set (match_operand:SI 0 "s_register_operand" "=r,r")
706 (plus:SI (match_dup 1) (match_dup 2)))]
707 "TARGET_32BIT"
708 "@
709 add%.\\t%0, %1, %2
710 sub%.\\t%0, %1, #%n2"
711 [(set_attr "conds" "set")]
712 )
713
714 (define_insn "*addsi3_compare0_scratch"
715 [(set (reg:CC_NOOV CC_REGNUM)
716 (compare:CC_NOOV
717 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
718 (match_operand:SI 1 "arm_add_operand" "rI,L"))
719 (const_int 0)))]
720 "TARGET_32BIT"
721 "@
722 cmn%?\\t%0, %1
723 cmp%?\\t%0, #%n1"
724 [(set_attr "conds" "set")]
725 )
726
727 (define_insn "*compare_negsi_si"
728 [(set (reg:CC_Z CC_REGNUM)
729 (compare:CC_Z
730 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
731 (match_operand:SI 1 "s_register_operand" "r")))]
732 "TARGET_32BIT"
733 "cmn%?\\t%1, %0"
734 [(set_attr "conds" "set")]
735 )
736
737 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
738 ;; addend is a constant.
739 (define_insn "*cmpsi2_addneg"
740 [(set (reg:CC CC_REGNUM)
741 (compare:CC
742 (match_operand:SI 1 "s_register_operand" "r,r")
743 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
744 (set (match_operand:SI 0 "s_register_operand" "=r,r")
745 (plus:SI (match_dup 1)
746 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
747 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
748 "@
749 sub%.\\t%0, %1, %2
750 add%.\\t%0, %1, #%n2"
751 [(set_attr "conds" "set")]
752 )
753
754 ;; Convert the sequence
755 ;; sub rd, rn, #1
756 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
757 ;; bne dest
758 ;; into
759 ;; subs rd, rn, #1
760 ;; bcs dest ((unsigned)rn >= 1)
761 ;; similarly for the beq variant using bcc.
762 ;; This is a common looping idiom (while (n--))
763 (define_peephole2
764 [(set (match_operand:SI 0 "arm_general_register_operand" "")
765 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
766 (const_int -1)))
767 (set (match_operand 2 "cc_register" "")
768 (compare (match_dup 0) (const_int -1)))
769 (set (pc)
770 (if_then_else (match_operator 3 "equality_operator"
771 [(match_dup 2) (const_int 0)])
772 (match_operand 4 "" "")
773 (match_operand 5 "" "")))]
774 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
775 [(parallel[
776 (set (match_dup 2)
777 (compare:CC
778 (match_dup 1) (const_int 1)))
779 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
780 (set (pc)
781 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
782 (match_dup 4)
783 (match_dup 5)))]
784 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
785 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
786 ? GEU : LTU),
787 VOIDmode,
788 operands[2], const0_rtx);"
789 )
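;; A C sketch of the looping idiom this peephole targets (illustrative
;; only): the decrement and the compare against -1 collapse into a single
;; SUBS, and the branch then tests the carry flag, as described above.
;;
;;   void
;;   clear_words (unsigned int *p, unsigned int n)
;;   {
;;     while (n--)        /* roughly: subs n, n, #1 ... bcs <loop> */
;;       *p++ = 0;
;;   }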
790
791 ;; The next four insns work because they compare the result with one of
792 ;; the operands, and we know that the use of the condition code is
793 ;; either GEU or LTU, so we can use the carry flag from the addition
794 ;; instead of doing the compare a second time.
795 (define_insn "*addsi3_compare_op1"
796 [(set (reg:CC_C CC_REGNUM)
797 (compare:CC_C
798 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
799 (match_operand:SI 2 "arm_add_operand" "rI,L"))
800 (match_dup 1)))
801 (set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
803 "TARGET_32BIT"
804 "@
805 add%.\\t%0, %1, %2
806 sub%.\\t%0, %1, #%n2"
807 [(set_attr "conds" "set")]
808 )
809
810 (define_insn "*addsi3_compare_op2"
811 [(set (reg:CC_C CC_REGNUM)
812 (compare:CC_C
813 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
814 (match_operand:SI 2 "arm_add_operand" "rI,L"))
815 (match_dup 2)))
816 (set (match_operand:SI 0 "s_register_operand" "=r,r")
817 (plus:SI (match_dup 1) (match_dup 2)))]
818 "TARGET_32BIT"
819 "@
820 add%.\\t%0, %1, %2
821 sub%.\\t%0, %1, #%n2"
822 [(set_attr "conds" "set")]
823 )
824
825 (define_insn "*compare_addsi2_op0"
826 [(set (reg:CC_C CC_REGNUM)
827 (compare:CC_C
828 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
829 (match_operand:SI 1 "arm_add_operand" "rI,L"))
830 (match_dup 0)))]
831 "TARGET_32BIT"
832 "@
833 cmn%?\\t%0, %1
834 cmp%?\\t%0, #%n1"
835 [(set_attr "conds" "set")]
836 )
837
838 (define_insn "*compare_addsi2_op1"
839 [(set (reg:CC_C CC_REGNUM)
840 (compare:CC_C
841 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
842 (match_operand:SI 1 "arm_add_operand" "rI,L"))
843 (match_dup 1)))]
844 "TARGET_32BIT"
845 "@
846 cmn%?\\t%0, %1
847 cmp%?\\t%0, #%n1"
848 [(set_attr "conds" "set")]
849 )
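;; An illustrative C fragment for the four patterns above: the unsigned
;; overflow test compares the sum with one of the operands, so the carry
;; flag produced by the ADDS itself can feed the branch and no separate
;; compare is needed.
;;
;;   unsigned int
;;   sat_add (unsigned int a, unsigned int b)
;;   {
;;     unsigned int s = a + b;
;;     if (s < a)         /* GEU/LTU read the carry from "adds s, a, b" */
;;       s = ~0u;         /* saturate on overflow */
;;     return s;
;;   }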
850
851 (define_insn "*addsi3_carryin"
852 [(set (match_operand:SI 0 "s_register_operand" "=r")
853 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
854 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
855 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
856 "TARGET_32BIT"
857 "adc%?\\t%0, %1, %2"
858 [(set_attr "conds" "use")]
859 )
860
861 (define_insn "*addsi3_carryin_shift"
862 [(set (match_operand:SI 0 "s_register_operand" "=r")
863 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
864 (plus:SI
865 (match_operator:SI 2 "shift_operator"
866 [(match_operand:SI 3 "s_register_operand" "r")
867 (match_operand:SI 4 "reg_or_int_operand" "rM")])
868 (match_operand:SI 1 "s_register_operand" "r"))))]
869 "TARGET_32BIT"
870 "adc%?\\t%0, %1, %3%S2"
871 [(set_attr "conds" "use")
872 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
873 (const_string "alu_shift")
874 (const_string "alu_shift_reg")))]
875 )
876
877 (define_insn "*addsi3_carryin_alt1"
878 [(set (match_operand:SI 0 "s_register_operand" "=r")
879 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
880 (match_operand:SI 2 "arm_rhs_operand" "rI"))
881 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
882 "TARGET_32BIT"
883 "adc%?\\t%0, %1, %2"
884 [(set_attr "conds" "use")]
885 )
886
887 (define_insn "*addsi3_carryin_alt2"
888 [(set (match_operand:SI 0 "s_register_operand" "=r")
889 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
890 (match_operand:SI 1 "s_register_operand" "r"))
891 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
892 "TARGET_32BIT"
893 "adc%?\\t%0, %1, %2"
894 [(set_attr "conds" "use")]
895 )
896
897 (define_insn "*addsi3_carryin_alt3"
898 [(set (match_operand:SI 0 "s_register_operand" "=r")
899 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
900 (match_operand:SI 2 "arm_rhs_operand" "rI"))
901 (match_operand:SI 1 "s_register_operand" "r")))]
902 "TARGET_32BIT"
903 "adc%?\\t%0, %1, %2"
904 [(set_attr "conds" "use")]
905 )
906
907 (define_expand "incscc"
908 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
909 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
910 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
911 (match_operand:SI 1 "s_register_operand" "0,?r")))]
912 "TARGET_32BIT"
913 ""
914 )
915
916 (define_insn "*arm_incscc"
917 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
918 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
919 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
920 (match_operand:SI 1 "s_register_operand" "0,?r")))]
921 "TARGET_ARM"
922 "@
923 add%d2\\t%0, %1, #1
924 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
925 [(set_attr "conds" "use")
926 (set_attr "length" "4,8")]
927 )
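;; A small C illustration of incscc: adding the truth value of a comparison
;; can be done with a conditional ADD of #1 instead of materializing a 0/1
;; value first.  Names are illustrative.
;;
;;   int
;;   count_if_equal (int count, int a, int b)
;;   {
;;     return count + (a == b);   /* cmp a, b ; addeq count, count, #1 */
;;   }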
928
929 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
930 (define_split
931 [(set (match_operand:SI 0 "s_register_operand" "")
932 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
933 (match_operand:SI 2 "s_register_operand" ""))
934 (const_int -1)))
935 (clobber (match_operand:SI 3 "s_register_operand" ""))]
936 "TARGET_32BIT"
937 [(set (match_dup 3) (match_dup 1))
938 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
939 "
940 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
941 ")
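;; A worked example of the identity used above (x constant): with x = 4 and
;; y = 2, (4 << 2) - 1 = 15, and ~(~(4 - 1) << 2) = ~(~3 << 2) = ~0xfffffff0
;; = 0xf = 15, so the inverted-shift form computes the same value.  A C
;; self-check (illustrative; returns 1 for y in 0..31):
;;
;;   unsigned int
;;   check_identity (unsigned int y)
;;   {
;;     const unsigned int x = 4;
;;     return ((x << y) - 1) == (~(~(x - 1) << y));
;;   }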
942
943 (define_expand "addsf3"
944 [(set (match_operand:SF 0 "s_register_operand" "")
945 (plus:SF (match_operand:SF 1 "s_register_operand" "")
946 (match_operand:SF 2 "arm_float_add_operand" "")))]
947 "TARGET_32BIT && TARGET_HARD_FLOAT"
948 "
949 if (TARGET_MAVERICK
950 && !cirrus_fp_register (operands[2], SFmode))
951 operands[2] = force_reg (SFmode, operands[2]);
952 ")
953
954 (define_expand "adddf3"
955 [(set (match_operand:DF 0 "s_register_operand" "")
956 (plus:DF (match_operand:DF 1 "s_register_operand" "")
957 (match_operand:DF 2 "arm_float_add_operand" "")))]
958 "TARGET_32BIT && TARGET_HARD_FLOAT"
959 "
960 if (TARGET_MAVERICK
961 && !cirrus_fp_register (operands[2], DFmode))
962 operands[2] = force_reg (DFmode, operands[2]);
963 ")
964
965 (define_expand "subdi3"
966 [(parallel
967 [(set (match_operand:DI 0 "s_register_operand" "")
968 (minus:DI (match_operand:DI 1 "s_register_operand" "")
969 (match_operand:DI 2 "s_register_operand" "")))
970 (clobber (reg:CC CC_REGNUM))])]
971 "TARGET_EITHER"
972 "
973 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
974 && TARGET_32BIT
975 && cirrus_fp_register (operands[0], DImode)
976 && cirrus_fp_register (operands[1], DImode))
977 {
978 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
979 DONE;
980 }
981
982 if (TARGET_THUMB1)
983 {
984 if (GET_CODE (operands[1]) != REG)
985 operands[1] = force_reg (DImode, operands[1]);
986 if (GET_CODE (operands[2]) != REG)
987 operands[2] = force_reg (DImode, operands[2]);
988 }
989 "
990 )
991
992 (define_insn "*arm_subdi3"
993 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
994 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
995 (match_operand:DI 2 "s_register_operand" "r,0,0")))
996 (clobber (reg:CC CC_REGNUM))]
997 "TARGET_32BIT"
998 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
999 [(set_attr "conds" "clob")
1000 (set_attr "length" "8")]
1001 )
1002
1003 (define_insn "*thumb_subdi3"
1004 [(set (match_operand:DI 0 "register_operand" "=l")
1005 (minus:DI (match_operand:DI 1 "register_operand" "0")
1006 (match_operand:DI 2 "register_operand" "l")))
1007 (clobber (reg:CC CC_REGNUM))]
1008 "TARGET_THUMB1"
1009 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1010 [(set_attr "length" "4")]
1011 )
1012
1013 (define_insn "*subdi_di_zesidi"
1014 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1015 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
1016 (zero_extend:DI
1017 (match_operand:SI 2 "s_register_operand" "r,r"))))
1018 (clobber (reg:CC CC_REGNUM))]
1019 "TARGET_32BIT"
1020 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1021 [(set_attr "conds" "clob")
1022 (set_attr "length" "8")]
1023 )
1024
1025 (define_insn "*subdi_di_sesidi"
1026 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1027 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
1028 (sign_extend:DI
1029 (match_operand:SI 2 "s_register_operand" "r,r"))))
1030 (clobber (reg:CC CC_REGNUM))]
1031 "TARGET_32BIT"
1032 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1033 [(set_attr "conds" "clob")
1034 (set_attr "length" "8")]
1035 )
1036
1037 (define_insn "*subdi_zesidi_di"
1038 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1039 (minus:DI (zero_extend:DI
1040 (match_operand:SI 2 "s_register_operand" "r,r"))
1041 (match_operand:DI 1 "s_register_operand" "?r,0")))
1042 (clobber (reg:CC CC_REGNUM))]
1043 "TARGET_ARM"
1044 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1045 [(set_attr "conds" "clob")
1046 (set_attr "length" "8")]
1047 )
1048
1049 (define_insn "*subdi_sesidi_di"
1050 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1051 (minus:DI (sign_extend:DI
1052 (match_operand:SI 2 "s_register_operand" "r,r"))
1053 (match_operand:DI 1 "s_register_operand" "?r,0")))
1054 (clobber (reg:CC CC_REGNUM))]
1055 "TARGET_ARM"
1056 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1057 [(set_attr "conds" "clob")
1058 (set_attr "length" "8")]
1059 )
1060
1061 (define_insn "*subdi_zesidi_zesidi"
1062 [(set (match_operand:DI 0 "s_register_operand" "=r")
1063 (minus:DI (zero_extend:DI
1064 (match_operand:SI 1 "s_register_operand" "r"))
1065 (zero_extend:DI
1066 (match_operand:SI 2 "s_register_operand" "r"))))
1067 (clobber (reg:CC CC_REGNUM))]
1068 "TARGET_32BIT"
1069 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1070 [(set_attr "conds" "clob")
1071 (set_attr "length" "8")]
1072 )
1073
1074 (define_expand "subsi3"
1075 [(set (match_operand:SI 0 "s_register_operand" "")
1076 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1077 (match_operand:SI 2 "s_register_operand" "")))]
1078 "TARGET_EITHER"
1079 "
1080 if (GET_CODE (operands[1]) == CONST_INT)
1081 {
1082 if (TARGET_32BIT)
1083 {
1084 arm_split_constant (MINUS, SImode, NULL_RTX,
1085 INTVAL (operands[1]), operands[0],
1086 operands[2], optimize && can_create_pseudo_p ());
1087 DONE;
1088 }
1089 else /* TARGET_THUMB1 */
1090 operands[1] = force_reg (SImode, operands[1]);
1091 }
1092 "
1093 )
1094
1095 (define_insn "*thumb1_subsi3_insn"
1096 [(set (match_operand:SI 0 "register_operand" "=l")
1097 (minus:SI (match_operand:SI 1 "register_operand" "l")
1098 (match_operand:SI 2 "register_operand" "l")))]
1099 "TARGET_THUMB1"
1100 "sub\\t%0, %1, %2"
1101 [(set_attr "length" "2")]
1102 )
1103
1104 ; ??? Check Thumb-2 split length
1105 (define_insn_and_split "*arm_subsi3_insn"
1106 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1107 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1108 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1109 "TARGET_32BIT"
1110 "@
1111 rsb%?\\t%0, %2, %1
1112 sub%?\\t%0, %1, %2
1113 #"
1114 "TARGET_32BIT
1115 && GET_CODE (operands[1]) == CONST_INT
1116 && !const_ok_for_arm (INTVAL (operands[1]))"
1117 [(clobber (const_int 0))]
1118 "
1119 arm_split_constant (MINUS, SImode, curr_insn,
1120 INTVAL (operands[1]), operands[0], operands[2], 0);
1121 DONE;
1122 "
1123 [(set_attr "length" "4,4,16")
1124 (set_attr "predicable" "yes")]
1125 )
1126
1127 (define_peephole2
1128 [(match_scratch:SI 3 "r")
1129 (set (match_operand:SI 0 "arm_general_register_operand" "")
1130 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1131 (match_operand:SI 2 "arm_general_register_operand" "")))]
1132 "TARGET_32BIT
1133 && !const_ok_for_arm (INTVAL (operands[1]))
1134 && const_ok_for_arm (~INTVAL (operands[1]))"
1135 [(set (match_dup 3) (match_dup 1))
1136 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1137 ""
1138 )
1139
1140 (define_insn "*subsi3_compare0"
1141 [(set (reg:CC_NOOV CC_REGNUM)
1142 (compare:CC_NOOV
1143 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1144 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1145 (const_int 0)))
1146 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1147 (minus:SI (match_dup 1) (match_dup 2)))]
1148 "TARGET_32BIT"
1149 "@
1150 sub%.\\t%0, %1, %2
1151 rsb%.\\t%0, %2, %1"
1152 [(set_attr "conds" "set")]
1153 )
1154
1155 (define_expand "decscc"
1156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1157 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1158 (match_operator:SI 2 "arm_comparison_operator"
1159 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1160 "TARGET_32BIT"
1161 ""
1162 )
1163
1164 (define_insn "*arm_decscc"
1165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1166 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1167 (match_operator:SI 2 "arm_comparison_operator"
1168 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1169 "TARGET_ARM"
1170 "@
1171 sub%d2\\t%0, %1, #1
1172 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1173 [(set_attr "conds" "use")
1174 (set_attr "length" "*,8")]
1175 )
1176
1177 (define_expand "subsf3"
1178 [(set (match_operand:SF 0 "s_register_operand" "")
1179 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1180 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1181 "TARGET_32BIT && TARGET_HARD_FLOAT"
1182 "
1183 if (TARGET_MAVERICK)
1184 {
1185 if (!cirrus_fp_register (operands[1], SFmode))
1186 operands[1] = force_reg (SFmode, operands[1]);
1187 if (!cirrus_fp_register (operands[2], SFmode))
1188 operands[2] = force_reg (SFmode, operands[2]);
1189 }
1190 ")
1191
1192 (define_expand "subdf3"
1193 [(set (match_operand:DF 0 "s_register_operand" "")
1194 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1195 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1196 "TARGET_32BIT && TARGET_HARD_FLOAT"
1197 "
1198 if (TARGET_MAVERICK)
1199 {
1200 if (!cirrus_fp_register (operands[1], DFmode))
1201 operands[1] = force_reg (DFmode, operands[1]);
1202 if (!cirrus_fp_register (operands[2], DFmode))
1203 operands[2] = force_reg (DFmode, operands[2]);
1204 }
1205 ")
1206
1207 \f
1208 ;; Multiplication insns
1209
1210 (define_expand "mulsi3"
1211 [(set (match_operand:SI 0 "s_register_operand" "")
1212 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1213 (match_operand:SI 1 "s_register_operand" "")))]
1214 "TARGET_EITHER"
1215 ""
1216 )
1217
1218 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same.
1219 (define_insn "*arm_mulsi3"
1220 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1221 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1222 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1223 "TARGET_32BIT && !arm_arch6"
1224 "mul%?\\t%0, %2, %1"
1225 [(set_attr "insn" "mul")
1226 (set_attr "predicable" "yes")]
1227 )
1228
1229 (define_insn "*arm_mulsi3_v6"
1230 [(set (match_operand:SI 0 "s_register_operand" "=r")
1231 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1232 (match_operand:SI 2 "s_register_operand" "r")))]
1233 "TARGET_32BIT && arm_arch6"
1234 "mul%?\\t%0, %1, %2"
1235 [(set_attr "insn" "mul")
1236 (set_attr "predicable" "yes")]
1237 )
1238
1239 ; Unfortunately, with the Thumb the '&'/'0' trick can fail when operands
1240 ; 1 and 2 are the same, because reload will make operand 0 match
1241 ; operand 1 without realizing that this conflicts with operand 2. We fix
1242 ; this by adding another alternative to match this case, and then `reload'
1243 ; it ourselves. This alternative must come first.
1244 (define_insn "*thumb_mulsi3"
1245 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1246 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1247 (match_operand:SI 2 "register_operand" "l,l,l")))]
1248 "TARGET_THUMB1 && !arm_arch6"
1249 "*
1250 if (which_alternative < 2)
1251 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1252 else
1253 return \"mul\\t%0, %2\";
1254 "
1255 [(set_attr "length" "4,4,2")
1256 (set_attr "insn" "mul")]
1257 )
1258
1259 (define_insn "*thumb_mulsi3_v6"
1260 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1261 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1262 (match_operand:SI 2 "register_operand" "l,0,0")))]
1263 "TARGET_THUMB1 && arm_arch6"
1264 "@
1265 mul\\t%0, %2
1266 mul\\t%0, %1
1267 mul\\t%0, %1"
1268 [(set_attr "length" "2")
1269 (set_attr "insn" "mul")]
1270 )
1271
1272 (define_insn "*mulsi3_compare0"
1273 [(set (reg:CC_NOOV CC_REGNUM)
1274 (compare:CC_NOOV (mult:SI
1275 (match_operand:SI 2 "s_register_operand" "r,r")
1276 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1277 (const_int 0)))
1278 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1279 (mult:SI (match_dup 2) (match_dup 1)))]
1280 "TARGET_ARM && !arm_arch6"
1281 "mul%.\\t%0, %2, %1"
1282 [(set_attr "conds" "set")
1283 (set_attr "insn" "muls")]
1284 )
1285
1286 (define_insn "*mulsi3_compare0_v6"
1287 [(set (reg:CC_NOOV CC_REGNUM)
1288 (compare:CC_NOOV (mult:SI
1289 (match_operand:SI 2 "s_register_operand" "r")
1290 (match_operand:SI 1 "s_register_operand" "r"))
1291 (const_int 0)))
1292 (set (match_operand:SI 0 "s_register_operand" "=r")
1293 (mult:SI (match_dup 2) (match_dup 1)))]
1294 "TARGET_ARM && arm_arch6 && optimize_size"
1295 "mul%.\\t%0, %2, %1"
1296 [(set_attr "conds" "set")
1297 (set_attr "insn" "muls")]
1298 )
1299
1300 (define_insn "*mulsi_compare0_scratch"
1301 [(set (reg:CC_NOOV CC_REGNUM)
1302 (compare:CC_NOOV (mult:SI
1303 (match_operand:SI 2 "s_register_operand" "r,r")
1304 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1305 (const_int 0)))
1306 (clobber (match_scratch:SI 0 "=&r,&r"))]
1307 "TARGET_ARM && !arm_arch6"
1308 "mul%.\\t%0, %2, %1"
1309 [(set_attr "conds" "set")
1310 (set_attr "insn" "muls")]
1311 )
1312
1313 (define_insn "*mulsi_compare0_scratch_v6"
1314 [(set (reg:CC_NOOV CC_REGNUM)
1315 (compare:CC_NOOV (mult:SI
1316 (match_operand:SI 2 "s_register_operand" "r")
1317 (match_operand:SI 1 "s_register_operand" "r"))
1318 (const_int 0)))
1319 (clobber (match_scratch:SI 0 "=r"))]
1320 "TARGET_ARM && arm_arch6 && optimize_size"
1321 "mul%.\\t%0, %2, %1"
1322 [(set_attr "conds" "set")
1323 (set_attr "insn" "muls")]
1324 )
1325
1326 ;; Unnamed templates to match MLA instruction.
1327
1328 (define_insn "*mulsi3addsi"
1329 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1330 (plus:SI
1331 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1332 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1333 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1334 "TARGET_32BIT && !arm_arch6"
1335 "mla%?\\t%0, %2, %1, %3"
1336 [(set_attr "insn" "mla")
1337 (set_attr "predicable" "yes")]
1338 )
1339
1340 (define_insn "*mulsi3addsi_v6"
1341 [(set (match_operand:SI 0 "s_register_operand" "=r")
1342 (plus:SI
1343 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1344 (match_operand:SI 1 "s_register_operand" "r"))
1345 (match_operand:SI 3 "s_register_operand" "r")))]
1346 "TARGET_32BIT && arm_arch6"
1347 "mla%?\\t%0, %2, %1, %3"
1348 [(set_attr "insn" "mla")
1349 (set_attr "predicable" "yes")]
1350 )
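;; Illustrative C for the MLA patterns: a multiply feeding an addition maps
;; onto a single multiply-accumulate.
;;
;;   int
;;   mac (int acc, int a, int b)
;;   {
;;     return acc + a * b;   /* e.g. mla res, a, b, acc */
;;   }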
1351
1352 (define_insn "*mulsi3addsi_compare0"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1354 (compare:CC_NOOV
1355 (plus:SI (mult:SI
1356 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1357 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1358 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1359 (const_int 0)))
1360 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1361 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1362 (match_dup 3)))]
1363 "TARGET_ARM && arm_arch6"
1364 "mla%.\\t%0, %2, %1, %3"
1365 [(set_attr "conds" "set")
1366 (set_attr "insn" "mlas")]
1367 )
1368
1369 (define_insn "*mulsi3addsi_compare0_v6"
1370 [(set (reg:CC_NOOV CC_REGNUM)
1371 (compare:CC_NOOV
1372 (plus:SI (mult:SI
1373 (match_operand:SI 2 "s_register_operand" "r")
1374 (match_operand:SI 1 "s_register_operand" "r"))
1375 (match_operand:SI 3 "s_register_operand" "r"))
1376 (const_int 0)))
1377 (set (match_operand:SI 0 "s_register_operand" "=r")
1378 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1379 (match_dup 3)))]
1380 "TARGET_ARM && arm_arch6 && optimize_size"
1381 "mla%.\\t%0, %2, %1, %3"
1382 [(set_attr "conds" "set")
1383 (set_attr "insn" "mlas")]
1384 )
1385
1386 (define_insn "*mulsi3addsi_compare0_scratch"
1387 [(set (reg:CC_NOOV CC_REGNUM)
1388 (compare:CC_NOOV
1389 (plus:SI (mult:SI
1390 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1391 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1392 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1393 (const_int 0)))
1394 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1395 "TARGET_ARM && !arm_arch6"
1396 "mla%.\\t%0, %2, %1, %3"
1397 [(set_attr "conds" "set")
1398 (set_attr "insn" "mlas")]
1399 )
1400
1401 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1402 [(set (reg:CC_NOOV CC_REGNUM)
1403 (compare:CC_NOOV
1404 (plus:SI (mult:SI
1405 (match_operand:SI 2 "s_register_operand" "r")
1406 (match_operand:SI 1 "s_register_operand" "r"))
1407 (match_operand:SI 3 "s_register_operand" "r"))
1408 (const_int 0)))
1409 (clobber (match_scratch:SI 0 "=r"))]
1410 "TARGET_ARM && arm_arch6 && optimize_size"
1411 "mla%.\\t%0, %2, %1, %3"
1412 [(set_attr "conds" "set")
1413 (set_attr "insn" "mlas")]
1414 )
1415
1416 (define_insn "*mulsi3subsi"
1417 [(set (match_operand:SI 0 "s_register_operand" "=r")
1418 (minus:SI
1419 (match_operand:SI 3 "s_register_operand" "r")
1420 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1421 (match_operand:SI 1 "s_register_operand" "r"))))]
1422 "TARGET_32BIT && arm_arch_thumb2"
1423 "mls%?\\t%0, %2, %1, %3"
1424 [(set_attr "insn" "mla")
1425 (set_attr "predicable" "yes")]
1426 )
1427
1428 ;; Unnamed template to match long long multiply-accumulate (smlal)
1429
1430 (define_insn "*mulsidi3adddi"
1431 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1432 (plus:DI
1433 (mult:DI
1434 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1435 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1436 (match_operand:DI 1 "s_register_operand" "0")))]
1437 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1438 "smlal%?\\t%Q0, %R0, %3, %2"
1439 [(set_attr "insn" "smlal")
1440 (set_attr "predicable" "yes")]
1441 )
1442
1443 (define_insn "*mulsidi3adddi_v6"
1444 [(set (match_operand:DI 0 "s_register_operand" "=r")
1445 (plus:DI
1446 (mult:DI
1447 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1448 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1449 (match_operand:DI 1 "s_register_operand" "0")))]
1450 "TARGET_32BIT && arm_arch6"
1451 "smlal%?\\t%Q0, %R0, %3, %2"
1452 [(set_attr "insn" "smlal")
1453 (set_attr "predicable" "yes")]
1454 )
1455
1456 ;; 32x32->64 widening multiply.
1457 ;; As with mulsi3, the only difference between the v3-5 and v6+
1458 ;; versions of these patterns is the requirement that the output not
1459 ;; overlap the inputs, but that still means we have to have a named
1460 ;; expander and two different starred insns.
1461
1462 (define_expand "mulsidi3"
1463 [(set (match_operand:DI 0 "s_register_operand" "")
1464 (mult:DI
1465 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1466 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1467 "TARGET_32BIT && arm_arch3m"
1468 ""
1469 )
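;; A C sketch of the widening multiply these patterns cover (illustration
;; only): the full 64-bit product of two 32-bit values comes straight from
;; SMULL (or UMULL for the unsigned expander below).
;;
;;   long long
;;   wide_mul (int a, int b)
;;   {
;;     return (long long) a * b;   /* smull lo, hi, a, b */
;;   }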
1470
1471 (define_insn "*mulsidi3_nov6"
1472 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1473 (mult:DI
1474 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1475 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1476 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1477 "smull%?\\t%Q0, %R0, %1, %2"
1478 [(set_attr "insn" "smull")
1479 (set_attr "predicable" "yes")]
1480 )
1481
1482 (define_insn "*mulsidi3_v6"
1483 [(set (match_operand:DI 0 "s_register_operand" "=r")
1484 (mult:DI
1485 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1486 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1487 "TARGET_32BIT && arm_arch6"
1488 "smull%?\\t%Q0, %R0, %1, %2"
1489 [(set_attr "insn" "smull")
1490 (set_attr "predicable" "yes")]
1491 )
1492
1493 (define_expand "umulsidi3"
1494 [(set (match_operand:DI 0 "s_register_operand" "")
1495 (mult:DI
1496 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1497 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1498 "TARGET_32BIT && arm_arch3m"
1499 ""
1500 )
1501
1502 (define_insn "*umulsidi3_nov6"
1503 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1504 (mult:DI
1505 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1506 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1507 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1508 "umull%?\\t%Q0, %R0, %1, %2"
1509 [(set_attr "insn" "umull")
1510 (set_attr "predicable" "yes")]
1511 )
1512
1513 (define_insn "*umulsidi3_v6"
1514 [(set (match_operand:DI 0 "s_register_operand" "=r")
1515 (mult:DI
1516 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1517 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1518 "TARGET_32BIT && arm_arch6"
1519 "umull%?\\t%Q0, %R0, %1, %2"
1520 [(set_attr "insn" "umull")
1521 (set_attr "predicable" "yes")]
1522 )
1523
1524 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1525
1526 (define_insn "*umulsidi3adddi"
1527 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1528 (plus:DI
1529 (mult:DI
1530 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1531 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1532 (match_operand:DI 1 "s_register_operand" "0")))]
1533 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1534 "umlal%?\\t%Q0, %R0, %3, %2"
1535 [(set_attr "insn" "umlal")
1536 (set_attr "predicable" "yes")]
1537 )
1538
1539 (define_insn "*umulsidi3adddi_v6"
1540 [(set (match_operand:DI 0 "s_register_operand" "=r")
1541 (plus:DI
1542 (mult:DI
1543 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1544 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1545 (match_operand:DI 1 "s_register_operand" "0")))]
1546 "TARGET_32BIT && arm_arch6"
1547 "umlal%?\\t%Q0, %R0, %3, %2"
1548 [(set_attr "insn" "umlal")
1549 (set_attr "predicable" "yes")]
1550 )
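;; Illustrative C for the 64-bit multiply-accumulate patterns above: a
;; widening product added into a 64-bit accumulator becomes a single UMLAL
;; (SMLAL for the signed form).
;;
;;   unsigned long long
;;   mac64 (unsigned long long acc, unsigned int a, unsigned int b)
;;   {
;;     return acc + (unsigned long long) a * b;   /* umlal lo, hi, a, b */
;;   }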
1551
1552 (define_expand "smulsi3_highpart"
1553 [(parallel
1554 [(set (match_operand:SI 0 "s_register_operand" "")
1555 (truncate:SI
1556 (lshiftrt:DI
1557 (mult:DI
1558 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1559 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1560 (const_int 32))))
1561 (clobber (match_scratch:SI 3 ""))])]
1562 "TARGET_32BIT && arm_arch3m"
1563 ""
1564 )
1565
1566 (define_insn "*smulsi3_highpart_nov6"
1567 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1568 (truncate:SI
1569 (lshiftrt:DI
1570 (mult:DI
1571 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1572 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1573 (const_int 32))))
1574 (clobber (match_scratch:SI 3 "=&r,&r"))]
1575 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1576 "smull%?\\t%3, %0, %2, %1"
1577 [(set_attr "insn" "smull")
1578 (set_attr "predicable" "yes")]
1579 )
1580
1581 (define_insn "*smulsi3_highpart_v6"
1582 [(set (match_operand:SI 0 "s_register_operand" "=r")
1583 (truncate:SI
1584 (lshiftrt:DI
1585 (mult:DI
1586 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1587 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1588 (const_int 32))))
1589 (clobber (match_scratch:SI 3 "=r"))]
1590 "TARGET_32BIT && arm_arch6"
1591 "smull%?\\t%3, %0, %2, %1"
1592 [(set_attr "insn" "smull")
1593 (set_attr "predicable" "yes")]
1594 )
1595
1596 (define_expand "umulsi3_highpart"
1597 [(parallel
1598 [(set (match_operand:SI 0 "s_register_operand" "")
1599 (truncate:SI
1600 (lshiftrt:DI
1601 (mult:DI
1602 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1603 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1604 (const_int 32))))
1605 (clobber (match_scratch:SI 3 ""))])]
1606 "TARGET_32BIT && arm_arch3m"
1607 ""
1608 )
1609
1610 (define_insn "*umulsi3_highpart_nov6"
1611 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1612 (truncate:SI
1613 (lshiftrt:DI
1614 (mult:DI
1615 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1616 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1617 (const_int 32))))
1618 (clobber (match_scratch:SI 3 "=&r,&r"))]
1619 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1620 "umull%?\\t%3, %0, %2, %1"
1621 [(set_attr "insn" "umull")
1622 (set_attr "predicable" "yes")]
1623 )
1624
1625 (define_insn "*umulsi3_highpart_v6"
1626 [(set (match_operand:SI 0 "s_register_operand" "=r")
1627 (truncate:SI
1628 (lshiftrt:DI
1629 (mult:DI
1630 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1631 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1632 (const_int 32))))
1633 (clobber (match_scratch:SI 3 "=r"))]
1634 "TARGET_32BIT && arm_arch6"
1635 "umull%?\\t%3, %0, %2, %1"
1636 [(set_attr "insn" "umull")
1637 (set_attr "predicable" "yes")]
1638 )
1639
1640 (define_insn "mulhisi3"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (sign_extend:SI
1643 (match_operand:HI 1 "s_register_operand" "%r"))
1644 (sign_extend:SI
1645 (match_operand:HI 2 "s_register_operand" "r"))))]
1646 "TARGET_DSP_MULTIPLY"
1647 "smulbb%?\\t%0, %1, %2"
1648 [(set_attr "insn" "smulxy")
1649 (set_attr "predicable" "yes")]
1650 )
1651
1652 (define_insn "*mulhisi3tb"
1653 [(set (match_operand:SI 0 "s_register_operand" "=r")
1654 (mult:SI (ashiftrt:SI
1655 (match_operand:SI 1 "s_register_operand" "r")
1656 (const_int 16))
1657 (sign_extend:SI
1658 (match_operand:HI 2 "s_register_operand" "r"))))]
1659 "TARGET_DSP_MULTIPLY"
1660 "smultb%?\\t%0, %1, %2"
1661 [(set_attr "insn" "smulxy")
1662 (set_attr "predicable" "yes")]
1663 )
1664
1665 (define_insn "*mulhisi3bt"
1666 [(set (match_operand:SI 0 "s_register_operand" "=r")
1667 (mult:SI (sign_extend:SI
1668 (match_operand:HI 1 "s_register_operand" "r"))
1669 (ashiftrt:SI
1670 (match_operand:SI 2 "s_register_operand" "r")
1671 (const_int 16))))]
1672 "TARGET_DSP_MULTIPLY"
1673 "smulbt%?\\t%0, %1, %2"
1674 [(set_attr "insn" "smulxy")
1675 (set_attr "predicable" "yes")]
1676 )
1677
1678 (define_insn "*mulhisi3tt"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1680 (mult:SI (ashiftrt:SI
1681 (match_operand:SI 1 "s_register_operand" "r")
1682 (const_int 16))
1683 (ashiftrt:SI
1684 (match_operand:SI 2 "s_register_operand" "r")
1685 (const_int 16))))]
1686 "TARGET_DSP_MULTIPLY"
1687 "smultt%?\\t%0, %1, %2"
1688 [(set_attr "insn" "smulxy")
1689 (set_attr "predicable" "yes")]
1690 )
1691
1692 (define_insn "*mulhisi3addsi"
1693 [(set (match_operand:SI 0 "s_register_operand" "=r")
1694 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1695 (mult:SI (sign_extend:SI
1696 (match_operand:HI 2 "s_register_operand" "%r"))
1697 (sign_extend:SI
1698 (match_operand:HI 3 "s_register_operand" "r")))))]
1699 "TARGET_DSP_MULTIPLY"
1700 "smlabb%?\\t%0, %2, %3, %1"
1701 [(set_attr "insn" "smlaxy")
1702 (set_attr "predicable" "yes")]
1703 )
1704
1705 (define_insn "*mulhidi3adddi"
1706 [(set (match_operand:DI 0 "s_register_operand" "=r")
1707 (plus:DI
1708 (match_operand:DI 1 "s_register_operand" "0")
1709 (mult:DI (sign_extend:DI
1710 (match_operand:HI 2 "s_register_operand" "%r"))
1711 (sign_extend:DI
1712 (match_operand:HI 3 "s_register_operand" "r")))))]
1713 "TARGET_DSP_MULTIPLY"
1714 "smlalbb%?\\t%Q0, %R0, %2, %3"
1715 [(set_attr "insn" "smlalxy")
1716 (set_attr "predicable" "yes")])
1717
1718 (define_expand "mulsf3"
1719 [(set (match_operand:SF 0 "s_register_operand" "")
1720 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1721 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1722 "TARGET_32BIT && TARGET_HARD_FLOAT"
1723 "
1724 if (TARGET_MAVERICK
1725 && !cirrus_fp_register (operands[2], SFmode))
1726 operands[2] = force_reg (SFmode, operands[2]);
1727 ")
1728
1729 (define_expand "muldf3"
1730 [(set (match_operand:DF 0 "s_register_operand" "")
1731 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1732 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1733 "TARGET_32BIT && TARGET_HARD_FLOAT"
1734 "
1735 if (TARGET_MAVERICK
1736 && !cirrus_fp_register (operands[2], DFmode))
1737 operands[2] = force_reg (DFmode, operands[2]);
1738 ")
1739 \f
1740 ;; Division insns
1741
1742 (define_expand "divsf3"
1743 [(set (match_operand:SF 0 "s_register_operand" "")
1744 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1745 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1746 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1747 "")
1748
1749 (define_expand "divdf3"
1750 [(set (match_operand:DF 0 "s_register_operand" "")
1751 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1752 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1753 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1754 "")
1755 \f
1756 ;; Modulo insns
1757
1758 (define_expand "modsf3"
1759 [(set (match_operand:SF 0 "s_register_operand" "")
1760 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1761 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1762 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1763 "")
1764
1765 (define_expand "moddf3"
1766 [(set (match_operand:DF 0 "s_register_operand" "")
1767 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1768 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1769 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1770 "")
1771 \f
1772 ;; Boolean and, ior, xor insns
1773
1774 ;; Split up double word logical operations
1775
1776 ;; Split up simple DImode logical operations. Simply perform the logical
1777 ;; operation on the upper and lower halves of the registers.
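;; For example, a DImode AND of two register pairs becomes
;;
;;   lo(op0) = lo(op1) AND lo(op2)
;;   hi(op0) = hi(op1) AND hi(op2)
;;
;; and likewise for IOR and XOR, since these operations work bitwise and
;; never carry between the two words.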
1778 (define_split
1779 [(set (match_operand:DI 0 "s_register_operand" "")
1780 (match_operator:DI 6 "logical_binary_operator"
1781 [(match_operand:DI 1 "s_register_operand" "")
1782 (match_operand:DI 2 "s_register_operand" "")]))]
1783 "TARGET_32BIT && reload_completed
1784 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1785 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1786 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1787 "
1788 {
1789 operands[3] = gen_highpart (SImode, operands[0]);
1790 operands[0] = gen_lowpart (SImode, operands[0]);
1791 operands[4] = gen_highpart (SImode, operands[1]);
1792 operands[1] = gen_lowpart (SImode, operands[1]);
1793 operands[5] = gen_highpart (SImode, operands[2]);
1794 operands[2] = gen_lowpart (SImode, operands[2]);
1795 }"
1796 )
1797
1798 (define_split
1799 [(set (match_operand:DI 0 "s_register_operand" "")
1800 (match_operator:DI 6 "logical_binary_operator"
1801 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1802 (match_operand:DI 1 "s_register_operand" "")]))]
1803 "TARGET_32BIT && reload_completed"
1804 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1805 (set (match_dup 3) (match_op_dup:SI 6
1806 [(ashiftrt:SI (match_dup 2) (const_int 31))
1807 (match_dup 4)]))]
1808 "
1809 {
1810 operands[3] = gen_highpart (SImode, operands[0]);
1811 operands[0] = gen_lowpart (SImode, operands[0]);
1812 operands[4] = gen_highpart (SImode, operands[1]);
1813 operands[1] = gen_lowpart (SImode, operands[1]);
1814 operands[5] = gen_highpart (SImode, operands[2]);
1815 operands[2] = gen_lowpart (SImode, operands[2]);
1816 }"
1817 )
1818
1819 ;; The zero extend of operand 2 means we can just copy the high part of
1820 ;; operand1 into operand0.
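;; For example, in (x | (unsigned long long) (unsigned int) y) the high
;; word of the zero-extended operand is zero and h | 0 == h, so only the
;; low words need an ORR; the XOR split below relies on the same identity,
;; h ^ 0 == h.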
1821 (define_split
1822 [(set (match_operand:DI 0 "s_register_operand" "")
1823 (ior:DI
1824 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1825 (match_operand:DI 1 "s_register_operand" "")))]
1826 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1827 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1828 (set (match_dup 3) (match_dup 4))]
1829 "
1830 {
1831 operands[4] = gen_highpart (SImode, operands[1]);
1832 operands[3] = gen_highpart (SImode, operands[0]);
1833 operands[0] = gen_lowpart (SImode, operands[0]);
1834 operands[1] = gen_lowpart (SImode, operands[1]);
1835 }"
1836 )
1837
1838 ;; The zero extend of operand 2 means we can just copy the high part of
1839 ;; operand1 into operand0.
1840 (define_split
1841 [(set (match_operand:DI 0 "s_register_operand" "")
1842 (xor:DI
1843 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1844 (match_operand:DI 1 "s_register_operand" "")))]
1845 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1846 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1847 (set (match_dup 3) (match_dup 4))]
1848 "
1849 {
1850 operands[4] = gen_highpart (SImode, operands[1]);
1851 operands[3] = gen_highpart (SImode, operands[0]);
1852 operands[0] = gen_lowpart (SImode, operands[0]);
1853 operands[1] = gen_lowpart (SImode, operands[1]);
1854 }"
1855 )
1856
1857 (define_insn "anddi3"
1858 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1859 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1860 (match_operand:DI 2 "s_register_operand" "r,r")))]
1861 "TARGET_32BIT && ! TARGET_IWMMXT"
1862 "#"
1863 [(set_attr "length" "8")]
1864 )
1865
1866 (define_insn_and_split "*anddi_zesidi_di"
1867 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1868 (and:DI (zero_extend:DI
1869 (match_operand:SI 2 "s_register_operand" "r,r"))
1870 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1871 "TARGET_32BIT"
1872 "#"
1873 "TARGET_32BIT && reload_completed"
1874 ; The zero extend of operand 2 clears the high word of the output
1875 ; operand.
1876 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1877 (set (match_dup 3) (const_int 0))]
1878 "
1879 {
1880 operands[3] = gen_highpart (SImode, operands[0]);
1881 operands[0] = gen_lowpart (SImode, operands[0]);
1882 operands[1] = gen_lowpart (SImode, operands[1]);
1883 }"
1884 [(set_attr "length" "8")]
1885 )
1886
1887 (define_insn "*anddi_sesdi_di"
1888 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1889 (and:DI (sign_extend:DI
1890 (match_operand:SI 2 "s_register_operand" "r,r"))
1891 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1892 "TARGET_32BIT"
1893 "#"
1894 [(set_attr "length" "8")]
1895 )
1896
1897 (define_expand "andsi3"
1898 [(set (match_operand:SI 0 "s_register_operand" "")
1899 (and:SI (match_operand:SI 1 "s_register_operand" "")
1900 (match_operand:SI 2 "reg_or_int_operand" "")))]
1901 "TARGET_EITHER"
1902 "
1903 if (TARGET_32BIT)
1904 {
1905 if (GET_CODE (operands[2]) == CONST_INT)
1906 {
1907 arm_split_constant (AND, SImode, NULL_RTX,
1908 INTVAL (operands[2]), operands[0],
1909 operands[1], optimize && can_create_pseudo_p ());
1910
1911 DONE;
1912 }
1913 }
1914 else /* TARGET_THUMB1 */
1915 {
1916 if (GET_CODE (operands[2]) != CONST_INT)
1917 operands[2] = force_reg (SImode, operands[2]);
1918 else
1919 {
1920 int i;
1921
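	  /* In outline, the special cases tried below are:
	     - if the complement of the constant fits in 8 bits (e.g. the
	       constant 0xffffff00, whose complement is 0xff), load the
	       complemented constant into a register and use BIC;
	     - if the constant is a mask of the low I bits, (1 << I) - 1,
	       for some I between 9 and 31, extract just those bits with
	       extzv;
	     - if the constant instead clears exactly the low I bits, shift
	       right and then back left by I.
	     Otherwise the constant is forced into a register and the plain
	     AND is used.  */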
1922 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1923 {
1924 operands[2] = force_reg (SImode,
1925 GEN_INT (~INTVAL (operands[2])));
1926
1927 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1928
1929 DONE;
1930 }
1931
1932 for (i = 9; i <= 31; i++)
1933 {
1934 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1935 {
1936 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1937 const0_rtx));
1938 DONE;
1939 }
1940 else if ((((HOST_WIDE_INT) 1) << i) - 1
1941 == ~INTVAL (operands[2]))
1942 {
1943 rtx shift = GEN_INT (i);
1944 rtx reg = gen_reg_rtx (SImode);
1945
1946 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1947 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1948
1949 DONE;
1950 }
1951 }
1952
1953 operands[2] = force_reg (SImode, operands[2]);
1954 }
1955 }
1956 "
1957 )
1958
1959 ; ??? Check split length for Thumb-2
1960 (define_insn_and_split "*arm_andsi3_insn"
1961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1962 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1963 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1964 "TARGET_32BIT"
1965 "@
1966 and%?\\t%0, %1, %2
1967 bic%?\\t%0, %1, #%B2
1968 #"
1969 "TARGET_32BIT
1970 && GET_CODE (operands[2]) == CONST_INT
1971 && !(const_ok_for_arm (INTVAL (operands[2]))
1972 || const_ok_for_arm (~INTVAL (operands[2])))"
1973 [(clobber (const_int 0))]
1974 "
1975 arm_split_constant (AND, SImode, curr_insn,
1976 INTVAL (operands[2]), operands[0], operands[1], 0);
1977 DONE;
1978 "
1979 [(set_attr "length" "4,4,16")
1980 (set_attr "predicable" "yes")]
1981 )
1982
1983 (define_insn "*thumb1_andsi3_insn"
1984 [(set (match_operand:SI 0 "register_operand" "=l")
1985 (and:SI (match_operand:SI 1 "register_operand" "%0")
1986 (match_operand:SI 2 "register_operand" "l")))]
1987 "TARGET_THUMB1"
1988 "and\\t%0, %0, %2"
1989 [(set_attr "length" "2")]
1990 )
1991
1992 (define_insn "*andsi3_compare0"
1993 [(set (reg:CC_NOOV CC_REGNUM)
1994 (compare:CC_NOOV
1995 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1996 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1997 (const_int 0)))
1998 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1999 (and:SI (match_dup 1) (match_dup 2)))]
2000 "TARGET_32BIT"
2001 "@
2002 and%.\\t%0, %1, %2
2003 bic%.\\t%0, %1, #%B2"
2004 [(set_attr "conds" "set")]
2005 )
2006
2007 (define_insn "*andsi3_compare0_scratch"
2008 [(set (reg:CC_NOOV CC_REGNUM)
2009 (compare:CC_NOOV
2010 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2011 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2012 (const_int 0)))
2013 (clobber (match_scratch:SI 2 "=X,r"))]
2014 "TARGET_32BIT"
2015 "@
2016 tst%?\\t%0, %1
2017 bic%.\\t%2, %0, #%B1"
2018 [(set_attr "conds" "set")]
2019 )
2020
2021 (define_insn "*zeroextractsi_compare0_scratch"
2022 [(set (reg:CC_NOOV CC_REGNUM)
2023 (compare:CC_NOOV (zero_extract:SI
2024 (match_operand:SI 0 "s_register_operand" "r")
2025 (match_operand 1 "const_int_operand" "n")
2026 (match_operand 2 "const_int_operand" "n"))
2027 (const_int 0)))]
2028 "TARGET_32BIT
2029 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2030 && INTVAL (operands[1]) > 0
2031 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2032 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2033 "*
2034 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2035 << INTVAL (operands[2]));
2036 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2037 return \"\";
2038 "
2039 [(set_attr "conds" "set")]
2040 )
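;; For example, a test of a 3-bit field starting at bit 2 is output as a
;; single TST against the mask ((1 << 3) - 1) << 2, i.e. 0x1c.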
2041
2042 (define_insn_and_split "*ne_zeroextractsi"
2043 [(set (match_operand:SI 0 "s_register_operand" "=r")
2044 (ne:SI (zero_extract:SI
2045 (match_operand:SI 1 "s_register_operand" "r")
2046 (match_operand:SI 2 "const_int_operand" "n")
2047 (match_operand:SI 3 "const_int_operand" "n"))
2048 (const_int 0)))
2049 (clobber (reg:CC CC_REGNUM))]
2050 "TARGET_32BIT
2051 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2052 && INTVAL (operands[2]) > 0
2053 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2054 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2055 "#"
2056 "TARGET_32BIT
2057 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2058 && INTVAL (operands[2]) > 0
2059 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2060 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2061 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2062 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2063 (const_int 0)))
2064 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2065 (set (match_dup 0)
2066 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2067 (match_dup 0) (const_int 1)))]
2068 "
2069 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2070 << INTVAL (operands[3]));
2071 "
2072 [(set_attr "conds" "clob")
2073 (set (attr "length")
2074 (if_then_else (eq_attr "is_thumb" "yes")
2075 (const_int 12)
2076 (const_int 8)))]
2077 )
2078
2079 (define_insn_and_split "*ne_zeroextractsi_shifted"
2080 [(set (match_operand:SI 0 "s_register_operand" "=r")
2081 (ne:SI (zero_extract:SI
2082 (match_operand:SI 1 "s_register_operand" "r")
2083 (match_operand:SI 2 "const_int_operand" "n")
2084 (const_int 0))
2085 (const_int 0)))
2086 (clobber (reg:CC CC_REGNUM))]
2087 "TARGET_ARM"
2088 "#"
2089 "TARGET_ARM"
2090 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2091 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2092 (const_int 0)))
2093 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2094 (set (match_dup 0)
2095 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2096 (match_dup 0) (const_int 1)))]
2097 "
2098 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2099 "
2100 [(set_attr "conds" "clob")
2101 (set_attr "length" "8")]
2102 )
2103
2104 (define_insn_and_split "*ite_ne_zeroextractsi"
2105 [(set (match_operand:SI 0 "s_register_operand" "=r")
2106 (if_then_else:SI (ne (zero_extract:SI
2107 (match_operand:SI 1 "s_register_operand" "r")
2108 (match_operand:SI 2 "const_int_operand" "n")
2109 (match_operand:SI 3 "const_int_operand" "n"))
2110 (const_int 0))
2111 (match_operand:SI 4 "arm_not_operand" "rIK")
2112 (const_int 0)))
2113 (clobber (reg:CC CC_REGNUM))]
2114 "TARGET_ARM
2115 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2116 && INTVAL (operands[2]) > 0
2117 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2118 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2119 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2120 "#"
2121 "TARGET_ARM
2122 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2123 && INTVAL (operands[2]) > 0
2124 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2125 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2126 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2127 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2128 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2129 (const_int 0)))
2130 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2131 (set (match_dup 0)
2132 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2133 (match_dup 0) (match_dup 4)))]
2134 "
2135 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2136 << INTVAL (operands[3]));
2137 "
2138 [(set_attr "conds" "clob")
2139 (set_attr "length" "8")]
2140 )
2141
2142 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2143 [(set (match_operand:SI 0 "s_register_operand" "=r")
2144 (if_then_else:SI (ne (zero_extract:SI
2145 (match_operand:SI 1 "s_register_operand" "r")
2146 (match_operand:SI 2 "const_int_operand" "n")
2147 (const_int 0))
2148 (const_int 0))
2149 (match_operand:SI 3 "arm_not_operand" "rIK")
2150 (const_int 0)))
2151 (clobber (reg:CC CC_REGNUM))]
2152 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2153 "#"
2154 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2155 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2156 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2157 (const_int 0)))
2158 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2159 (set (match_dup 0)
2160 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2161 (match_dup 0) (match_dup 3)))]
2162 "
2163 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2164 "
2165 [(set_attr "conds" "clob")
2166 (set_attr "length" "8")]
2167 )
2168
2169 (define_split
2170 [(set (match_operand:SI 0 "s_register_operand" "")
2171 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2172 (match_operand:SI 2 "const_int_operand" "")
2173 (match_operand:SI 3 "const_int_operand" "")))
2174 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2175 "TARGET_THUMB1"
2176 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2177 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2178 "{
2179 HOST_WIDE_INT temp = INTVAL (operands[2]);
2180
2181 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2182 operands[3] = GEN_INT (32 - temp);
2183 }"
2184 )
2185
2186 ;; ??? Use the Thumb-2 bitfield insert/extract instructions where available.
2187 (define_split
2188 [(set (match_operand:SI 0 "s_register_operand" "")
2189 (match_operator:SI 1 "shiftable_operator"
2190 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2191 (match_operand:SI 3 "const_int_operand" "")
2192 (match_operand:SI 4 "const_int_operand" ""))
2193 (match_operand:SI 5 "s_register_operand" "")]))
2194 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2195 "TARGET_ARM"
2196 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2197 (set (match_dup 0)
2198 (match_op_dup 1
2199 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2200 (match_dup 5)]))]
2201 "{
2202 HOST_WIDE_INT temp = INTVAL (operands[3]);
2203
2204 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2205 operands[4] = GEN_INT (32 - temp);
2206 }"
2207 )
2208
2209 (define_split
2210 [(set (match_operand:SI 0 "s_register_operand" "")
2211 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2212 (match_operand:SI 2 "const_int_operand" "")
2213 (match_operand:SI 3 "const_int_operand" "")))]
2214 "TARGET_THUMB1"
2215 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2216 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2217 "{
2218 HOST_WIDE_INT temp = INTVAL (operands[2]);
2219
2220 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2221 operands[3] = GEN_INT (32 - temp);
2222 }"
2223 )
2224
2225 (define_split
2226 [(set (match_operand:SI 0 "s_register_operand" "")
2227 (match_operator:SI 1 "shiftable_operator"
2228 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2229 (match_operand:SI 3 "const_int_operand" "")
2230 (match_operand:SI 4 "const_int_operand" ""))
2231 (match_operand:SI 5 "s_register_operand" "")]))
2232 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2233 "TARGET_ARM"
2234 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2235 (set (match_dup 0)
2236 (match_op_dup 1
2237 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2238 (match_dup 5)]))]
2239 "{
2240 HOST_WIDE_INT temp = INTVAL (operands[3]);
2241
2242 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2243 operands[4] = GEN_INT (32 - temp);
2244 }"
2245 )
2246
2247 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2248 ;;; represented by the bitfield, then this will produce incorrect results.
2249 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2250 ;;; which have a real bit-field insert instruction, the truncation happens
2251 ;;; in the bit-field insert instruction itself. Since arm does not have a
2252 ;;; bit-field insert instruction, we would have to emit code here to truncate
2253 ;;; the value before we insert. This loses some of the advantage of having
2254 ;;; this insv pattern, so it needs to be reevaluated.
2255
2256 (define_expand "insv"
2257 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2258 (match_operand:SI 1 "general_operand" "")
2259 (match_operand:SI 2 "general_operand" ""))
2260 (match_operand:SI 3 "reg_or_int_operand" ""))]
2261 "TARGET_ARM || arm_arch_thumb2"
2262 "
2263 {
2264 int start_bit = INTVAL (operands[2]);
2265 int width = INTVAL (operands[1]);
2266 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2267 rtx target, subtarget;
2268
2269 if (arm_arch_thumb2)
2270 {
2271 bool use_bfi = TRUE;
2272
2273 if (GET_CODE (operands[3]) == CONST_INT)
2274 {
2275 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2276
2277 if (val == 0)
2278 {
2279 emit_insn (gen_insv_zero (operands[0], operands[1],
2280 operands[2]));
2281 DONE;
2282 }
2283
2284 /* See if the set can be done with a single orr instruction. */
2285 if (val == mask && const_ok_for_arm (val << start_bit))
2286 use_bfi = FALSE;
2287 }
2288
2289 if (use_bfi)
2290 {
2291 if (GET_CODE (operands[3]) != REG)
2292 operands[3] = force_reg (SImode, operands[3]);
2293
2294 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2295 operands[3]));
2296 DONE;
2297 }
2298 }
2299
2300 target = copy_rtx (operands[0]);
2301 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2302 subreg as the final target. */
2303 if (GET_CODE (target) == SUBREG)
2304 {
2305 subtarget = gen_reg_rtx (SImode);
2306 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2307 < GET_MODE_SIZE (SImode))
2308 target = SUBREG_REG (target);
2309 }
2310 else
2311 subtarget = target;
2312
2313 if (GET_CODE (operands[3]) == CONST_INT)
2314 {
2315 /* Since we are inserting a known constant, we may be able to
2316 reduce the number of bits that we have to clear so that
2317 the mask becomes simple. */
2318 /* ??? This code does not check to see if the new mask is actually
2319 simpler. It may not be. */
2320 rtx op1 = gen_reg_rtx (SImode);
2321 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2322 start of this pattern. */
2323 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2324 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2325
2326 emit_insn (gen_andsi3 (op1, operands[0],
2327 gen_int_mode (~mask2, SImode)));
2328 emit_insn (gen_iorsi3 (subtarget, op1,
2329 gen_int_mode (op3_value << start_bit, SImode)));
2330 }
2331 else if (start_bit == 0
2332 && !(const_ok_for_arm (mask)
2333 || const_ok_for_arm (~mask)))
2334 {
2335 /* A trick: since we are setting the bottom bits of the word,
2336 we can shift operand[3] up, operand[0] down, OR them together
2337 and rotate the result back again. This takes 3 insns, and
2338 the third might be mergeable into another op. */
2339 /* The shift up copes with the possibility that operand[3] is
2340 wider than the bitfield. */
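	  /* Worked example with width == 8 and start_bit == 0, inserting V
	     into X:
	       op0 = V << 24;          low byte of V in the top 8 bits
	       op1 = X >> 8;           X without its low byte
	       op1 |= op0;             now V : X[31:8]
	       rotl (op1, 8);          X[31:8] : V, i.e. X with its low
	                               byte replaced by V.  */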
2341 rtx op0 = gen_reg_rtx (SImode);
2342 rtx op1 = gen_reg_rtx (SImode);
2343
2344 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2345 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2346 emit_insn (gen_iorsi3 (op1, op1, op0));
2347 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2348 }
2349 else if ((width + start_bit == 32)
2350 && !(const_ok_for_arm (mask)
2351 || const_ok_for_arm (~mask)))
2352 {
2353 /* Similar trick, but slightly less efficient. */
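	  /* Here the field occupies the top of the word (width + start_bit
	     == 32): operands[0] is shifted left and then logically right by
	     WIDTH to clear its top WIDTH bits, the value is shifted up into
	     those bits, and the two halves are ORred together.  */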
2354
2355 rtx op0 = gen_reg_rtx (SImode);
2356 rtx op1 = gen_reg_rtx (SImode);
2357
2358 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2359 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2360 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2361 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2362 }
2363 else
2364 {
2365 rtx op0 = gen_int_mode (mask, SImode);
2366 rtx op1 = gen_reg_rtx (SImode);
2367 rtx op2 = gen_reg_rtx (SImode);
2368
2369 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2370 {
2371 rtx tmp = gen_reg_rtx (SImode);
2372
2373 emit_insn (gen_movsi (tmp, op0));
2374 op0 = tmp;
2375 }
2376
2377 /* Mask out any bits in operand[3] that are not needed. */
2378 emit_insn (gen_andsi3 (op1, operands[3], op0));
2379
2380 if (GET_CODE (op0) == CONST_INT
2381 && (const_ok_for_arm (mask << start_bit)
2382 || const_ok_for_arm (~(mask << start_bit))))
2383 {
2384 op0 = gen_int_mode (~(mask << start_bit), SImode);
2385 emit_insn (gen_andsi3 (op2, operands[0], op0));
2386 }
2387 else
2388 {
2389 if (GET_CODE (op0) == CONST_INT)
2390 {
2391 rtx tmp = gen_reg_rtx (SImode);
2392
2393 emit_insn (gen_movsi (tmp, op0));
2394 op0 = tmp;
2395 }
2396
2397 if (start_bit != 0)
2398 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2399
2400 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2401 }
2402
2403 if (start_bit != 0)
2404 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2405
2406 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2407 }
2408
2409 if (subtarget != target)
2410 {
2411 /* If TARGET is still a SUBREG, then it must be wider than a word,
2412 so we must be careful only to set the subword we were asked to. */
2413 if (GET_CODE (target) == SUBREG)
2414 emit_move_insn (target, subtarget);
2415 else
2416 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2417 }
2418
2419 DONE;
2420 }"
2421 )
2422
2423 (define_insn "insv_zero"
2424 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2425 (match_operand:SI 1 "const_int_operand" "M")
2426 (match_operand:SI 2 "const_int_operand" "M"))
2427 (const_int 0))]
2428 "arm_arch_thumb2"
2429 "bfc%?\t%0, %2, %1"
2430 [(set_attr "length" "4")
2431 (set_attr "predicable" "yes")]
2432 )
2433
2434 (define_insn "insv_t2"
2435 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2436 (match_operand:SI 1 "const_int_operand" "M")
2437 (match_operand:SI 2 "const_int_operand" "M"))
2438 (match_operand:SI 3 "s_register_operand" "r"))]
2439 "arm_arch_thumb2"
2440 "bfi%?\t%0, %3, %2, %1"
2441 [(set_attr "length" "4")
2442 (set_attr "predicable" "yes")]
2443 )
2444
2445 ; Constants for op 2 will never be given to these patterns.
2446 (define_insn_and_split "*anddi_notdi_di"
2447 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2448 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2449 (match_operand:DI 2 "s_register_operand" "0,r")))]
2450 "TARGET_32BIT"
2451 "#"
2452 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2453 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2454 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2455 "
2456 {
2457 operands[3] = gen_highpart (SImode, operands[0]);
2458 operands[0] = gen_lowpart (SImode, operands[0]);
2459 operands[4] = gen_highpart (SImode, operands[1]);
2460 operands[1] = gen_lowpart (SImode, operands[1]);
2461 operands[5] = gen_highpart (SImode, operands[2]);
2462 operands[2] = gen_lowpart (SImode, operands[2]);
2463 }"
2464 [(set_attr "length" "8")
2465 (set_attr "predicable" "yes")]
2466 )
2467
2468 (define_insn_and_split "*anddi_notzesidi_di"
2469 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2470 (and:DI (not:DI (zero_extend:DI
2471 (match_operand:SI 2 "s_register_operand" "r,r")))
2472 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2473 "TARGET_32BIT"
2474 "@
2475 bic%?\\t%Q0, %Q1, %2
2476 #"
2477 ; (not (zero_extend ...)) allows us to just copy the high word from
2478 ; operand1 to operand0.
2479 "TARGET_32BIT
2480 && reload_completed
2481 && operands[0] != operands[1]"
2482 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2483 (set (match_dup 3) (match_dup 4))]
2484 "
2485 {
2486 operands[3] = gen_highpart (SImode, operands[0]);
2487 operands[0] = gen_lowpart (SImode, operands[0]);
2488 operands[4] = gen_highpart (SImode, operands[1]);
2489 operands[1] = gen_lowpart (SImode, operands[1]);
2490 }"
2491 [(set_attr "length" "4,8")
2492 (set_attr "predicable" "yes")]
2493 )
2494
2495 (define_insn_and_split "*anddi_notsesidi_di"
2496 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2497 (and:DI (not:DI (sign_extend:DI
2498 (match_operand:SI 2 "s_register_operand" "r,r")))
2499 (match_operand:DI 1 "s_register_operand" "0,r")))]
2500 "TARGET_32BIT"
2501 "#"
2502 "TARGET_32BIT && reload_completed"
2503 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2504 (set (match_dup 3) (and:SI (not:SI
2505 (ashiftrt:SI (match_dup 2) (const_int 31)))
2506 (match_dup 4)))]
2507 "
2508 {
2509 operands[3] = gen_highpart (SImode, operands[0]);
2510 operands[0] = gen_lowpart (SImode, operands[0]);
2511 operands[4] = gen_highpart (SImode, operands[1]);
2512 operands[1] = gen_lowpart (SImode, operands[1]);
2513 }"
2514 [(set_attr "length" "8")
2515 (set_attr "predicable" "yes")]
2516 )
2517
2518 (define_insn "andsi_notsi_si"
2519 [(set (match_operand:SI 0 "s_register_operand" "=r")
2520 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2521 (match_operand:SI 1 "s_register_operand" "r")))]
2522 "TARGET_32BIT"
2523 "bic%?\\t%0, %1, %2"
2524 [(set_attr "predicable" "yes")]
2525 )
2526
2527 (define_insn "bicsi3"
2528 [(set (match_operand:SI 0 "register_operand" "=l")
2529 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2530 (match_operand:SI 2 "register_operand" "0")))]
2531 "TARGET_THUMB1"
2532 "bic\\t%0, %0, %1"
2533 [(set_attr "length" "2")]
2534 )
2535
2536 (define_insn "andsi_not_shiftsi_si"
2537 [(set (match_operand:SI 0 "s_register_operand" "=r")
2538 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2539 [(match_operand:SI 2 "s_register_operand" "r")
2540 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2541 (match_operand:SI 1 "s_register_operand" "r")))]
2542 "TARGET_ARM"
2543 "bic%?\\t%0, %1, %2%S4"
2544 [(set_attr "predicable" "yes")
2545 (set_attr "shift" "2")
2546 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2547 (const_string "alu_shift")
2548 (const_string "alu_shift_reg")))]
2549 )
2550
2551 (define_insn "*andsi_notsi_si_compare0"
2552 [(set (reg:CC_NOOV CC_REGNUM)
2553 (compare:CC_NOOV
2554 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2555 (match_operand:SI 1 "s_register_operand" "r"))
2556 (const_int 0)))
2557 (set (match_operand:SI 0 "s_register_operand" "=r")
2558 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2559 "TARGET_32BIT"
2560 "bic%.\\t%0, %1, %2"
2561 [(set_attr "conds" "set")]
2562 )
2563
2564 (define_insn "*andsi_notsi_si_compare0_scratch"
2565 [(set (reg:CC_NOOV CC_REGNUM)
2566 (compare:CC_NOOV
2567 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2568 (match_operand:SI 1 "s_register_operand" "r"))
2569 (const_int 0)))
2570 (clobber (match_scratch:SI 0 "=r"))]
2571 "TARGET_32BIT"
2572 "bic%.\\t%0, %1, %2"
2573 [(set_attr "conds" "set")]
2574 )
2575
2576 (define_insn "iordi3"
2577 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2578 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2579 (match_operand:DI 2 "s_register_operand" "r,r")))]
2580 "TARGET_32BIT && ! TARGET_IWMMXT"
2581 "#"
2582 [(set_attr "length" "8")
2583 (set_attr "predicable" "yes")]
2584 )
2585
2586 (define_insn "*iordi_zesidi_di"
2587 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2588 (ior:DI (zero_extend:DI
2589 (match_operand:SI 2 "s_register_operand" "r,r"))
2590 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2591 "TARGET_32BIT"
2592 "@
2593 orr%?\\t%Q0, %Q1, %2
2594 #"
2595 [(set_attr "length" "4,8")
2596 (set_attr "predicable" "yes")]
2597 )
2598
2599 (define_insn "*iordi_sesidi_di"
2600 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2601 (ior:DI (sign_extend:DI
2602 (match_operand:SI 2 "s_register_operand" "r,r"))
2603 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2604 "TARGET_32BIT"
2605 "#"
2606 [(set_attr "length" "8")
2607 (set_attr "predicable" "yes")]
2608 )
2609
2610 (define_expand "iorsi3"
2611 [(set (match_operand:SI 0 "s_register_operand" "")
2612 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2613 (match_operand:SI 2 "reg_or_int_operand" "")))]
2614 "TARGET_EITHER"
2615 "
2616 if (GET_CODE (operands[2]) == CONST_INT)
2617 {
2618 if (TARGET_32BIT)
2619 {
2620 arm_split_constant (IOR, SImode, NULL_RTX,
2621 INTVAL (operands[2]), operands[0], operands[1],
2622 optimize && can_create_pseudo_p ());
2623 DONE;
2624 }
2625 else /* TARGET_THUMB1 */
2626 operands [2] = force_reg (SImode, operands [2]);
2627 }
2628 "
2629 )
2630
2631 (define_insn_and_split "*arm_iorsi3"
2632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2633 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2634 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2635 "TARGET_ARM"
2636 "@
2637 orr%?\\t%0, %1, %2
2638 #"
2639 "TARGET_ARM
2640 && GET_CODE (operands[2]) == CONST_INT
2641 && !const_ok_for_arm (INTVAL (operands[2]))"
2642 [(clobber (const_int 0))]
2643 "
2644 arm_split_constant (IOR, SImode, curr_insn,
2645 INTVAL (operands[2]), operands[0], operands[1], 0);
2646 DONE;
2647 "
2648 [(set_attr "length" "4,16")
2649 (set_attr "predicable" "yes")]
2650 )
2651
2652 (define_insn "*thumb1_iorsi3"
2653 [(set (match_operand:SI 0 "register_operand" "=l")
2654 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2655 (match_operand:SI 2 "register_operand" "l")))]
2656 "TARGET_THUMB1"
2657 "orr\\t%0, %0, %2"
2658 [(set_attr "length" "2")]
2659 )
2660
2661 (define_peephole2
2662 [(match_scratch:SI 3 "r")
2663 (set (match_operand:SI 0 "arm_general_register_operand" "")
2664 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2665 (match_operand:SI 2 "const_int_operand" "")))]
2666 "TARGET_ARM
2667 && !const_ok_for_arm (INTVAL (operands[2]))
2668 && const_ok_for_arm (~INTVAL (operands[2]))"
2669 [(set (match_dup 3) (match_dup 2))
2670 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2671 ""
2672 )
2673
2674 (define_insn "*iorsi3_compare0"
2675 [(set (reg:CC_NOOV CC_REGNUM)
2676 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2677 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2678 (const_int 0)))
2679 (set (match_operand:SI 0 "s_register_operand" "=r")
2680 (ior:SI (match_dup 1) (match_dup 2)))]
2681 "TARGET_32BIT"
2682 "orr%.\\t%0, %1, %2"
2683 [(set_attr "conds" "set")]
2684 )
2685
2686 (define_insn "*iorsi3_compare0_scratch"
2687 [(set (reg:CC_NOOV CC_REGNUM)
2688 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2689 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2690 (const_int 0)))
2691 (clobber (match_scratch:SI 0 "=r"))]
2692 "TARGET_32BIT"
2693 "orr%.\\t%0, %1, %2"
2694 [(set_attr "conds" "set")]
2695 )
2696
2697 (define_insn "xordi3"
2698 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2699 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2700 (match_operand:DI 2 "s_register_operand" "r,r")))]
2701 "TARGET_32BIT && !TARGET_IWMMXT"
2702 "#"
2703 [(set_attr "length" "8")
2704 (set_attr "predicable" "yes")]
2705 )
2706
2707 (define_insn "*xordi_zesidi_di"
2708 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2709 (xor:DI (zero_extend:DI
2710 (match_operand:SI 2 "s_register_operand" "r,r"))
2711 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2712 "TARGET_32BIT"
2713 "@
2714 eor%?\\t%Q0, %Q1, %2
2715 #"
2716 [(set_attr "length" "4,8")
2717 (set_attr "predicable" "yes")]
2718 )
2719
2720 (define_insn "*xordi_sesidi_di"
2721 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2722 (xor:DI (sign_extend:DI
2723 (match_operand:SI 2 "s_register_operand" "r,r"))
2724 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2725 "TARGET_32BIT"
2726 "#"
2727 [(set_attr "length" "8")
2728 (set_attr "predicable" "yes")]
2729 )
2730
2731 (define_expand "xorsi3"
2732 [(set (match_operand:SI 0 "s_register_operand" "")
2733 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2734 (match_operand:SI 2 "arm_rhs_operand" "")))]
2735 "TARGET_EITHER"
2736 "if (TARGET_THUMB1)
2737 if (GET_CODE (operands[2]) == CONST_INT)
2738 operands[2] = force_reg (SImode, operands[2]);
2739 "
2740 )
2741
2742 (define_insn "*arm_xorsi3"
2743 [(set (match_operand:SI 0 "s_register_operand" "=r")
2744 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2745 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2746 "TARGET_32BIT"
2747 "eor%?\\t%0, %1, %2"
2748 [(set_attr "predicable" "yes")]
2749 )
2750
2751 (define_insn "*thumb1_xorsi3"
2752 [(set (match_operand:SI 0 "register_operand" "=l")
2753 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2754 (match_operand:SI 2 "register_operand" "l")))]
2755 "TARGET_THUMB1"
2756 "eor\\t%0, %0, %2"
2757 [(set_attr "length" "2")]
2758 )
2759
2760 (define_insn "*xorsi3_compare0"
2761 [(set (reg:CC_NOOV CC_REGNUM)
2762 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2763 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2764 (const_int 0)))
2765 (set (match_operand:SI 0 "s_register_operand" "=r")
2766 (xor:SI (match_dup 1) (match_dup 2)))]
2767 "TARGET_32BIT"
2768 "eor%.\\t%0, %1, %2"
2769 [(set_attr "conds" "set")]
2770 )
2771
2772 (define_insn "*xorsi3_compare0_scratch"
2773 [(set (reg:CC_NOOV CC_REGNUM)
2774 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2775 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2776 (const_int 0)))]
2777 "TARGET_32BIT"
2778 "teq%?\\t%0, %1"
2779 [(set_attr "conds" "set")]
2780 )
2781
2782 ; By rewriting (IOR (AND (NOT A) (NOT B)) C) as (NOT D), where
2783 ; D = (AND (IOR A B) (NOT C)), we can sometimes merge the final NOT
2784 ; into one of the following insns.
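; For reference, this is just De Morgan's law applied twice:
;   (~A & ~B) | C  ==  ~(A | B) | C  ==  ~((A | B) & ~C)
; so with D = (A | B) & ~C the whole expression is simply ~D.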
2785
2786 (define_split
2787 [(set (match_operand:SI 0 "s_register_operand" "")
2788 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2789 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2790 (match_operand:SI 3 "arm_rhs_operand" "")))
2791 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2792 "TARGET_32BIT"
2793 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2794 (not:SI (match_dup 3))))
2795 (set (match_dup 0) (not:SI (match_dup 4)))]
2796 ""
2797 )
2798
2799 (define_insn "*andsi_iorsi3_notsi"
2800 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2801 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2802 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2803 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2804 "TARGET_32BIT"
2805 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2806 [(set_attr "length" "8")
2807 (set_attr "ce_count" "2")
2808 (set_attr "predicable" "yes")]
2809 )
2810
2811 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2812 ; insns are available?
2813 (define_split
2814 [(set (match_operand:SI 0 "s_register_operand" "")
2815 (match_operator:SI 1 "logical_binary_operator"
2816 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2817 (match_operand:SI 3 "const_int_operand" "")
2818 (match_operand:SI 4 "const_int_operand" ""))
2819 (match_operator:SI 9 "logical_binary_operator"
2820 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2821 (match_operand:SI 6 "const_int_operand" ""))
2822 (match_operand:SI 7 "s_register_operand" "")])]))
2823 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2824 "TARGET_32BIT
2825 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2826 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2827 [(set (match_dup 8)
2828 (match_op_dup 1
2829 [(ashift:SI (match_dup 2) (match_dup 4))
2830 (match_dup 5)]))
2831 (set (match_dup 0)
2832 (match_op_dup 1
2833 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2834 (match_dup 7)]))]
2835 "
2836 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2837 ")
2838
2839 (define_split
2840 [(set (match_operand:SI 0 "s_register_operand" "")
2841 (match_operator:SI 1 "logical_binary_operator"
2842 [(match_operator:SI 9 "logical_binary_operator"
2843 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2844 (match_operand:SI 6 "const_int_operand" ""))
2845 (match_operand:SI 7 "s_register_operand" "")])
2846 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2847 (match_operand:SI 3 "const_int_operand" "")
2848 (match_operand:SI 4 "const_int_operand" ""))]))
2849 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2850 "TARGET_32BIT
2851 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2852 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2853 [(set (match_dup 8)
2854 (match_op_dup 1
2855 [(ashift:SI (match_dup 2) (match_dup 4))
2856 (match_dup 5)]))
2857 (set (match_dup 0)
2858 (match_op_dup 1
2859 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2860 (match_dup 7)]))]
2861 "
2862 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2863 ")
2864
2865 (define_split
2866 [(set (match_operand:SI 0 "s_register_operand" "")
2867 (match_operator:SI 1 "logical_binary_operator"
2868 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2869 (match_operand:SI 3 "const_int_operand" "")
2870 (match_operand:SI 4 "const_int_operand" ""))
2871 (match_operator:SI 9 "logical_binary_operator"
2872 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2873 (match_operand:SI 6 "const_int_operand" ""))
2874 (match_operand:SI 7 "s_register_operand" "")])]))
2875 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2876 "TARGET_32BIT
2877 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2878 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2879 [(set (match_dup 8)
2880 (match_op_dup 1
2881 [(ashift:SI (match_dup 2) (match_dup 4))
2882 (match_dup 5)]))
2883 (set (match_dup 0)
2884 (match_op_dup 1
2885 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2886 (match_dup 7)]))]
2887 "
2888 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2889 ")
2890
2891 (define_split
2892 [(set (match_operand:SI 0 "s_register_operand" "")
2893 (match_operator:SI 1 "logical_binary_operator"
2894 [(match_operator:SI 9 "logical_binary_operator"
2895 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2896 (match_operand:SI 6 "const_int_operand" ""))
2897 (match_operand:SI 7 "s_register_operand" "")])
2898 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2899 (match_operand:SI 3 "const_int_operand" "")
2900 (match_operand:SI 4 "const_int_operand" ""))]))
2901 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2902 "TARGET_32BIT
2903 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2904 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2905 [(set (match_dup 8)
2906 (match_op_dup 1
2907 [(ashift:SI (match_dup 2) (match_dup 4))
2908 (match_dup 5)]))
2909 (set (match_dup 0)
2910 (match_op_dup 1
2911 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2912 (match_dup 7)]))]
2913 "
2914 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2915 ")
2916 \f
2917
2918 ;; Minimum and maximum insns
2919
2920 (define_expand "smaxsi3"
2921 [(parallel [
2922 (set (match_operand:SI 0 "s_register_operand" "")
2923 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2924 (match_operand:SI 2 "arm_rhs_operand" "")))
2925 (clobber (reg:CC CC_REGNUM))])]
2926 "TARGET_32BIT"
2927 "
2928 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2929 {
2930 /* No need for a clobber of the condition code register here. */
2931 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2932 gen_rtx_SMAX (SImode, operands[1],
2933 operands[2])));
2934 DONE;
2935 }
2936 ")
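;; The two special cases rely on the sign bit: X asr #31 is zero when X is
;; non-negative and all ones when X is negative, so
;;   bic %0, %1, %1, asr #31  computes  max (%1, 0)
;;   orr %0, %1, %1, asr #31  computes  max (%1, -1)
;; and the analogous AND form below computes min (%1, 0); none of these
;; needs the condition codes.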
2937
2938 (define_insn "*smax_0"
2939 [(set (match_operand:SI 0 "s_register_operand" "=r")
2940 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2941 (const_int 0)))]
2942 "TARGET_32BIT"
2943 "bic%?\\t%0, %1, %1, asr #31"
2944 [(set_attr "predicable" "yes")]
2945 )
2946
2947 (define_insn "*smax_m1"
2948 [(set (match_operand:SI 0 "s_register_operand" "=r")
2949 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2950 (const_int -1)))]
2951 "TARGET_32BIT"
2952 "orr%?\\t%0, %1, %1, asr #31"
2953 [(set_attr "predicable" "yes")]
2954 )
2955
2956 (define_insn "*arm_smax_insn"
2957 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2958 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2959 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2960 (clobber (reg:CC CC_REGNUM))]
2961 "TARGET_ARM"
2962 "@
2963 cmp\\t%1, %2\;movlt\\t%0, %2
2964 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2965 [(set_attr "conds" "clob")
2966 (set_attr "length" "8,12")]
2967 )
2968
2969 (define_expand "sminsi3"
2970 [(parallel [
2971 (set (match_operand:SI 0 "s_register_operand" "")
2972 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2973 (match_operand:SI 2 "arm_rhs_operand" "")))
2974 (clobber (reg:CC CC_REGNUM))])]
2975 "TARGET_32BIT"
2976 "
2977 if (operands[2] == const0_rtx)
2978 {
2979 /* No need for a clobber of the condition code register here. */
2980 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2981 gen_rtx_SMIN (SImode, operands[1],
2982 operands[2])));
2983 DONE;
2984 }
2985 ")
2986
2987 (define_insn "*smin_0"
2988 [(set (match_operand:SI 0 "s_register_operand" "=r")
2989 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2990 (const_int 0)))]
2991 "TARGET_32BIT"
2992 "and%?\\t%0, %1, %1, asr #31"
2993 [(set_attr "predicable" "yes")]
2994 )
2995
2996 (define_insn "*arm_smin_insn"
2997 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2998 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2999 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3000 (clobber (reg:CC CC_REGNUM))]
3001 "TARGET_ARM"
3002 "@
3003 cmp\\t%1, %2\;movge\\t%0, %2
3004 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3005 [(set_attr "conds" "clob")
3006 (set_attr "length" "8,12")]
3007 )
3008
3009 (define_expand "umaxsi3"
3010 [(parallel [
3011 (set (match_operand:SI 0 "s_register_operand" "")
3012 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3013 (match_operand:SI 2 "arm_rhs_operand" "")))
3014 (clobber (reg:CC CC_REGNUM))])]
3015 "TARGET_32BIT"
3016 ""
3017 )
3018
3019 (define_insn "*arm_umaxsi3"
3020 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3021 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3022 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3023 (clobber (reg:CC CC_REGNUM))]
3024 "TARGET_ARM"
3025 "@
3026 cmp\\t%1, %2\;movcc\\t%0, %2
3027 cmp\\t%1, %2\;movcs\\t%0, %1
3028 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3029 [(set_attr "conds" "clob")
3030 (set_attr "length" "8,8,12")]
3031 )
3032
3033 (define_expand "uminsi3"
3034 [(parallel [
3035 (set (match_operand:SI 0 "s_register_operand" "")
3036 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3037 (match_operand:SI 2 "arm_rhs_operand" "")))
3038 (clobber (reg:CC CC_REGNUM))])]
3039 "TARGET_32BIT"
3040 ""
3041 )
3042
3043 (define_insn "*arm_uminsi3"
3044 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3045 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3046 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3047 (clobber (reg:CC CC_REGNUM))]
3048 "TARGET_ARM"
3049 "@
3050 cmp\\t%1, %2\;movcs\\t%0, %2
3051 cmp\\t%1, %2\;movcc\\t%0, %1
3052 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3053 [(set_attr "conds" "clob")
3054 (set_attr "length" "8,8,12")]
3055 )
3056
3057 (define_insn "*store_minmaxsi"
3058 [(set (match_operand:SI 0 "memory_operand" "=m")
3059 (match_operator:SI 3 "minmax_operator"
3060 [(match_operand:SI 1 "s_register_operand" "r")
3061 (match_operand:SI 2 "s_register_operand" "r")]))
3062 (clobber (reg:CC CC_REGNUM))]
3063 "TARGET_32BIT"
3064 "*
3065 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3066 operands[1], operands[2]);
3067 output_asm_insn (\"cmp\\t%1, %2\", operands);
3068 if (TARGET_THUMB2)
3069 output_asm_insn (\"ite\t%d3\", operands);
3070 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3071 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3072 return \"\";
3073 "
3074 [(set_attr "conds" "clob")
3075 (set (attr "length")
3076 (if_then_else (eq_attr "is_thumb" "yes")
3077 (const_int 14)
3078 (const_int 12)))
3079 (set_attr "type" "store1")]
3080 )
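;; In Thumb-2, conditional instructions must be covered by a preceding IT
;; block, so the sequence above emits "ite" to make the first store
;; conditional on %d3 and the second on the inverse condition; ARM-state
;; conditional stores need no such prefix.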
3081
3082 ; Reject the frame pointer in operand[1], since reloading this after
3083 ; it has been eliminated can cause carnage.
3084 (define_insn "*minmax_arithsi"
3085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3086 (match_operator:SI 4 "shiftable_operator"
3087 [(match_operator:SI 5 "minmax_operator"
3088 [(match_operand:SI 2 "s_register_operand" "r,r")
3089 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3090 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3091 (clobber (reg:CC CC_REGNUM))]
3092 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3093 "*
3094 {
3095 enum rtx_code code = GET_CODE (operands[4]);
3096 bool need_else;
3097
3098 if (which_alternative != 0 || operands[3] != const0_rtx
3099 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3100 need_else = true;
3101 else
3102 need_else = false;
3103
3104 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3105 operands[2], operands[3]);
3106 output_asm_insn (\"cmp\\t%2, %3\", operands);
3107 if (TARGET_THUMB2)
3108 {
3109 if (need_else)
3110 output_asm_insn (\"ite\\t%d5\", operands);
3111 else
3112 output_asm_insn (\"it\\t%d5\", operands);
3113 }
3114 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3115 if (need_else)
3116 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3117 return \"\";
3118 }"
3119 [(set_attr "conds" "clob")
3120 (set (attr "length")
3121 (if_then_else (eq_attr "is_thumb" "yes")
3122 (const_int 14)
3123 (const_int 12)))]
3124 )
3125
3126 \f
3127 ;; Shift and rotation insns
3128
3129 (define_expand "ashldi3"
3130 [(set (match_operand:DI 0 "s_register_operand" "")
3131 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3132 (match_operand:SI 2 "reg_or_int_operand" "")))]
3133 "TARGET_32BIT"
3134 "
3135 if (GET_CODE (operands[2]) == CONST_INT)
3136 {
3137 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3138 {
3139 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3140 DONE;
3141 }
3142 /* Ideally we shouldn't fail here if we could know that operands[1]
3143 ends up already living in an iwmmxt register.  Otherwise it is
3144 cheaper to generate the alternate code than to move the values
3145 to iwmmxt regs and back. */
3146 FAIL;
3147 }
3148 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3149 FAIL;
3150 "
3151 )
3152
3153 (define_insn "arm_ashldi3_1bit"
3154 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3155 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3156 (const_int 1)))
3157 (clobber (reg:CC CC_REGNUM))]
3158 "TARGET_32BIT"
3159 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3160 [(set_attr "conds" "clob")
3161 (set_attr "length" "8")]
3162 )
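;; The shift-by-one special case works because the MOVS leaves the bit
;; shifted out of the low word in the carry flag, and the following ADC
;; computes high + high + carry, i.e. the high word shifted left by one
;; with that bit shifted in.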
3163
3164 (define_expand "ashlsi3"
3165 [(set (match_operand:SI 0 "s_register_operand" "")
3166 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3167 (match_operand:SI 2 "arm_rhs_operand" "")))]
3168 "TARGET_EITHER"
3169 "
3170 if (GET_CODE (operands[2]) == CONST_INT
3171 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3172 {
3173 emit_insn (gen_movsi (operands[0], const0_rtx));
3174 DONE;
3175 }
3176 "
3177 )
3178
3179 (define_insn "*thumb1_ashlsi3"
3180 [(set (match_operand:SI 0 "register_operand" "=l,l")
3181 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3182 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3183 "TARGET_THUMB1"
3184 "lsl\\t%0, %1, %2"
3185 [(set_attr "length" "2")]
3186 )
3187
3188 (define_expand "ashrdi3"
3189 [(set (match_operand:DI 0 "s_register_operand" "")
3190 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3191 (match_operand:SI 2 "reg_or_int_operand" "")))]
3192 "TARGET_32BIT"
3193 "
3194 if (GET_CODE (operands[2]) == CONST_INT)
3195 {
3196 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3197 {
3198 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3199 DONE;
3200 }
3201 /* Ideally we shouldn't fail here if we could know that operands[1]
3202 ends up already living in an iwmmxt register.  Otherwise it is
3203 cheaper to generate the alternate code than to move the values
3204 to iwmmxt regs and back. */
3205 FAIL;
3206 }
3207 else if (!TARGET_REALLY_IWMMXT)
3208 FAIL;
3209 "
3210 )
3211
3212 (define_insn "arm_ashrdi3_1bit"
3213 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3214 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3215 (const_int 1)))
3216 (clobber (reg:CC CC_REGNUM))]
3217 "TARGET_32BIT"
3218 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3219 [(set_attr "conds" "clob")
3220 (set_attr "length" "8")]
3221 )
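;; Here the MOVS puts the bit shifted out of the high word into the carry
;; flag, and RRX rotates the low word right by one through the carry, so
;; that bit becomes bit 31 of the low word; the logical-shift variant
;; below works the same way.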
3222
3223 (define_expand "ashrsi3"
3224 [(set (match_operand:SI 0 "s_register_operand" "")
3225 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3226 (match_operand:SI 2 "arm_rhs_operand" "")))]
3227 "TARGET_EITHER"
3228 "
3229 if (GET_CODE (operands[2]) == CONST_INT
3230 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3231 operands[2] = GEN_INT (31);
3232 "
3233 )
3234
3235 (define_insn "*thumb1_ashrsi3"
3236 [(set (match_operand:SI 0 "register_operand" "=l,l")
3237 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3238 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3239 "TARGET_THUMB1"
3240 "asr\\t%0, %1, %2"
3241 [(set_attr "length" "2")]
3242 )
3243
3244 (define_expand "lshrdi3"
3245 [(set (match_operand:DI 0 "s_register_operand" "")
3246 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3247 (match_operand:SI 2 "reg_or_int_operand" "")))]
3248 "TARGET_32BIT"
3249 "
3250 if (GET_CODE (operands[2]) == CONST_INT)
3251 {
3252 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3253 {
3254 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3255 DONE;
3256 }
3257 /* Ideally we should not fail here if we knew that operands[1] would
3258 end up living in an iwmmxt register anyway; otherwise it is cheaper
3259 to generate the alternative code than to move values to iwmmxt regs
3260 and back. */
3261 FAIL;
3262 }
3263 else if (!TARGET_REALLY_IWMMXT)
3264 FAIL;
3265 "
3266 )
3267
3268 (define_insn "arm_lshrdi3_1bit"
3269 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3270 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3271 (const_int 1)))
3272 (clobber (reg:CC CC_REGNUM))]
3273 "TARGET_32BIT"
3274 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3275 [(set_attr "conds" "clob")
3276 (set_attr "length" "8")]
3277 )
3278
3279 (define_expand "lshrsi3"
3280 [(set (match_operand:SI 0 "s_register_operand" "")
3281 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3282 (match_operand:SI 2 "arm_rhs_operand" "")))]
3283 "TARGET_EITHER"
3284 "
3285 if (GET_CODE (operands[2]) == CONST_INT
3286 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3287 {
3288 emit_insn (gen_movsi (operands[0], const0_rtx));
3289 DONE;
3290 }
3291 "
3292 )
3293
3294 (define_insn "*thumb1_lshrsi3"
3295 [(set (match_operand:SI 0 "register_operand" "=l,l")
3296 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3297 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3298 "TARGET_THUMB1"
3299 "lsr\\t%0, %1, %2"
3300 [(set_attr "length" "2")]
3301 )
3302
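;; The ARM has no rotate-left instruction, so a left rotate by N is
;; rewritten as a rotate right by (32 - N) % 32; a rotate left by 8, for
;; example, becomes a rotate right by 24.  For variable amounts the
;; expander computes 32 - N at run time.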
3303 (define_expand "rotlsi3"
3304 [(set (match_operand:SI 0 "s_register_operand" "")
3305 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3306 (match_operand:SI 2 "reg_or_int_operand" "")))]
3307 "TARGET_32BIT"
3308 "
3309 if (GET_CODE (operands[2]) == CONST_INT)
3310 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3311 else
3312 {
3313 rtx reg = gen_reg_rtx (SImode);
3314 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3315 operands[2] = reg;
3316 }
3317 "
3318 )
3319
3320 (define_expand "rotrsi3"
3321 [(set (match_operand:SI 0 "s_register_operand" "")
3322 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3323 (match_operand:SI 2 "arm_rhs_operand" "")))]
3324 "TARGET_EITHER"
3325 "
3326 if (TARGET_32BIT)
3327 {
3328 if (GET_CODE (operands[2]) == CONST_INT
3329 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3330 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3331 }
3332 else /* TARGET_THUMB1 */
3333 {
3334 if (GET_CODE (operands [2]) == CONST_INT)
3335 operands [2] = force_reg (SImode, operands[2]);
3336 }
3337 "
3338 )
3339
3340 (define_insn "*thumb1_rotrsi3"
3341 [(set (match_operand:SI 0 "register_operand" "=l")
3342 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3343 (match_operand:SI 2 "register_operand" "l")))]
3344 "TARGET_THUMB1"
3345 "ror\\t%0, %0, %2"
3346 [(set_attr "length" "2")]
3347 )
3348
3349 (define_insn "*arm_shiftsi3"
3350 [(set (match_operand:SI 0 "s_register_operand" "=r")
3351 (match_operator:SI 3 "shift_operator"
3352 [(match_operand:SI 1 "s_register_operand" "r")
3353 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3354 "TARGET_32BIT"
3355 "* return arm_output_shift(operands, 0);"
3356 [(set_attr "predicable" "yes")
3357 (set_attr "shift" "1")
3358 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3359 (const_string "alu_shift")
3360 (const_string "alu_shift_reg")))]
3361 )
3362
3363 (define_insn "*shiftsi3_compare0"
3364 [(set (reg:CC_NOOV CC_REGNUM)
3365 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3366 [(match_operand:SI 1 "s_register_operand" "r")
3367 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3368 (const_int 0)))
3369 (set (match_operand:SI 0 "s_register_operand" "=r")
3370 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3371 "TARGET_32BIT"
3372 "* return arm_output_shift(operands, 1);"
3373 [(set_attr "conds" "set")
3374 (set_attr "shift" "1")
3375 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3376 (const_string "alu_shift")
3377 (const_string "alu_shift_reg")))]
3378 )
3379
3380 (define_insn "*shiftsi3_compare0_scratch"
3381 [(set (reg:CC_NOOV CC_REGNUM)
3382 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3383 [(match_operand:SI 1 "s_register_operand" "r")
3384 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3385 (const_int 0)))
3386 (clobber (match_scratch:SI 0 "=r"))]
3387 "TARGET_32BIT"
3388 "* return arm_output_shift(operands, 1);"
3389 [(set_attr "conds" "set")
3390 (set_attr "shift" "1")]
3391 )
3392
3393 (define_insn "*arm_notsi_shiftsi"
3394 [(set (match_operand:SI 0 "s_register_operand" "=r")
3395 (not:SI (match_operator:SI 3 "shift_operator"
3396 [(match_operand:SI 1 "s_register_operand" "r")
3397 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3398 "TARGET_ARM"
3399 "mvn%?\\t%0, %1%S3"
3400 [(set_attr "predicable" "yes")
3401 (set_attr "shift" "1")
3402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3403 (const_string "alu_shift")
3404 (const_string "alu_shift_reg")))]
3405 )
3406
3407 (define_insn "*arm_notsi_shiftsi_compare0"
3408 [(set (reg:CC_NOOV CC_REGNUM)
3409 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3410 [(match_operand:SI 1 "s_register_operand" "r")
3411 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3412 (const_int 0)))
3413 (set (match_operand:SI 0 "s_register_operand" "=r")
3414 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3415 "TARGET_ARM"
3416 "mvn%.\\t%0, %1%S3"
3417 [(set_attr "conds" "set")
3418 (set_attr "shift" "1")
3419 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3420 (const_string "alu_shift")
3421 (const_string "alu_shift_reg")))]
3422 )
3423
3424 (define_insn "*arm_not_shiftsi_compare0_scratch"
3425 [(set (reg:CC_NOOV CC_REGNUM)
3426 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3427 [(match_operand:SI 1 "s_register_operand" "r")
3428 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3429 (const_int 0)))
3430 (clobber (match_scratch:SI 0 "=r"))]
3431 "TARGET_ARM"
3432 "mvn%.\\t%0, %1%S3"
3433 [(set_attr "conds" "set")
3434 (set_attr "shift" "1")
3435 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3436 (const_string "alu_shift")
3437 (const_string "alu_shift_reg")))]
3438 )
3439
3440 ;; We don't really have extzv, but defining this using shifts helps
3441 ;; to reduce register pressure later on.
3442
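;; As a rough illustration of the shift-pair fallback below: extracting a
;; 5-bit field that starts at bit 3 of %1 becomes, in effect,
;;      lsl     tmp, %1, #24    @ 32 - 5 - 3
;;      lsr     %0, tmp, #27    @ 32 - 5
;; where "tmp" is just an illustrative name for the fresh SImode register
;; created in operand 4 (Thumb-2 instead uses the ubfx pattern further on).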
3443 (define_expand "extzv"
3444 [(set (match_dup 4)
3445 (ashift:SI (match_operand:SI 1 "register_operand" "")
3446 (match_operand:SI 2 "const_int_operand" "")))
3447 (set (match_operand:SI 0 "register_operand" "")
3448 (lshiftrt:SI (match_dup 4)
3449 (match_operand:SI 3 "const_int_operand" "")))]
3450 "TARGET_THUMB1 || arm_arch_thumb2"
3451 "
3452 {
3453 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3454 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3455
3456 if (arm_arch_thumb2)
3457 {
3458 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3459 operands[3]));
3460 DONE;
3461 }
3462
3463 operands[3] = GEN_INT (rshift);
3464
3465 if (lshift == 0)
3466 {
3467 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3468 DONE;
3469 }
3470
3471 operands[2] = GEN_INT (lshift);
3472 operands[4] = gen_reg_rtx (SImode);
3473 }"
3474 )
3475
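;; On Thumb-2 the extracts map directly onto the bitfield instructions:
;; sbfx/ubfx take the destination, source, least significant bit and
;; width, so operand 3 (the bit position) and operand 2 (the field size)
;; appear in that order in the templates below.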
3476 (define_insn "extv"
3477 [(set (match_operand:SI 0 "s_register_operand" "=r")
3478 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3479 (match_operand:SI 2 "const_int_operand" "M")
3480 (match_operand:SI 3 "const_int_operand" "M")))]
3481 "arm_arch_thumb2"
3482 "sbfx%?\t%0, %1, %3, %2"
3483 [(set_attr "length" "4")
3484 (set_attr "predicable" "yes")]
3485 )
3486
3487 (define_insn "extzv_t2"
3488 [(set (match_operand:SI 0 "s_register_operand" "=r")
3489 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3490 (match_operand:SI 2 "const_int_operand" "M")
3491 (match_operand:SI 3 "const_int_operand" "M")))]
3492 "arm_arch_thumb2"
3493 "ubfx%?\t%0, %1, %3, %2"
3494 [(set_attr "length" "4")
3495 (set_attr "predicable" "yes")]
3496 )
3497
3498 \f
3499 ;; Unary arithmetic insns
3500
3501 (define_expand "negdi2"
3502 [(parallel
3503 [(set (match_operand:DI 0 "s_register_operand" "")
3504 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3505 (clobber (reg:CC CC_REGNUM))])]
3506 "TARGET_EITHER"
3507 "
3508 if (TARGET_THUMB1)
3509 {
3510 if (GET_CODE (operands[1]) != REG)
3511 operands[1] = force_reg (DImode, operands[1]);
3512 }
3513 "
3514 )
3515
3516 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3517 ;; The second alternative is to allow the common case of a *full* overlap.
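;; The sequence negates the 64-bit value through the carry flag:
;; "rsbs %Q0, %Q1, #0" computes 0 - low and sets C, and "rsc %R0, %R1, #0"
;; then computes 0 - high - !C, propagating the borrow into the high word.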
3518 (define_insn "*arm_negdi2"
3519 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3520 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3521 (clobber (reg:CC CC_REGNUM))]
3522 "TARGET_ARM"
3523 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3524 [(set_attr "conds" "clob")
3525 (set_attr "length" "8")]
3526 )
3527
3528 (define_insn "*thumb1_negdi2"
3529 [(set (match_operand:DI 0 "register_operand" "=&l")
3530 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3531 (clobber (reg:CC CC_REGNUM))]
3532 "TARGET_THUMB1"
3533 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3534 [(set_attr "length" "6")]
3535 )
3536
3537 (define_expand "negsi2"
3538 [(set (match_operand:SI 0 "s_register_operand" "")
3539 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3540 "TARGET_EITHER"
3541 ""
3542 )
3543
3544 (define_insn "*arm_negsi2"
3545 [(set (match_operand:SI 0 "s_register_operand" "=r")
3546 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3547 "TARGET_32BIT"
3548 "rsb%?\\t%0, %1, #0"
3549 [(set_attr "predicable" "yes")]
3550 )
3551
3552 (define_insn "*thumb1_negsi2"
3553 [(set (match_operand:SI 0 "register_operand" "=l")
3554 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3555 "TARGET_THUMB1"
3556 "neg\\t%0, %1"
3557 [(set_attr "length" "2")]
3558 )
3559
3560 (define_expand "negsf2"
3561 [(set (match_operand:SF 0 "s_register_operand" "")
3562 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3563 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3564 ""
3565 )
3566
3567 (define_expand "negdf2"
3568 [(set (match_operand:DF 0 "s_register_operand" "")
3569 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3570 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3571 "")
3572
3573 ;; abssi2 doesn't really clobber the condition codes if a different register
3574 ;; is being set. To keep things simple, assume during rtl manipulations that
3575 ;; it does, but tell the final scan operator the truth. Similarly for
3576 ;; (neg (abs...))
3577
3578 (define_expand "abssi2"
3579 [(parallel
3580 [(set (match_operand:SI 0 "s_register_operand" "")
3581 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3582 (clobber (match_dup 2))])]
3583 "TARGET_EITHER"
3584 "
3585 if (TARGET_THUMB1)
3586 operands[2] = gen_rtx_SCRATCH (SImode);
3587 else
3588 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3589 ")
3590
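;; The second, flag-preserving alternative below uses the branch-free
;; identity abs(x) = (x ^ m) - m with m = x >> 31 (arithmetic), which is
;; exactly what the eor/sub pair computes; the Thumb-1 splitter further
;; down uses the equivalent (x + m) ^ m form, and the neg-abs pattern is
;; the same trick with the final subtraction reversed.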
3591 (define_insn "*arm_abssi2"
3592 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3593 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3594 (clobber (reg:CC CC_REGNUM))]
3595 "TARGET_ARM"
3596 "@
3597 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3598 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3599 [(set_attr "conds" "clob,*")
3600 (set_attr "shift" "1")
3601 ;; predicable can't be set based on the variant, so left as no
3602 (set_attr "length" "8")]
3603 )
3604
3605 (define_insn_and_split "*thumb1_abssi2"
3606 [(set (match_operand:SI 0 "s_register_operand" "=l")
3607 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3608 (clobber (match_scratch:SI 2 "=&l"))]
3609 "TARGET_THUMB1"
3610 "#"
3611 "TARGET_THUMB1 && reload_completed"
3612 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3613 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3614 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3615 ""
3616 [(set_attr "length" "6")]
3617 )
3618
3619 (define_insn "*arm_neg_abssi2"
3620 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3621 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3622 (clobber (reg:CC CC_REGNUM))]
3623 "TARGET_ARM"
3624 "@
3625 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3626 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3627 [(set_attr "conds" "clob,*")
3628 (set_attr "shift" "1")
3629 ;; predicable can't be set based on the variant, so left as no
3630 (set_attr "length" "8")]
3631 )
3632
3633 (define_insn_and_split "*thumb1_neg_abssi2"
3634 [(set (match_operand:SI 0 "s_register_operand" "=l")
3635 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3636 (clobber (match_scratch:SI 2 "=&l"))]
3637 "TARGET_THUMB1"
3638 "#"
3639 "TARGET_THUMB1 && reload_completed"
3640 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3641 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3642 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3643 ""
3644 [(set_attr "length" "6")]
3645 )
3646
3647 (define_expand "abssf2"
3648 [(set (match_operand:SF 0 "s_register_operand" "")
3649 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3650 "TARGET_32BIT && TARGET_HARD_FLOAT"
3651 "")
3652
3653 (define_expand "absdf2"
3654 [(set (match_operand:DF 0 "s_register_operand" "")
3655 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3656 "TARGET_32BIT && TARGET_HARD_FLOAT"
3657 "")
3658
3659 (define_expand "sqrtsf2"
3660 [(set (match_operand:SF 0 "s_register_operand" "")
3661 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3662 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3663 "")
3664
3665 (define_expand "sqrtdf2"
3666 [(set (match_operand:DF 0 "s_register_operand" "")
3667 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3668 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3669 "")
3670
3671 (define_insn_and_split "one_cmpldi2"
3672 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3673 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3674 "TARGET_32BIT"
3675 "#"
3676 "TARGET_32BIT && reload_completed"
3677 [(set (match_dup 0) (not:SI (match_dup 1)))
3678 (set (match_dup 2) (not:SI (match_dup 3)))]
3679 "
3680 {
3681 operands[2] = gen_highpart (SImode, operands[0]);
3682 operands[0] = gen_lowpart (SImode, operands[0]);
3683 operands[3] = gen_highpart (SImode, operands[1]);
3684 operands[1] = gen_lowpart (SImode, operands[1]);
3685 }"
3686 [(set_attr "length" "8")
3687 (set_attr "predicable" "yes")]
3688 )
3689
3690 (define_expand "one_cmplsi2"
3691 [(set (match_operand:SI 0 "s_register_operand" "")
3692 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3693 "TARGET_EITHER"
3694 ""
3695 )
3696
3697 (define_insn "*arm_one_cmplsi2"
3698 [(set (match_operand:SI 0 "s_register_operand" "=r")
3699 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3700 "TARGET_32BIT"
3701 "mvn%?\\t%0, %1"
3702 [(set_attr "predicable" "yes")]
3703 )
3704
3705 (define_insn "*thumb1_one_cmplsi2"
3706 [(set (match_operand:SI 0 "register_operand" "=l")
3707 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3708 "TARGET_THUMB1"
3709 "mvn\\t%0, %1"
3710 [(set_attr "length" "2")]
3711 )
3712
3713 (define_insn "*notsi_compare0"
3714 [(set (reg:CC_NOOV CC_REGNUM)
3715 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3716 (const_int 0)))
3717 (set (match_operand:SI 0 "s_register_operand" "=r")
3718 (not:SI (match_dup 1)))]
3719 "TARGET_32BIT"
3720 "mvn%.\\t%0, %1"
3721 [(set_attr "conds" "set")]
3722 )
3723
3724 (define_insn "*notsi_compare0_scratch"
3725 [(set (reg:CC_NOOV CC_REGNUM)
3726 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3727 (const_int 0)))
3728 (clobber (match_scratch:SI 0 "=r"))]
3729 "TARGET_32BIT"
3730 "mvn%.\\t%0, %1"
3731 [(set_attr "conds" "set")]
3732 )
3733 \f
3734 ;; Fixed <--> Floating conversion insns
3735
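;; There are no direct integer <-> HFmode conversions, so the HF expanders
;; below bounce through SFmode: the value is converted to or from SFmode
;; first and then narrowed to, or widened from, HFmode.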
3736 (define_expand "floatsihf2"
3737 [(set (match_operand:HF 0 "general_operand" "")
3738 (float:HF (match_operand:SI 1 "general_operand" "")))]
3739 "TARGET_EITHER"
3740 "
3741 {
3742 rtx op1 = gen_reg_rtx (SFmode);
3743 expand_float (op1, operands[1], 0);
3744 op1 = convert_to_mode (HFmode, op1, 0);
3745 emit_move_insn (operands[0], op1);
3746 DONE;
3747 }"
3748 )
3749
3750 (define_expand "floatdihf2"
3751 [(set (match_operand:HF 0 "general_operand" "")
3752 (float:HF (match_operand:DI 1 "general_operand" "")))]
3753 "TARGET_EITHER"
3754 "
3755 {
3756 rtx op1 = gen_reg_rtx (SFmode);
3757 expand_float (op1, operands[1], 0);
3758 op1 = convert_to_mode (HFmode, op1, 0);
3759 emit_move_insn (operands[0], op1);
3760 DONE;
3761 }"
3762 )
3763
3764 (define_expand "floatsisf2"
3765 [(set (match_operand:SF 0 "s_register_operand" "")
3766 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3767 "TARGET_32BIT && TARGET_HARD_FLOAT"
3768 "
3769 if (TARGET_MAVERICK)
3770 {
3771 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3772 DONE;
3773 }
3774 ")
3775
3776 (define_expand "floatsidf2"
3777 [(set (match_operand:DF 0 "s_register_operand" "")
3778 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3779 "TARGET_32BIT && TARGET_HARD_FLOAT"
3780 "
3781 if (TARGET_MAVERICK)
3782 {
3783 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3784 DONE;
3785 }
3786 ")
3787
3788 (define_expand "fix_trunchfsi2"
3789 [(set (match_operand:SI 0 "general_operand" "")
3790 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3791 "TARGET_EITHER"
3792 "
3793 {
3794 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3795 expand_fix (operands[0], op1, 0);
3796 DONE;
3797 }"
3798 )
3799
3800 (define_expand "fix_trunchfdi2"
3801 [(set (match_operand:DI 0 "general_operand" "")
3802 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3803 "TARGET_EITHER"
3804 "
3805 {
3806 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3807 expand_fix (operands[0], op1, 0);
3808 DONE;
3809 }"
3810 )
3811
3812 (define_expand "fix_truncsfsi2"
3813 [(set (match_operand:SI 0 "s_register_operand" "")
3814 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3815 "TARGET_32BIT && TARGET_HARD_FLOAT"
3816 "
3817 if (TARGET_MAVERICK)
3818 {
3819 if (!cirrus_fp_register (operands[0], SImode))
3820 operands[0] = force_reg (SImode, operands[0]);
3821 if (!cirrus_fp_register (operands[1], SFmode))
3822 operands[1] = force_reg (SFmode, operands[1]);
3823 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3824 DONE;
3825 }
3826 ")
3827
3828 (define_expand "fix_truncdfsi2"
3829 [(set (match_operand:SI 0 "s_register_operand" "")
3830 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3831 "TARGET_32BIT && TARGET_HARD_FLOAT"
3832 "
3833 if (TARGET_MAVERICK)
3834 {
3835 if (!cirrus_fp_register (operands[1], DFmode))
3836 operands[1] = force_reg (DFmode, operands[1]);
3837 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3838 DONE;
3839 }
3840 ")
3841
3842 ;; Truncation insns
3843
3844 (define_expand "truncdfsf2"
3845 [(set (match_operand:SF 0 "s_register_operand" "")
3846 (float_truncate:SF
3847 (match_operand:DF 1 "s_register_operand" "")))]
3848 "TARGET_32BIT && TARGET_HARD_FLOAT"
3849 ""
3850 )
3851
3852 /* DFmode -> HFmode conversions have to go through SFmode. */
3853 (define_expand "truncdfhf2"
3854 [(set (match_operand:HF 0 "general_operand" "")
3855 (float_truncate:HF
3856 (match_operand:DF 1 "general_operand" "")))]
3857 "TARGET_EITHER"
3858 "
3859 {
3860 rtx op1;
3861 op1 = convert_to_mode (SFmode, operands[1], 0);
3862 op1 = convert_to_mode (HFmode, op1, 0);
3863 emit_move_insn (operands[0], op1);
3864 DONE;
3865 }"
3866 )
3867 \f
3868 ;; Zero and sign extension instructions.
3869
3870 (define_expand "zero_extendsidi2"
3871 [(set (match_operand:DI 0 "s_register_operand" "")
3872 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3873 "TARGET_32BIT"
3874 ""
3875 )
3876
3877 (define_insn "*arm_zero_extendsidi2"
3878 [(set (match_operand:DI 0 "s_register_operand" "=r")
3879 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3880 "TARGET_ARM"
3881 "*
3882 if (REGNO (operands[1])
3883 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3884 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3885 return \"mov%?\\t%R0, #0\";
3886 "
3887 [(set_attr "length" "8")
3888 (set_attr "predicable" "yes")]
3889 )
3890
3891 (define_expand "zero_extendqidi2"
3892 [(set (match_operand:DI 0 "s_register_operand" "")
3893 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3894 "TARGET_32BIT"
3895 ""
3896 )
3897
3898 (define_insn "*arm_zero_extendqidi2"
3899 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3900 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3901 "TARGET_ARM"
3902 "@
3903 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3904 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3905 [(set_attr "length" "8")
3906 (set_attr "predicable" "yes")
3907 (set_attr "type" "*,load_byte")
3908 (set_attr "pool_range" "*,4092")
3909 (set_attr "neg_pool_range" "*,4084")]
3910 )
3911
3912 (define_expand "extendsidi2"
3913 [(set (match_operand:DI 0 "s_register_operand" "")
3914 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3915 "TARGET_32BIT"
3916 ""
3917 )
3918
3919 (define_insn "*arm_extendsidi2"
3920 [(set (match_operand:DI 0 "s_register_operand" "=r")
3921 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3922 "TARGET_ARM"
3923 "*
3924 if (REGNO (operands[1])
3925 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3926 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3927 return \"mov%?\\t%R0, %Q0, asr #31\";
3928 "
3929 [(set_attr "length" "8")
3930 (set_attr "shift" "1")
3931 (set_attr "predicable" "yes")]
3932 )
3933
3934 (define_expand "zero_extendhisi2"
3935 [(set (match_dup 2)
3936 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3937 (const_int 16)))
3938 (set (match_operand:SI 0 "s_register_operand" "")
3939 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3940 "TARGET_EITHER"
3941 "
3942 {
3943 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3944 {
3945 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3946 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3947 DONE;
3948 }
3949
3950 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3951 {
3952 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3953 DONE;
3954 }
3955
3956 if (!s_register_operand (operands[1], HImode))
3957 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3958
3959 if (arm_arch6)
3960 {
3961 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3962 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3963 DONE;
3964 }
3965
3966 operands[1] = gen_lowpart (SImode, operands[1]);
3967 operands[2] = gen_reg_rtx (SImode);
3968 }"
3969 )
3970
3971 (define_insn "*thumb1_zero_extendhisi2"
3972 [(set (match_operand:SI 0 "register_operand" "=l")
3973 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3974 "TARGET_THUMB1 && !arm_arch6"
3975 "*
3976 rtx mem = XEXP (operands[1], 0);
3977
3978 if (GET_CODE (mem) == CONST)
3979 mem = XEXP (mem, 0);
3980
3981 if (GET_CODE (mem) == LABEL_REF)
3982 return \"ldr\\t%0, %1\";
3983
3984 if (GET_CODE (mem) == PLUS)
3985 {
3986 rtx a = XEXP (mem, 0);
3987 rtx b = XEXP (mem, 1);
3988
3989 /* This can happen due to bugs in reload. */
3990 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3991 {
3992 rtx ops[2];
3993 ops[0] = operands[0];
3994 ops[1] = a;
3995
3996 output_asm_insn (\"mov %0, %1\", ops);
3997
3998 XEXP (mem, 0) = operands[0];
3999 }
4000
4001 else if ( GET_CODE (a) == LABEL_REF
4002 && GET_CODE (b) == CONST_INT)
4003 return \"ldr\\t%0, %1\";
4004 }
4005
4006 return \"ldrh\\t%0, %1\";
4007 "
4008 [(set_attr "length" "4")
4009 (set_attr "type" "load_byte")
4010 (set_attr "pool_range" "60")]
4011 )
4012
4013 (define_insn "*thumb1_zero_extendhisi2_v6"
4014 [(set (match_operand:SI 0 "register_operand" "=l,l")
4015 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4016 "TARGET_THUMB1 && arm_arch6"
4017 "*
4018 rtx mem;
4019
4020 if (which_alternative == 0)
4021 return \"uxth\\t%0, %1\";
4022
4023 mem = XEXP (operands[1], 0);
4024
4025 if (GET_CODE (mem) == CONST)
4026 mem = XEXP (mem, 0);
4027
4028 if (GET_CODE (mem) == LABEL_REF)
4029 return \"ldr\\t%0, %1\";
4030
4031 if (GET_CODE (mem) == PLUS)
4032 {
4033 rtx a = XEXP (mem, 0);
4034 rtx b = XEXP (mem, 1);
4035
4036 /* This can happen due to bugs in reload. */
4037 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4038 {
4039 rtx ops[2];
4040 ops[0] = operands[0];
4041 ops[1] = a;
4042
4043 output_asm_insn (\"mov %0, %1\", ops);
4044
4045 XEXP (mem, 0) = operands[0];
4046 }
4047
4048 else if ( GET_CODE (a) == LABEL_REF
4049 && GET_CODE (b) == CONST_INT)
4050 return \"ldr\\t%0, %1\";
4051 }
4052
4053 return \"ldrh\\t%0, %1\";
4054 "
4055 [(set_attr "length" "2,4")
4056 (set_attr "type" "alu_shift,load_byte")
4057 (set_attr "pool_range" "*,60")]
4058 )
4059
4060 (define_insn "*arm_zero_extendhisi2"
4061 [(set (match_operand:SI 0 "s_register_operand" "=r")
4062 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4063 "TARGET_ARM && arm_arch4 && !arm_arch6"
4064 "ldr%(h%)\\t%0, %1"
4065 [(set_attr "type" "load_byte")
4066 (set_attr "predicable" "yes")
4067 (set_attr "pool_range" "256")
4068 (set_attr "neg_pool_range" "244")]
4069 )
4070
4071 (define_insn "*arm_zero_extendhisi2_v6"
4072 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4073 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4074 "TARGET_ARM && arm_arch6"
4075 "@
4076 uxth%?\\t%0, %1
4077 ldr%(h%)\\t%0, %1"
4078 [(set_attr "type" "alu_shift,load_byte")
4079 (set_attr "predicable" "yes")
4080 (set_attr "pool_range" "*,256")
4081 (set_attr "neg_pool_range" "*,244")]
4082 )
4083
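;; When the integer SIMD instructions are available (TARGET_INT_SIMD), a
;; zero or sign extension feeding an addition folds into a single
;; extend-and-add instruction; the *...addsi patterns below and further
;; down emit uxtah/uxtab and sxtah/sxtab respectively.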
4084 (define_insn "*arm_zero_extendhisi2addsi"
4085 [(set (match_operand:SI 0 "s_register_operand" "=r")
4086 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4087 (match_operand:SI 2 "s_register_operand" "r")))]
4088 "TARGET_INT_SIMD"
4089 "uxtah%?\\t%0, %2, %1"
4090 [(set_attr "type" "alu_shift")
4091 (set_attr "predicable" "yes")]
4092 )
4093
4094 (define_expand "zero_extendqisi2"
4095 [(set (match_operand:SI 0 "s_register_operand" "")
4096 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4097 "TARGET_EITHER"
4098 "
4099 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4100 {
4101 if (TARGET_ARM)
4102 {
4103 emit_insn (gen_andsi3 (operands[0],
4104 gen_lowpart (SImode, operands[1]),
4105 GEN_INT (255)));
4106 }
4107 else /* TARGET_THUMB */
4108 {
4109 rtx temp = gen_reg_rtx (SImode);
4110 rtx ops[3];
4111
4112 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4113 operands[1] = gen_lowpart (SImode, operands[1]);
4114
4115 ops[0] = temp;
4116 ops[1] = operands[1];
4117 ops[2] = GEN_INT (24);
4118
4119 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4120 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4121
4122 ops[0] = operands[0];
4123 ops[1] = temp;
4124 ops[2] = GEN_INT (24);
4125
4126 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4127 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4128 }
4129 DONE;
4130 }
4131 "
4132 )
4133
4134 (define_insn "*thumb1_zero_extendqisi2"
4135 [(set (match_operand:SI 0 "register_operand" "=l")
4136 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4137 "TARGET_THUMB1 && !arm_arch6"
4138 "ldrb\\t%0, %1"
4139 [(set_attr "length" "2")
4140 (set_attr "type" "load_byte")
4141 (set_attr "pool_range" "32")]
4142 )
4143
4144 (define_insn "*thumb1_zero_extendqisi2_v6"
4145 [(set (match_operand:SI 0 "register_operand" "=l,l")
4146 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4147 "TARGET_THUMB1 && arm_arch6"
4148 "@
4149 uxtb\\t%0, %1
4150 ldrb\\t%0, %1"
4151 [(set_attr "length" "2,2")
4152 (set_attr "type" "alu_shift,load_byte")
4153 (set_attr "pool_range" "*,32")]
4154 )
4155
4156 (define_insn "*arm_zero_extendqisi2"
4157 [(set (match_operand:SI 0 "s_register_operand" "=r")
4158 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4159 "TARGET_ARM && !arm_arch6"
4160 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4161 [(set_attr "type" "load_byte")
4162 (set_attr "predicable" "yes")
4163 (set_attr "pool_range" "4096")
4164 (set_attr "neg_pool_range" "4084")]
4165 )
4166
4167 (define_insn "*arm_zero_extendqisi2_v6"
4168 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4169 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4170 "TARGET_ARM && arm_arch6"
4171 "@
4172 uxtb%(%)\\t%0, %1
4173 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4174 [(set_attr "type" "alu_shift,load_byte")
4175 (set_attr "predicable" "yes")
4176 (set_attr "pool_range" "*,4096")
4177 (set_attr "neg_pool_range" "*,4084")]
4178 )
4179
4180 (define_insn "*arm_zero_extendqisi2addsi"
4181 [(set (match_operand:SI 0 "s_register_operand" "=r")
4182 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4183 (match_operand:SI 2 "s_register_operand" "r")))]
4184 "TARGET_INT_SIMD"
4185 "uxtab%?\\t%0, %2, %1"
4186 [(set_attr "predicable" "yes")
4187 (set_attr "insn" "xtab")
4188 (set_attr "type" "alu_shift")]
4189 )
4190
4191 (define_split
4192 [(set (match_operand:SI 0 "s_register_operand" "")
4193 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4194 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4195 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4196 [(set (match_dup 2) (match_dup 1))
4197 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4198 ""
4199 )
4200
4201 (define_split
4202 [(set (match_operand:SI 0 "s_register_operand" "")
4203 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4204 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4205 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4206 [(set (match_dup 2) (match_dup 1))
4207 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4208 ""
4209 )
4210
4211 (define_code_iterator ior_xor [ior xor])
4212
4213 (define_split
4214 [(set (match_operand:SI 0 "s_register_operand" "")
4215 (ior_xor:SI (and:SI (ashift:SI
4216 (match_operand:SI 1 "s_register_operand" "")
4217 (match_operand:SI 2 "const_int_operand" ""))
4218 (match_operand:SI 3 "const_int_operand" ""))
4219 (zero_extend:SI
4220 (match_operator 5 "subreg_lowpart_operator"
4221 [(match_operand:SI 4 "s_register_operand" "")]))))]
4222 "TARGET_32BIT
4223 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4224 == (GET_MODE_MASK (GET_MODE (operands[5]))
4225 & (GET_MODE_MASK (GET_MODE (operands[5]))
4226 << (INTVAL (operands[2])))))"
4227 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4228 (match_dup 4)))
4229 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4230 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4231 )
4232
4233 (define_insn "*compareqi_eq0"
4234 [(set (reg:CC_Z CC_REGNUM)
4235 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4236 (const_int 0)))]
4237 "TARGET_32BIT"
4238 "tst\\t%0, #255"
4239 [(set_attr "conds" "set")]
4240 )
4241
4242 (define_expand "extendhisi2"
4243 [(set (match_dup 2)
4244 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4245 (const_int 16)))
4246 (set (match_operand:SI 0 "s_register_operand" "")
4247 (ashiftrt:SI (match_dup 2)
4248 (const_int 16)))]
4249 "TARGET_EITHER"
4250 "
4251 {
4252 if (GET_CODE (operands[1]) == MEM)
4253 {
4254 if (TARGET_THUMB1)
4255 {
4256 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4257 DONE;
4258 }
4259 else if (arm_arch4)
4260 {
4261 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4262 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4263 DONE;
4264 }
4265 }
4266
4267 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4268 {
4269 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4270 DONE;
4271 }
4272
4273 if (!s_register_operand (operands[1], HImode))
4274 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4275
4276 if (arm_arch6)
4277 {
4278 if (TARGET_THUMB1)
4279 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4280 else
4281 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4282 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4283
4284 DONE;
4285 }
4286
4287 operands[1] = gen_lowpart (SImode, operands[1]);
4288 operands[2] = gen_reg_rtx (SImode);
4289 }"
4290 )
4291
4292 (define_insn "thumb1_extendhisi2"
4293 [(set (match_operand:SI 0 "register_operand" "=l")
4294 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4295 (clobber (match_scratch:SI 2 "=&l"))]
4296 "TARGET_THUMB1 && !arm_arch6"
4297 "*
4298 {
4299 rtx ops[4];
4300 rtx mem = XEXP (operands[1], 0);
4301
4302 /* This code used to try to use 'V', and fix the address only if it was
4303 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4304 range of QImode offsets, and offsettable_address_p does a QImode
4305 address check. */
4306
4307 if (GET_CODE (mem) == CONST)
4308 mem = XEXP (mem, 0);
4309
4310 if (GET_CODE (mem) == LABEL_REF)
4311 return \"ldr\\t%0, %1\";
4312
4313 if (GET_CODE (mem) == PLUS)
4314 {
4315 rtx a = XEXP (mem, 0);
4316 rtx b = XEXP (mem, 1);
4317
4318 if (GET_CODE (a) == LABEL_REF
4319 && GET_CODE (b) == CONST_INT)
4320 return \"ldr\\t%0, %1\";
4321
4322 if (GET_CODE (b) == REG)
4323 return \"ldrsh\\t%0, %1\";
4324
4325 ops[1] = a;
4326 ops[2] = b;
4327 }
4328 else
4329 {
4330 ops[1] = mem;
4331 ops[2] = const0_rtx;
4332 }
4333
4334 gcc_assert (GET_CODE (ops[1]) == REG);
4335
4336 ops[0] = operands[0];
4337 ops[3] = operands[2];
4338 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4339 return \"\";
4340 }"
4341 [(set_attr "length" "4")
4342 (set_attr "type" "load_byte")
4343 (set_attr "pool_range" "1020")]
4344 )
4345
4346 ;; We used to have an early-clobber on the scratch register here.
4347 ;; However, there's a bug somewhere in reload which means that this
4348 ;; can be partially ignored during spill allocation if the memory
4349 ;; address also needs reloading; this causes us to die later on when
4350 ;; we try to verify the operands. Fortunately, we don't really need
4351 ;; the early-clobber: we can always use operand 0 if operand 2
4352 ;; overlaps the address.
4353 (define_insn "*thumb1_extendhisi2_insn_v6"
4354 [(set (match_operand:SI 0 "register_operand" "=l,l")
4355 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4356 (clobber (match_scratch:SI 2 "=X,l"))]
4357 "TARGET_THUMB1 && arm_arch6"
4358 "*
4359 {
4360 rtx ops[4];
4361 rtx mem;
4362
4363 if (which_alternative == 0)
4364 return \"sxth\\t%0, %1\";
4365
4366 mem = XEXP (operands[1], 0);
4367
4368 /* This code used to try to use 'V', and fix the address only if it was
4369 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4370 range of QImode offsets, and offsettable_address_p does a QImode
4371 address check. */
4372
4373 if (GET_CODE (mem) == CONST)
4374 mem = XEXP (mem, 0);
4375
4376 if (GET_CODE (mem) == LABEL_REF)
4377 return \"ldr\\t%0, %1\";
4378
4379 if (GET_CODE (mem) == PLUS)
4380 {
4381 rtx a = XEXP (mem, 0);
4382 rtx b = XEXP (mem, 1);
4383
4384 if (GET_CODE (a) == LABEL_REF
4385 && GET_CODE (b) == CONST_INT)
4386 return \"ldr\\t%0, %1\";
4387
4388 if (GET_CODE (b) == REG)
4389 return \"ldrsh\\t%0, %1\";
4390
4391 ops[1] = a;
4392 ops[2] = b;
4393 }
4394 else
4395 {
4396 ops[1] = mem;
4397 ops[2] = const0_rtx;
4398 }
4399
4400 gcc_assert (GET_CODE (ops[1]) == REG);
4401
4402 ops[0] = operands[0];
4403 if (reg_mentioned_p (operands[2], ops[1]))
4404 ops[3] = ops[0];
4405 else
4406 ops[3] = operands[2];
4407 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4408 return \"\";
4409 }"
4410 [(set_attr "length" "2,4")
4411 (set_attr "type" "alu_shift,load_byte")
4412 (set_attr "pool_range" "*,1020")]
4413 )
4414
4415 ;; This pattern will only be used when ldrsh is not available.
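;; It loads the two bytes separately with zero extension, shifts the more
;; significant byte left by 24 and then arithmetically right by 16 (so it
;; ends up in bits 8-15 with its sign copied into the upper bits), and
;; ORs the low byte into bits 0-7; the BYTES_BIG_ENDIAN test below only
;; decides which of the two loaded bytes is the significant one.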
4416 (define_expand "extendhisi2_mem"
4417 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4418 (set (match_dup 3)
4419 (zero_extend:SI (match_dup 7)))
4420 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4421 (set (match_operand:SI 0 "" "")
4422 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4423 "TARGET_ARM"
4424 "
4425 {
4426 rtx mem1, mem2;
4427 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4428
4429 mem1 = change_address (operands[1], QImode, addr);
4430 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4431 operands[0] = gen_lowpart (SImode, operands[0]);
4432 operands[1] = mem1;
4433 operands[2] = gen_reg_rtx (SImode);
4434 operands[3] = gen_reg_rtx (SImode);
4435 operands[6] = gen_reg_rtx (SImode);
4436 operands[7] = mem2;
4437
4438 if (BYTES_BIG_ENDIAN)
4439 {
4440 operands[4] = operands[2];
4441 operands[5] = operands[3];
4442 }
4443 else
4444 {
4445 operands[4] = operands[3];
4446 operands[5] = operands[2];
4447 }
4448 }"
4449 )
4450
4451 (define_insn "*arm_extendhisi2"
4452 [(set (match_operand:SI 0 "s_register_operand" "=r")
4453 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4454 "TARGET_ARM && arm_arch4 && !arm_arch6"
4455 "ldr%(sh%)\\t%0, %1"
4456 [(set_attr "type" "load_byte")
4457 (set_attr "predicable" "yes")
4458 (set_attr "pool_range" "256")
4459 (set_attr "neg_pool_range" "244")]
4460 )
4461
4462 ;; ??? Check Thumb-2 pool range
4463 (define_insn "*arm_extendhisi2_v6"
4464 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4465 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4466 "TARGET_32BIT && arm_arch6"
4467 "@
4468 sxth%?\\t%0, %1
4469 ldr%(sh%)\\t%0, %1"
4470 [(set_attr "type" "alu_shift,load_byte")
4471 (set_attr "predicable" "yes")
4472 (set_attr "pool_range" "*,256")
4473 (set_attr "neg_pool_range" "*,244")]
4474 )
4475
4476 (define_insn "*arm_extendhisi2addsi"
4477 [(set (match_operand:SI 0 "s_register_operand" "=r")
4478 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4479 (match_operand:SI 2 "s_register_operand" "r")))]
4480 "TARGET_INT_SIMD"
4481 "sxtah%?\\t%0, %2, %1"
4482 )
4483
4484 (define_expand "extendqihi2"
4485 [(set (match_dup 2)
4486 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4487 (const_int 24)))
4488 (set (match_operand:HI 0 "s_register_operand" "")
4489 (ashiftrt:SI (match_dup 2)
4490 (const_int 24)))]
4491 "TARGET_ARM"
4492 "
4493 {
4494 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4495 {
4496 emit_insn (gen_rtx_SET (VOIDmode,
4497 operands[0],
4498 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4499 DONE;
4500 }
4501 if (!s_register_operand (operands[1], QImode))
4502 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4503 operands[0] = gen_lowpart (SImode, operands[0]);
4504 operands[1] = gen_lowpart (SImode, operands[1]);
4505 operands[2] = gen_reg_rtx (SImode);
4506 }"
4507 )
4508
4509 (define_insn "*arm_extendqihi_insn"
4510 [(set (match_operand:HI 0 "s_register_operand" "=r")
4511 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4512 "TARGET_ARM && arm_arch4"
4513 "ldr%(sb%)\\t%0, %1"
4514 [(set_attr "type" "load_byte")
4515 (set_attr "predicable" "yes")
4516 (set_attr "pool_range" "256")
4517 (set_attr "neg_pool_range" "244")]
4518 )
4519
4520 (define_expand "extendqisi2"
4521 [(set (match_dup 2)
4522 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4523 (const_int 24)))
4524 (set (match_operand:SI 0 "s_register_operand" "")
4525 (ashiftrt:SI (match_dup 2)
4526 (const_int 24)))]
4527 "TARGET_EITHER"
4528 "
4529 {
4530 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4531 {
4532 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4533 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4534 DONE;
4535 }
4536
4537 if (!s_register_operand (operands[1], QImode))
4538 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4539
4540 if (arm_arch6)
4541 {
4542 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4543 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4544 DONE;
4545 }
4546
4547 operands[1] = gen_lowpart (SImode, operands[1]);
4548 operands[2] = gen_reg_rtx (SImode);
4549 }"
4550 )
4551
4552 (define_insn "*arm_extendqisi"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r")
4554 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4555 "TARGET_ARM && arm_arch4 && !arm_arch6"
4556 "ldr%(sb%)\\t%0, %1"
4557 [(set_attr "type" "load_byte")
4558 (set_attr "predicable" "yes")
4559 (set_attr "pool_range" "256")
4560 (set_attr "neg_pool_range" "244")]
4561 )
4562
4563 (define_insn "*arm_extendqisi_v6"
4564 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4565 (sign_extend:SI
4566 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4567 "TARGET_ARM && arm_arch6"
4568 "@
4569 sxtb%?\\t%0, %1
4570 ldr%(sb%)\\t%0, %1"
4571 [(set_attr "type" "alu_shift,load_byte")
4572 (set_attr "predicable" "yes")
4573 (set_attr "pool_range" "*,256")
4574 (set_attr "neg_pool_range" "*,244")]
4575 )
4576
4577 (define_insn "*arm_extendqisi2addsi"
4578 [(set (match_operand:SI 0 "s_register_operand" "=r")
4579 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4580 (match_operand:SI 2 "s_register_operand" "r")))]
4581 "TARGET_INT_SIMD"
4582 "sxtab%?\\t%0, %2, %1"
4583 [(set_attr "type" "alu_shift")
4584 (set_attr "insn" "xtab")
4585 (set_attr "predicable" "yes")]
4586 )
4587
4588 (define_insn "*thumb1_extendqisi2"
4589 [(set (match_operand:SI 0 "register_operand" "=l,l")
4590 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4591 "TARGET_THUMB1 && !arm_arch6"
4592 "*
4593 {
4594 rtx ops[3];
4595 rtx mem = XEXP (operands[1], 0);
4596
4597 if (GET_CODE (mem) == CONST)
4598 mem = XEXP (mem, 0);
4599
4600 if (GET_CODE (mem) == LABEL_REF)
4601 return \"ldr\\t%0, %1\";
4602
4603 if (GET_CODE (mem) == PLUS
4604 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4605 return \"ldr\\t%0, %1\";
4606
4607 if (which_alternative == 0)
4608 return \"ldrsb\\t%0, %1\";
4609
4610 ops[0] = operands[0];
4611
4612 if (GET_CODE (mem) == PLUS)
4613 {
4614 rtx a = XEXP (mem, 0);
4615 rtx b = XEXP (mem, 1);
4616
4617 ops[1] = a;
4618 ops[2] = b;
4619
4620 if (GET_CODE (a) == REG)
4621 {
4622 if (GET_CODE (b) == REG)
4623 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4624 else if (REGNO (a) == REGNO (ops[0]))
4625 {
4626 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4627 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4628 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4629 }
4630 else
4631 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4632 }
4633 else
4634 {
4635 gcc_assert (GET_CODE (b) == REG);
4636 if (REGNO (b) == REGNO (ops[0]))
4637 {
4638 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4639 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4640 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4641 }
4642 else
4643 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4644 }
4645 }
4646 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4647 {
4648 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4649 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4650 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4651 }
4652 else
4653 {
4654 ops[1] = mem;
4655 ops[2] = const0_rtx;
4656
4657 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4658 }
4659 return \"\";
4660 }"
4661 [(set_attr "length" "2,6")
4662 (set_attr "type" "load_byte,load_byte")
4663 (set_attr "pool_range" "32,32")]
4664 )
4665
4666 (define_insn "*thumb1_extendqisi2_v6"
4667 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4668 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4669 "TARGET_THUMB1 && arm_arch6"
4670 "*
4671 {
4672 rtx ops[3];
4673 rtx mem;
4674
4675 if (which_alternative == 0)
4676 return \"sxtb\\t%0, %1\";
4677
4678 mem = XEXP (operands[1], 0);
4679
4680 if (GET_CODE (mem) == CONST)
4681 mem = XEXP (mem, 0);
4682
4683 if (GET_CODE (mem) == LABEL_REF)
4684 return \"ldr\\t%0, %1\";
4685
4686 if (GET_CODE (mem) == PLUS
4687 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4688 return \"ldr\\t%0, %1\";
4689
4690 if (which_alternative == 1)
4691 return \"ldrsb\\t%0, %1\";
4692
4693 ops[0] = operands[0];
4694
4695 if (GET_CODE (mem) == PLUS)
4696 {
4697 rtx a = XEXP (mem, 0);
4698 rtx b = XEXP (mem, 1);
4699
4700 ops[1] = a;
4701 ops[2] = b;
4702
4703 if (GET_CODE (a) == REG)
4704 {
4705 if (GET_CODE (b) == REG)
4706 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4707 else if (REGNO (a) == REGNO (ops[0]))
4708 {
4709 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4710 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4711 }
4712 else
4713 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4714 }
4715 else
4716 {
4717 gcc_assert (GET_CODE (b) == REG);
4718 if (REGNO (b) == REGNO (ops[0]))
4719 {
4720 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4721 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4722 }
4723 else
4724 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4725 }
4726 }
4727 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4728 {
4729 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4730 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4731 }
4732 else
4733 {
4734 ops[1] = mem;
4735 ops[2] = const0_rtx;
4736
4737 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4738 }
4739 return \"\";
4740 }"
4741 [(set_attr "length" "2,2,4")
4742 (set_attr "type" "alu_shift,load_byte,load_byte")
4743 (set_attr "pool_range" "*,32,32")]
4744 )
4745
4746 (define_expand "extendsfdf2"
4747 [(set (match_operand:DF 0 "s_register_operand" "")
4748 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4749 "TARGET_32BIT && TARGET_HARD_FLOAT"
4750 ""
4751 )
4752
4753 /* HFmode -> DFmode conversions have to go through SFmode. */
4754 (define_expand "extendhfdf2"
4755 [(set (match_operand:DF 0 "general_operand" "")
4756 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4757 "TARGET_EITHER"
4758 "
4759 {
4760 rtx op1;
4761 op1 = convert_to_mode (SFmode, operands[1], 0);
4762 op1 = convert_to_mode (DFmode, op1, 0);
4763 emit_insn (gen_movdf (operands[0], op1));
4764 DONE;
4765 }"
4766 )
4767 \f
4768 ;; Move insns (including loads and stores)
4769
4770 ;; XXX Just some ideas about movti.
4771 ;; I don't think these are a good idea on the ARM; there just aren't
4772 ;; enough registers.
4773 ;;(define_expand "loadti"
4774 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4775 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4776 ;; "" "")
4777
4778 ;;(define_expand "storeti"
4779 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4780 ;; (match_operand:TI 1 "s_register_operand" ""))]
4781 ;; "" "")
4782
4783 ;;(define_expand "movti"
4784 ;; [(set (match_operand:TI 0 "general_operand" "")
4785 ;; (match_operand:TI 1 "general_operand" ""))]
4786 ;; ""
4787 ;; "
4788 ;;{
4789 ;; rtx insn;
4790 ;;
4791 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4792 ;; operands[1] = copy_to_reg (operands[1]);
4793 ;; if (GET_CODE (operands[0]) == MEM)
4794 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4795 ;; else if (GET_CODE (operands[1]) == MEM)
4796 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4797 ;; else
4798 ;; FAIL;
4799 ;;
4800 ;; emit_insn (insn);
4801 ;; DONE;
4802 ;;}")
4803
4804 ;; Recognize garbage generated above.
4805
4806 ;;(define_insn ""
4807 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4808 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4809 ;; ""
4810 ;; "*
4811 ;; {
4812 ;; register mem = (which_alternative < 3);
4813 ;; register const char *template;
4814 ;;
4815 ;; operands[mem] = XEXP (operands[mem], 0);
4816 ;; switch (which_alternative)
4817 ;; {
4818 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4819 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4820 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4821 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4822 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4823 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4824 ;; }
4825 ;; output_asm_insn (template, operands);
4826 ;; return \"\";
4827 ;; }")
4828
4829 (define_expand "movdi"
4830 [(set (match_operand:DI 0 "general_operand" "")
4831 (match_operand:DI 1 "general_operand" ""))]
4832 "TARGET_EITHER"
4833 "
4834 if (can_create_pseudo_p ())
4835 {
4836 if (GET_CODE (operands[0]) != REG)
4837 operands[1] = force_reg (DImode, operands[1]);
4838 }
4839 "
4840 )
4841
4842 (define_insn "*arm_movdi"
4843 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4844 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4845 "TARGET_ARM
4846 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4847 && !TARGET_IWMMXT
4848 && ( register_operand (operands[0], DImode)
4849 || register_operand (operands[1], DImode))"
4850 "*
4851 switch (which_alternative)
4852 {
4853 case 0:
4854 case 1:
4855 case 2:
4856 return \"#\";
4857 default:
4858 return output_move_double (operands);
4859 }
4860 "
4861 [(set_attr "length" "8,12,16,8,8")
4862 (set_attr "type" "*,*,*,load2,store2")
4863 (set_attr "pool_range" "*,*,*,1020,*")
4864 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4865 )
4866
4867 (define_split
4868 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4869 (match_operand:ANY64 1 "const_double_operand" ""))]
4870 "TARGET_32BIT
4871 && reload_completed
4872 && (arm_const_double_inline_cost (operands[1])
4873 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4874 [(const_int 0)]
4875 "
4876 arm_split_constant (SET, SImode, curr_insn,
4877 INTVAL (gen_lowpart (SImode, operands[1])),
4878 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4879 arm_split_constant (SET, SImode, curr_insn,
4880 INTVAL (gen_highpart_mode (SImode,
4881 GET_MODE (operands[0]),
4882 operands[1])),
4883 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4884 DONE;
4885 "
4886 )
4887
4888 ; If optimizing for size, or if we have load delay slots, then
4889 ; we want to split the constant into two separate operations.
4890 ; In both cases this may split a trivial part into a single data op
4891 ; leaving a single complex constant to load. We can also get longer
4892 ; offsets in a LDR which means we get better chances of sharing the pool
4893 ; entries. Finally, we can normally do a better job of scheduling
4894 ; LDR instructions than we can with LDM.
4895 ; This pattern will only match if the one above did not.
4896 (define_split
4897 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4898 (match_operand:ANY64 1 "const_double_operand" ""))]
4899 "TARGET_ARM && reload_completed
4900 && arm_const_double_by_parts (operands[1])"
4901 [(set (match_dup 0) (match_dup 1))
4902 (set (match_dup 2) (match_dup 3))]
4903 "
4904 operands[2] = gen_highpart (SImode, operands[0]);
4905 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4906 operands[1]);
4907 operands[0] = gen_lowpart (SImode, operands[0]);
4908 operands[1] = gen_lowpart (SImode, operands[1]);
4909 "
4910 )
4911
4912 (define_split
4913 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4914 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4915 "TARGET_EITHER && reload_completed"
4916 [(set (match_dup 0) (match_dup 1))
4917 (set (match_dup 2) (match_dup 3))]
4918 "
4919 operands[2] = gen_highpart (SImode, operands[0]);
4920 operands[3] = gen_highpart (SImode, operands[1]);
4921 operands[0] = gen_lowpart (SImode, operands[0]);
4922 operands[1] = gen_lowpart (SImode, operands[1]);
4923
4924 /* Handle a partial overlap. */
4925 if (rtx_equal_p (operands[0], operands[3]))
4926 {
4927 rtx tmp0 = operands[0];
4928 rtx tmp1 = operands[1];
4929
4930 operands[0] = operands[2];
4931 operands[1] = operands[3];
4932 operands[2] = tmp0;
4933 operands[3] = tmp1;
4934 }
4935 "
4936 )
4937
4938 ;; We can't actually do base+index doubleword loads if the index and
4939 ;; destination overlap.  Split here so that we at least have a chance to
4940 ;; schedule.
4941 (define_split
4942 [(set (match_operand:DI 0 "s_register_operand" "")
4943 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4944 (match_operand:SI 2 "s_register_operand" ""))))]
4945 "TARGET_LDRD
4946 && reg_overlap_mentioned_p (operands[0], operands[1])
4947 && reg_overlap_mentioned_p (operands[0], operands[2])"
4948 [(set (match_dup 4)
4949 (plus:SI (match_dup 1)
4950 (match_dup 2)))
4951 (set (match_dup 0)
4952 (mem:DI (match_dup 4)))]
4953 "
4954 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4955 "
4956 )
4957
4958 ;;; ??? This should have alternatives for constants.
4959 ;;; ??? This was originally identical to the movdf_insn pattern.
4960 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4961 ;;; thumb_reorg with a memory reference.
4962 (define_insn "*thumb1_movdi_insn"
4963 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4964 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4965 "TARGET_THUMB1
4966 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4967 && ( register_operand (operands[0], DImode)
4968 || register_operand (operands[1], DImode))"
4969 "*
4970 {
4971 switch (which_alternative)
4972 {
4973 default:
4974 case 0:
4975 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4976 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4977 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4978 case 1:
4979 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4980 case 2:
4981 operands[1] = GEN_INT (- INTVAL (operands[1]));
4982 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4983 case 3:
4984 return \"ldmia\\t%1, {%0, %H0}\";
4985 case 4:
4986 return \"stmia\\t%0, {%1, %H1}\";
4987 case 5:
4988 return thumb_load_double_from_address (operands);
4989 case 6:
4990 operands[2] = gen_rtx_MEM (SImode,
4991 plus_constant (XEXP (operands[0], 0), 4));
4992 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4993 return \"\";
4994 case 7:
4995 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4996 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4997 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4998 }
4999 }"
5000 [(set_attr "length" "4,4,6,2,2,6,4,4")
5001 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5002 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5003 )
5004
5005 (define_expand "movsi"
5006 [(set (match_operand:SI 0 "general_operand" "")
5007 (match_operand:SI 1 "general_operand" ""))]
5008 "TARGET_EITHER"
5009 "
5010 {
5011 rtx base, offset, tmp;
5012
5013 if (TARGET_32BIT)
5014 {
5015 /* Everything except mem = const or mem = mem can be done easily. */
5016 if (GET_CODE (operands[0]) == MEM)
5017 operands[1] = force_reg (SImode, operands[1]);
5018 if (arm_general_register_operand (operands[0], SImode)
5019 && GET_CODE (operands[1]) == CONST_INT
5020 && !(const_ok_for_arm (INTVAL (operands[1]))
5021 || const_ok_for_arm (~INTVAL (operands[1]))))
5022 {
5023 arm_split_constant (SET, SImode, NULL_RTX,
5024 INTVAL (operands[1]), operands[0], NULL_RTX,
5025 optimize && can_create_pseudo_p ());
5026 DONE;
5027 }
5028
5029 if (TARGET_USE_MOVT && !target_word_relocations
5030 && GET_CODE (operands[1]) == SYMBOL_REF
5031 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5032 {
5033 arm_emit_movpair (operands[0], operands[1]);
5034 DONE;
5035 }
5036 }
5037 else /* TARGET_THUMB1... */
5038 {
5039 if (can_create_pseudo_p ())
5040 {
5041 if (GET_CODE (operands[0]) != REG)
5042 operands[1] = force_reg (SImode, operands[1]);
5043 }
5044 }
5045
5046 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5047 {
5048 split_const (operands[1], &base, &offset);
5049 if (GET_CODE (base) == SYMBOL_REF
5050 && !offset_within_block_p (base, INTVAL (offset)))
5051 {
5052 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5053 emit_move_insn (tmp, base);
5054 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5055 DONE;
5056 }
5057 }
5058
5059 /* Recognize the case where operand[1] is a reference to thread-local
5060 data and load its address to a register. */
5061 if (arm_tls_referenced_p (operands[1]))
5062 {
5063 rtx tmp = operands[1];
5064 rtx addend = NULL;
5065
5066 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5067 {
5068 addend = XEXP (XEXP (tmp, 0), 1);
5069 tmp = XEXP (XEXP (tmp, 0), 0);
5070 }
5071
5072 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5073 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5074
5075 tmp = legitimize_tls_address (tmp,
5076 !can_create_pseudo_p () ? operands[0] : 0);
5077 if (addend)
5078 {
5079 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5080 tmp = force_operand (tmp, operands[0]);
5081 }
5082 operands[1] = tmp;
5083 }
5084 else if (flag_pic
5085 && (CONSTANT_P (operands[1])
5086 || symbol_mentioned_p (operands[1])
5087 || label_mentioned_p (operands[1])))
5088 operands[1] = legitimize_pic_address (operands[1], SImode,
5089 (!can_create_pseudo_p ()
5090 ? operands[0]
5091 : 0));
5092 }
5093 "
5094 )
5095
5096 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5097 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5098 ;; so this does not matter.
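;; As a purely illustrative sketch (symbol and register names hypothetical),
;; loading the 32-bit address of `foo' with this scheme comes out as:
;;      movw    r0, #:lower16:foo    @ emitted for the HIGH half
;;      movt    r0, #:upper16:foo    @ emitted by the pattern below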
5099 (define_insn "*arm_movt"
5100 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5101 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5102 (match_operand:SI 2 "general_operand" "i")))]
5103 "TARGET_32BIT"
5104 "movt%?\t%0, #:upper16:%c2"
5105 [(set_attr "predicable" "yes")
5106 (set_attr "length" "4")]
5107 )
5108
5109 (define_insn "*arm_movsi_insn"
5110 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5111 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5112 "TARGET_ARM && ! TARGET_IWMMXT
5113 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5114 && ( register_operand (operands[0], SImode)
5115 || register_operand (operands[1], SImode))"
5116 "@
5117 mov%?\\t%0, %1
5118 mov%?\\t%0, %1
5119 mvn%?\\t%0, #%B1
5120 movw%?\\t%0, %1
5121 ldr%?\\t%0, %1
5122 str%?\\t%1, %0"
5123 [(set_attr "type" "*,*,*,*,load1,store1")
5124 (set_attr "predicable" "yes")
5125 (set_attr "pool_range" "*,*,*,*,4096,*")
5126 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5127 )
5128
5129 (define_split
5130 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5131 (match_operand:SI 1 "const_int_operand" ""))]
5132 "TARGET_32BIT
5133 && (!(const_ok_for_arm (INTVAL (operands[1]))
5134 || const_ok_for_arm (~INTVAL (operands[1]))))"
5135 [(clobber (const_int 0))]
5136 "
5137 arm_split_constant (SET, SImode, NULL_RTX,
5138 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5139 DONE;
5140 "
5141 )
5142
5143 (define_insn "*thumb1_movsi_insn"
5144 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5145 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5146 "TARGET_THUMB1
5147 && ( register_operand (operands[0], SImode)
5148 || register_operand (operands[1], SImode))"
5149 "@
5150 mov %0, %1
5151 mov %0, %1
5152 #
5153 #
5154 ldmia\\t%1, {%0}
5155 stmia\\t%0, {%1}
5156 ldr\\t%0, %1
5157 str\\t%1, %0
5158 mov\\t%0, %1"
5159 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5160 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5161 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5162 )
5163
5164 (define_split
5165 [(set (match_operand:SI 0 "register_operand" "")
5166 (match_operand:SI 1 "const_int_operand" ""))]
5167 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5168 [(set (match_dup 0) (match_dup 1))
5169 (set (match_dup 0) (neg:SI (match_dup 0)))]
5170 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5171 )
5172
5173 (define_split
5174 [(set (match_operand:SI 0 "register_operand" "")
5175 (match_operand:SI 1 "const_int_operand" ""))]
5176 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5177 [(set (match_dup 0) (match_dup 1))
5178 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5179 "
5180 {
5181 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5182 unsigned HOST_WIDE_INT mask = 0xff;
5183 int i;
5184
5185 for (i = 0; i < 25; i++)
5186 if ((val & (mask << i)) == val)
5187 break;
5188
5189 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5190 if (i == 0)
5191 FAIL;
5192
5193 operands[1] = GEN_INT (val >> i);
5194 operands[2] = GEN_INT (i);
5195 }"
5196 )
5197
5198 ;; When generating pic, we need to load the symbol offset into a register.
5199 ;; So that the optimizer does not confuse this with a normal symbol load
5200 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5201 ;; since that is the only type of relocation we can use.
5202
5203 ;; The rather odd constraints on the following are to force reload to leave
5204 ;; the insn alone, and to force the minipool generation pass to then move
5205 ;; the GOT symbol to memory.
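;; As a rough illustration only (register and label names hypothetical), the
;; resulting ARM code for a GOT-indirect reference looks something like:
;;      ldr     r3, .LPOOLn          @ pic_load_addr_arm: GOT offset from pool
;;      ldr     r3, [r9, r3]         @ index the GOT via the PIC register
;; where r9 stands in for whatever register currently holds the PIC base.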
5206
5207 (define_insn "pic_load_addr_arm"
5208 [(set (match_operand:SI 0 "s_register_operand" "=r")
5209 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5210 "TARGET_ARM && flag_pic"
5211 "ldr%?\\t%0, %1"
5212 [(set_attr "type" "load1")
5213 (set (attr "pool_range") (const_int 4096))
5214 (set (attr "neg_pool_range") (const_int 4084))]
5215 )
5216
5217 (define_insn "pic_load_addr_thumb1"
5218 [(set (match_operand:SI 0 "s_register_operand" "=l")
5219 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5220 "TARGET_THUMB1 && flag_pic"
5221 "ldr\\t%0, %1"
5222 [(set_attr "type" "load1")
5223 (set (attr "pool_range") (const_int 1024))]
5224 )
5225
5226 (define_insn "pic_add_dot_plus_four"
5227 [(set (match_operand:SI 0 "register_operand" "=r")
5228 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5229 (const_int 4)
5230 (match_operand 2 "" "")]
5231 UNSPEC_PIC_BASE))]
5232 "TARGET_THUMB1"
5233 "*
5234 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5235 INTVAL (operands[2]));
5236 return \"add\\t%0, %|pc\";
5237 "
5238 [(set_attr "length" "2")]
5239 )
5240
5241 (define_insn "pic_add_dot_plus_eight"
5242 [(set (match_operand:SI 0 "register_operand" "=r")
5243 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5244 (const_int 8)
5245 (match_operand 2 "" "")]
5246 UNSPEC_PIC_BASE))]
5247 "TARGET_ARM"
5248 "*
5249 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5250 INTVAL (operands[2]));
5251 return \"add%?\\t%0, %|pc, %1\";
5252 "
5253 [(set_attr "predicable" "yes")]
5254 )
5255
5256 (define_insn "tls_load_dot_plus_eight"
5257 [(set (match_operand:SI 0 "register_operand" "+r")
5258 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5259 (const_int 8)
5260 (match_operand 2 "" "")]
5261 UNSPEC_PIC_BASE)))]
5262 "TARGET_ARM"
5263 "*
5264 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5265 INTVAL (operands[2]));
5266 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5267 "
5268 [(set_attr "predicable" "yes")]
5269 )
5270
5271 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5272 ;; followed by a load. These sequences can be crunched down to
5273 ;; tls_load_dot_plus_eight by a peephole.
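;; For illustration (registers hypothetical), the peephole turns
;;      add     rA, pc, rB
;;      ldr     rC, [rA]
;; into the single
;;      ldr     rC, [pc, rB]
;; provided rA is dead after the load.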
5274
5275 (define_peephole2
5276 [(set (match_operand:SI 0 "register_operand" "")
5277 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5278 (const_int 8)
5279 (match_operand 1 "" "")]
5280 UNSPEC_PIC_BASE))
5281 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5282 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5283 [(set (match_dup 2)
5284 (mem:SI (unspec:SI [(match_dup 3)
5285 (const_int 8)
5286 (match_dup 1)]
5287 UNSPEC_PIC_BASE)))]
5288 ""
5289 )
5290
5291 (define_insn "pic_offset_arm"
5292 [(set (match_operand:SI 0 "register_operand" "=r")
5293 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5294 (unspec:SI [(match_operand:SI 2 "" "X")]
5295 UNSPEC_PIC_OFFSET))))]
5296 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5297 "ldr%?\\t%0, [%1,%2]"
5298 [(set_attr "type" "load1")]
5299 )
5300
5301 (define_expand "builtin_setjmp_receiver"
5302 [(label_ref (match_operand 0 "" ""))]
5303 "flag_pic"
5304 "
5305 {
5306 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5307 register. */
5308 if (arm_pic_register != INVALID_REGNUM)
5309 arm_load_pic_register (1UL << 3);
5310 DONE;
5311 }")
5312
5313 ;; If copying one reg to another we can set the condition codes according to
5314 ;; its value. Such a move is common after a return from a subroutine when the
5315 ;; result is being tested against zero.
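;; A minimal sketch (registers hypothetical): rather than
;;      mov     r0, r1
;;      cmp     r0, #0
;; the pattern below emits a single flag-setting copy,
;;      subs    r0, r1, #0
;; or just "cmp r0, #0" when source and destination are already the same.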
5316
5317 (define_insn "*movsi_compare0"
5318 [(set (reg:CC CC_REGNUM)
5319 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5320 (const_int 0)))
5321 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5322 (match_dup 1))]
5323 "TARGET_32BIT"
5324 "@
5325 cmp%?\\t%0, #0
5326 sub%.\\t%0, %1, #0"
5327 [(set_attr "conds" "set")]
5328 )
5329
5330 ;; Subroutine to store a half word from a register into memory.
5331 ;; Operand 0 is the source register (HImode)
5332 ;; Operand 1 is the destination address in a register (SImode)
5333
5334 ;; In both this routine and the next, we must be careful not to spill
5335 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5336 ;; can generate unrecognizable rtl.
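;; As a rough illustration of the storehi expansion below (registers
;; hypothetical), storing the halfword in r1 at the address held in r2
;; becomes two byte stores:
;;      strb    r1, [r2, #0]         @ low byte
;;      mov     r3, r1, asr #8       @ extract the high byte
;;      strb    r3, [r2, #1]         @ high byte
;; storehi_bigend does the same with the two byte positions swapped.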
5337
5338 (define_expand "storehi"
5339 [;; store the low byte
5340 (set (match_operand 1 "" "") (match_dup 3))
5341 ;; extract the high byte
5342 (set (match_dup 2)
5343 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5344 ;; store the high byte
5345 (set (match_dup 4) (match_dup 5))]
5346 "TARGET_ARM"
5347 "
5348 {
5349 rtx op1 = operands[1];
5350 rtx addr = XEXP (op1, 0);
5351 enum rtx_code code = GET_CODE (addr);
5352
5353 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5354 || code == MINUS)
5355 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5356
5357 operands[4] = adjust_address (op1, QImode, 1);
5358 operands[1] = adjust_address (operands[1], QImode, 0);
5359 operands[3] = gen_lowpart (QImode, operands[0]);
5360 operands[0] = gen_lowpart (SImode, operands[0]);
5361 operands[2] = gen_reg_rtx (SImode);
5362 operands[5] = gen_lowpart (QImode, operands[2]);
5363 }"
5364 )
5365
5366 (define_expand "storehi_bigend"
5367 [(set (match_dup 4) (match_dup 3))
5368 (set (match_dup 2)
5369 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5370 (set (match_operand 1 "" "") (match_dup 5))]
5371 "TARGET_ARM"
5372 "
5373 {
5374 rtx op1 = operands[1];
5375 rtx addr = XEXP (op1, 0);
5376 enum rtx_code code = GET_CODE (addr);
5377
5378 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5379 || code == MINUS)
5380 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5381
5382 operands[4] = adjust_address (op1, QImode, 1);
5383 operands[1] = adjust_address (operands[1], QImode, 0);
5384 operands[3] = gen_lowpart (QImode, operands[0]);
5385 operands[0] = gen_lowpart (SImode, operands[0]);
5386 operands[2] = gen_reg_rtx (SImode);
5387 operands[5] = gen_lowpart (QImode, operands[2]);
5388 }"
5389 )
5390
5391 ;; Subroutine to store a half word integer constant into memory.
5392 (define_expand "storeinthi"
5393 [(set (match_operand 0 "" "")
5394 (match_operand 1 "" ""))
5395 (set (match_dup 3) (match_dup 2))]
5396 "TARGET_ARM"
5397 "
5398 {
5399 HOST_WIDE_INT value = INTVAL (operands[1]);
5400 rtx addr = XEXP (operands[0], 0);
5401 rtx op0 = operands[0];
5402 enum rtx_code code = GET_CODE (addr);
5403
5404 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5405 || code == MINUS)
5406 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5407
5408 operands[1] = gen_reg_rtx (SImode);
5409 if (BYTES_BIG_ENDIAN)
5410 {
5411 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5412 if ((value & 255) == ((value >> 8) & 255))
5413 operands[2] = operands[1];
5414 else
5415 {
5416 operands[2] = gen_reg_rtx (SImode);
5417 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5418 }
5419 }
5420 else
5421 {
5422 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5423 if ((value & 255) == ((value >> 8) & 255))
5424 operands[2] = operands[1];
5425 else
5426 {
5427 operands[2] = gen_reg_rtx (SImode);
5428 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5429 }
5430 }
5431
5432 operands[3] = adjust_address (op0, QImode, 1);
5433 operands[0] = adjust_address (operands[0], QImode, 0);
5434 operands[2] = gen_lowpart (QImode, operands[2]);
5435 operands[1] = gen_lowpart (QImode, operands[1]);
5436 }"
5437 )
5438
5439 (define_expand "storehi_single_op"
5440 [(set (match_operand:HI 0 "memory_operand" "")
5441 (match_operand:HI 1 "general_operand" ""))]
5442 "TARGET_32BIT && arm_arch4"
5443 "
5444 if (!s_register_operand (operands[1], HImode))
5445 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5446 "
5447 )
5448
5449 (define_expand "movhi"
5450 [(set (match_operand:HI 0 "general_operand" "")
5451 (match_operand:HI 1 "general_operand" ""))]
5452 "TARGET_EITHER"
5453 "
5454 if (TARGET_ARM)
5455 {
5456 if (can_create_pseudo_p ())
5457 {
5458 if (GET_CODE (operands[0]) == MEM)
5459 {
5460 if (arm_arch4)
5461 {
5462 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5463 DONE;
5464 }
5465 if (GET_CODE (operands[1]) == CONST_INT)
5466 emit_insn (gen_storeinthi (operands[0], operands[1]));
5467 else
5468 {
5469 if (GET_CODE (operands[1]) == MEM)
5470 operands[1] = force_reg (HImode, operands[1]);
5471 if (BYTES_BIG_ENDIAN)
5472 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5473 else
5474 emit_insn (gen_storehi (operands[1], operands[0]));
5475 }
5476 DONE;
5477 }
5478 /* Sign extend a constant, and keep it in an SImode reg. */
5479 else if (GET_CODE (operands[1]) == CONST_INT)
5480 {
5481 rtx reg = gen_reg_rtx (SImode);
5482 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5483
5484 /* If the constant is already valid, leave it alone. */
5485 if (!const_ok_for_arm (val))
5486 {
5487 /* If setting all the top bits will make the constant
5488 loadable in a single instruction, then set them.
5489 Otherwise, sign extend the number. */
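	    /* Illustrative examples (values arbitrary): 0xff00 becomes
	       0xffffff00, whose complement 0xff is a valid immediate, so a
	       single MVN can load it; 0x8123 is merely sign-extended to
	       0xffff8123 and left for the SImode move below to split.  */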
5490
5491 if (const_ok_for_arm (~(val | ~0xffff)))
5492 val |= ~0xffff;
5493 else if (val & 0x8000)
5494 val |= ~0xffff;
5495 }
5496
5497 emit_insn (gen_movsi (reg, GEN_INT (val)));
5498 operands[1] = gen_lowpart (HImode, reg);
5499 }
5500 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5501 && GET_CODE (operands[1]) == MEM)
5502 {
5503 rtx reg = gen_reg_rtx (SImode);
5504
5505 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5506 operands[1] = gen_lowpart (HImode, reg);
5507 }
5508 else if (!arm_arch4)
5509 {
5510 if (GET_CODE (operands[1]) == MEM)
5511 {
5512 rtx base;
5513 rtx offset = const0_rtx;
5514 rtx reg = gen_reg_rtx (SImode);
5515
5516 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5517 || (GET_CODE (base) == PLUS
5518 && (GET_CODE (offset = XEXP (base, 1))
5519 == CONST_INT)
5520 && ((INTVAL (offset) & 1) != 1)
5521 && GET_CODE (base = XEXP (base, 0)) == REG))
5522 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5523 {
5524 rtx new_rtx;
5525
5526 new_rtx = widen_memory_access (operands[1], SImode,
5527 ((INTVAL (offset) & ~3)
5528 - INTVAL (offset)));
5529 emit_insn (gen_movsi (reg, new_rtx));
5530 if (((INTVAL (offset) & 2) != 0)
5531 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5532 {
5533 rtx reg2 = gen_reg_rtx (SImode);
5534
5535 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5536 reg = reg2;
5537 }
5538 }
5539 else
5540 emit_insn (gen_movhi_bytes (reg, operands[1]));
5541
5542 operands[1] = gen_lowpart (HImode, reg);
5543 }
5544 }
5545 }
5546 /* Handle loading a large integer during reload. */
5547 else if (GET_CODE (operands[1]) == CONST_INT
5548 && !const_ok_for_arm (INTVAL (operands[1]))
5549 && !const_ok_for_arm (~INTVAL (operands[1])))
5550 {
5551 /* Writing a constant to memory needs a scratch, which should
5552 be handled with SECONDARY_RELOADs. */
5553 gcc_assert (GET_CODE (operands[0]) == REG);
5554
5555 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5556 emit_insn (gen_movsi (operands[0], operands[1]));
5557 DONE;
5558 }
5559 }
5560 else if (TARGET_THUMB2)
5561 {
5562 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5563 if (can_create_pseudo_p ())
5564 {
5565 if (GET_CODE (operands[0]) != REG)
5566 operands[1] = force_reg (HImode, operands[1]);
5567 /* Zero extend a constant, and keep it in an SImode reg. */
5568 else if (GET_CODE (operands[1]) == CONST_INT)
5569 {
5570 rtx reg = gen_reg_rtx (SImode);
5571 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5572
5573 emit_insn (gen_movsi (reg, GEN_INT (val)));
5574 operands[1] = gen_lowpart (HImode, reg);
5575 }
5576 }
5577 }
5578 else /* TARGET_THUMB1 */
5579 {
5580 if (can_create_pseudo_p ())
5581 {
5582 if (GET_CODE (operands[1]) == CONST_INT)
5583 {
5584 rtx reg = gen_reg_rtx (SImode);
5585
5586 emit_insn (gen_movsi (reg, operands[1]));
5587 operands[1] = gen_lowpart (HImode, reg);
5588 }
5589
5590 /* ??? We shouldn't really get invalid addresses here, but this can
5591 happen if we are passed an SP-relative address (never OK for
5592 HImode/QImode) or a virtual-register-relative address (also rejected
5593 as illegitimate for HImode/QImode). */
5594 /* ??? This should perhaps be fixed elsewhere, for instance, in
5595 fixup_stack_1, by checking for other kinds of invalid addresses,
5596 e.g. a bare reference to a virtual register. This may confuse the
5597 alpha though, which must handle this case differently. */
5598 if (GET_CODE (operands[0]) == MEM
5599 && !memory_address_p (GET_MODE (operands[0]),
5600 XEXP (operands[0], 0)))
5601 operands[0]
5602 = replace_equiv_address (operands[0],
5603 copy_to_reg (XEXP (operands[0], 0)));
5604
5605 if (GET_CODE (operands[1]) == MEM
5606 && !memory_address_p (GET_MODE (operands[1]),
5607 XEXP (operands[1], 0)))
5608 operands[1]
5609 = replace_equiv_address (operands[1],
5610 copy_to_reg (XEXP (operands[1], 0)));
5611
5612 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5613 {
5614 rtx reg = gen_reg_rtx (SImode);
5615
5616 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5617 operands[1] = gen_lowpart (HImode, reg);
5618 }
5619
5620 if (GET_CODE (operands[0]) == MEM)
5621 operands[1] = force_reg (HImode, operands[1]);
5622 }
5623 else if (GET_CODE (operands[1]) == CONST_INT
5624 && !satisfies_constraint_I (operands[1]))
5625 {
5626 /* Handle loading a large integer during reload. */
5627
5628 /* Writing a constant to memory needs a scratch, which should
5629 be handled with SECONDARY_RELOADs. */
5630 gcc_assert (GET_CODE (operands[0]) == REG);
5631
5632 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5633 emit_insn (gen_movsi (operands[0], operands[1]));
5634 DONE;
5635 }
5636 }
5637 "
5638 )
5639
5640 (define_insn "*thumb1_movhi_insn"
5641 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5642 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5643 "TARGET_THUMB1
5644 && ( register_operand (operands[0], HImode)
5645 || register_operand (operands[1], HImode))"
5646 "*
5647 switch (which_alternative)
5648 {
5649 case 0: return \"add %0, %1, #0\";
5650 case 2: return \"strh %1, %0\";
5651 case 3: return \"mov %0, %1\";
5652 case 4: return \"mov %0, %1\";
5653 case 5: return \"mov %0, %1\";
5654 default: gcc_unreachable ();
5655 case 1:
5656 /* The stack pointer can end up being taken as an index register.
5657 Catch this case here and deal with it. */
5658 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5659 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5660 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5661 {
5662 rtx ops[2];
5663 ops[0] = operands[0];
5664 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5665
5666 output_asm_insn (\"mov %0, %1\", ops);
5667
5668 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5669
5670 }
5671 return \"ldrh %0, %1\";
5672 }"
5673 [(set_attr "length" "2,4,2,2,2,2")
5674 (set_attr "type" "*,load1,store1,*,*,*")]
5675 )
5676
5677
5678 (define_expand "movhi_bytes"
5679 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5680 (set (match_dup 3)
5681 (zero_extend:SI (match_dup 6)))
5682 (set (match_operand:SI 0 "" "")
5683 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5684 "TARGET_ARM"
5685 "
5686 {
5687 rtx mem1, mem2;
5688 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5689
5690 mem1 = change_address (operands[1], QImode, addr);
5691 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5692 operands[0] = gen_lowpart (SImode, operands[0]);
5693 operands[1] = mem1;
5694 operands[2] = gen_reg_rtx (SImode);
5695 operands[3] = gen_reg_rtx (SImode);
5696 operands[6] = mem2;
5697
5698 if (BYTES_BIG_ENDIAN)
5699 {
5700 operands[4] = operands[2];
5701 operands[5] = operands[3];
5702 }
5703 else
5704 {
5705 operands[4] = operands[3];
5706 operands[5] = operands[2];
5707 }
5708 }"
5709 )
5710
5711 (define_expand "movhi_bigend"
5712 [(set (match_dup 2)
5713 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5714 (const_int 16)))
5715 (set (match_dup 3)
5716 (ashiftrt:SI (match_dup 2) (const_int 16)))
5717 (set (match_operand:HI 0 "s_register_operand" "")
5718 (match_dup 4))]
5719 "TARGET_ARM"
5720 "
5721 operands[2] = gen_reg_rtx (SImode);
5722 operands[3] = gen_reg_rtx (SImode);
5723 operands[4] = gen_lowpart (HImode, operands[3]);
5724 "
5725 )
5726
5727 ;; Pattern to recognize insns generated by the default case above.
5728 (define_insn "*movhi_insn_arch4"
5729 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5730 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5731 "TARGET_ARM
5732 && arm_arch4
5733 && (GET_CODE (operands[1]) != CONST_INT
5734 || const_ok_for_arm (INTVAL (operands[1]))
5735 || const_ok_for_arm (~INTVAL (operands[1])))"
5736 "@
5737 mov%?\\t%0, %1\\t%@ movhi
5738 mvn%?\\t%0, #%B1\\t%@ movhi
5739 str%(h%)\\t%1, %0\\t%@ movhi
5740 ldr%(h%)\\t%0, %1\\t%@ movhi"
5741 [(set_attr "type" "*,*,store1,load1")
5742 (set_attr "predicable" "yes")
5743 (set_attr "pool_range" "*,*,*,256")
5744 (set_attr "neg_pool_range" "*,*,*,244")]
5745 )
5746
5747 (define_insn "*movhi_bytes"
5748 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5749 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5750 "TARGET_ARM"
5751 "@
5752 mov%?\\t%0, %1\\t%@ movhi
5753 mvn%?\\t%0, #%B1\\t%@ movhi"
5754 [(set_attr "predicable" "yes")]
5755 )
5756
5757 (define_expand "thumb_movhi_clobber"
5758 [(set (match_operand:HI 0 "memory_operand" "")
5759 (match_operand:HI 1 "register_operand" ""))
5760 (clobber (match_operand:DI 2 "register_operand" ""))]
5761 "TARGET_THUMB1"
5762 "
5763 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5764 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5765 {
5766 emit_insn (gen_movhi (operands[0], operands[1]));
5767 DONE;
5768 }
5769 /* XXX Fixme, need to handle other cases here as well. */
5770 gcc_unreachable ();
5771 "
5772 )
5773
5774 ;; We use a DImode scratch because we may occasionally need an additional
5775 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5776 ;; to take any notice of the "o" constraint on the reload memory operand.
5777 (define_expand "reload_outhi"
5778 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5779 (match_operand:HI 1 "s_register_operand" "r")
5780 (match_operand:DI 2 "s_register_operand" "=&l")])]
5781 "TARGET_EITHER"
5782 "if (TARGET_ARM)
5783 arm_reload_out_hi (operands);
5784 else
5785 thumb_reload_out_hi (operands);
5786 DONE;
5787 "
5788 )
5789
5790 (define_expand "reload_inhi"
5791 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5792 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5793 (match_operand:DI 2 "s_register_operand" "=&r")])]
5794 "TARGET_EITHER"
5795 "
5796 if (TARGET_ARM)
5797 arm_reload_in_hi (operands);
5798 else
5799 thumb_reload_out_hi (operands);
5800 DONE;
5801 ")
5802
5803 (define_expand "movqi"
5804 [(set (match_operand:QI 0 "general_operand" "")
5805 (match_operand:QI 1 "general_operand" ""))]
5806 "TARGET_EITHER"
5807 "
5808 /* Everything except mem = const or mem = mem can be done easily. */
5809
5810 if (can_create_pseudo_p ())
5811 {
5812 if (GET_CODE (operands[1]) == CONST_INT)
5813 {
5814 rtx reg = gen_reg_rtx (SImode);
5815
5816 emit_insn (gen_movsi (reg, operands[1]));
5817 operands[1] = gen_lowpart (QImode, reg);
5818 }
5819
5820 if (TARGET_THUMB)
5821 {
5822 /* ??? We shouldn't really get invalid addresses here, but this can
5823 happen if we are passed an SP-relative address (never OK for
5824 HImode/QImode) or a virtual-register-relative address (also rejected
5825 as illegitimate for HImode/QImode). */
5826 /* ??? This should perhaps be fixed elsewhere, for instance, in
5827 fixup_stack_1, by checking for other kinds of invalid addresses,
5828 e.g. a bare reference to a virtual register. This may confuse the
5829 alpha though, which must handle this case differently. */
5830 if (GET_CODE (operands[0]) == MEM
5831 && !memory_address_p (GET_MODE (operands[0]),
5832 XEXP (operands[0], 0)))
5833 operands[0]
5834 = replace_equiv_address (operands[0],
5835 copy_to_reg (XEXP (operands[0], 0)));
5836 if (GET_CODE (operands[1]) == MEM
5837 && !memory_address_p (GET_MODE (operands[1]),
5838 XEXP (operands[1], 0)))
5839 operands[1]
5840 = replace_equiv_address (operands[1],
5841 copy_to_reg (XEXP (operands[1], 0)));
5842 }
5843
5844 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5845 {
5846 rtx reg = gen_reg_rtx (SImode);
5847
5848 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5849 operands[1] = gen_lowpart (QImode, reg);
5850 }
5851
5852 if (GET_CODE (operands[0]) == MEM)
5853 operands[1] = force_reg (QImode, operands[1]);
5854 }
5855 else if (TARGET_THUMB
5856 && GET_CODE (operands[1]) == CONST_INT
5857 && !satisfies_constraint_I (operands[1]))
5858 {
5859 /* Handle loading a large integer during reload. */
5860
5861 /* Writing a constant to memory needs a scratch, which should
5862 be handled with SECONDARY_RELOADs. */
5863 gcc_assert (GET_CODE (operands[0]) == REG);
5864
5865 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5866 emit_insn (gen_movsi (operands[0], operands[1]));
5867 DONE;
5868 }
5869 "
5870 )
5871
5872
5873 (define_insn "*arm_movqi_insn"
5874 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5875 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5876 "TARGET_32BIT
5877 && ( register_operand (operands[0], QImode)
5878 || register_operand (operands[1], QImode))"
5879 "@
5880 mov%?\\t%0, %1
5881 mvn%?\\t%0, #%B1
5882 ldr%(b%)\\t%0, %1
5883 str%(b%)\\t%1, %0"
5884 [(set_attr "type" "*,*,load1,store1")
5885 (set_attr "predicable" "yes")]
5886 )
5887
5888 (define_insn "*thumb1_movqi_insn"
5889 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5890 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5891 "TARGET_THUMB1
5892 && ( register_operand (operands[0], QImode)
5893 || register_operand (operands[1], QImode))"
5894 "@
5895 add\\t%0, %1, #0
5896 ldrb\\t%0, %1
5897 strb\\t%1, %0
5898 mov\\t%0, %1
5899 mov\\t%0, %1
5900 mov\\t%0, %1"
5901 [(set_attr "length" "2")
5902 (set_attr "type" "*,load1,store1,*,*,*")
5903 (set_attr "pool_range" "*,32,*,*,*,*")]
5904 )
5905
5906 ;; HFmode moves
5907 (define_expand "movhf"
5908 [(set (match_operand:HF 0 "general_operand" "")
5909 (match_operand:HF 1 "general_operand" ""))]
5910 "TARGET_EITHER"
5911 "
5912 if (TARGET_32BIT)
5913 {
5914 if (GET_CODE (operands[0]) == MEM)
5915 operands[1] = force_reg (HFmode, operands[1]);
5916 }
5917 else /* TARGET_THUMB1 */
5918 {
5919 if (can_create_pseudo_p ())
5920 {
5921 if (GET_CODE (operands[0]) != REG)
5922 operands[1] = force_reg (HFmode, operands[1]);
5923 }
5924 }
5925 "
5926 )
5927
5928 (define_insn "*arm32_movhf"
5929 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5930 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5931 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_NEON_FP16)
5932 && ( s_register_operand (operands[0], HFmode)
5933 || s_register_operand (operands[1], HFmode))"
5934 "*
5935 switch (which_alternative)
5936 {
5937 case 0: /* ARM register from memory */
5938 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5939 case 1: /* memory from ARM register */
5940 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5941 case 2: /* ARM register from ARM register */
5942 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5943 case 3: /* ARM register from constant */
5944 {
5945 REAL_VALUE_TYPE r;
5946 long bits;
5947 rtx ops[4];
5948
5949 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5950 bits = real_to_target (NULL, &r, HFmode);
5951 ops[0] = operands[0];
5952 ops[1] = GEN_INT (bits);
5953 ops[2] = GEN_INT (bits & 0xff00);
5954 ops[3] = GEN_INT (bits & 0x00ff);
5955
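	/* Note: an arbitrary 16-bit pattern is generally not a valid ARM
	   immediate, but each of its two bytes (bits & 0xff00 and
	   bits & 0x00ff) is, which is why the non-MOVW path rebuilds the
	   value with a MOV of one byte followed by an ORR of the other.  */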
5956 if (arm_arch_thumb2)
5957 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5958 else
5959 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5960 return \"\";
5961 }
5962 default:
5963 gcc_unreachable ();
5964 }
5965 "
5966 [(set_attr "conds" "unconditional")
5967 (set_attr "type" "load1,store1,*,*")
5968 (set_attr "length" "4,4,4,8")
5969 (set_attr "predicable" "yes")
5970 ]
5971 )
5972
5973 (define_insn "*thumb1_movhf"
5974 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
5975 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
5976 "TARGET_THUMB1
5977 && ( s_register_operand (operands[0], HFmode)
5978 || s_register_operand (operands[1], HFmode))"
5979 "*
5980 switch (which_alternative)
5981 {
5982 case 1:
5983 {
5984 rtx addr;
5985 gcc_assert (GET_CODE (operands[1]) == MEM);
5986 addr = XEXP (operands[1], 0);
5987 if (GET_CODE (addr) == LABEL_REF
5988 || (GET_CODE (addr) == CONST
5989 && GET_CODE (XEXP (addr, 0)) == PLUS
5990 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
5991 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
5992 {
5993 /* Constant pool entry. */
5994 return \"ldr\\t%0, %1\";
5995 }
5996 return \"ldrh\\t%0, %1\";
5997 }
5998 case 2: return \"strh\\t%1, %0\";
5999 default: return \"mov\\t%0, %1\";
6000 }
6001 "
6002 [(set_attr "length" "2")
6003 (set_attr "type" "*,load1,store1,*,*")
6004 (set_attr "pool_range" "*,1020,*,*,*")]
6005 )
6006
6007 (define_expand "movsf"
6008 [(set (match_operand:SF 0 "general_operand" "")
6009 (match_operand:SF 1 "general_operand" ""))]
6010 "TARGET_EITHER"
6011 "
6012 if (TARGET_32BIT)
6013 {
6014 if (GET_CODE (operands[0]) == MEM)
6015 operands[1] = force_reg (SFmode, operands[1]);
6016 }
6017 else /* TARGET_THUMB1 */
6018 {
6019 if (can_create_pseudo_p ())
6020 {
6021 if (GET_CODE (operands[0]) != REG)
6022 operands[1] = force_reg (SFmode, operands[1]);
6023 }
6024 }
6025 "
6026 )
6027
6028 ;; When moving a floating-point constant into a core register, rewrite the
6029 ;; move as an SImode operation.
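;; For example (purely illustrative), an SFmode constant 1.0 destined for a
;; core register is rewritten as an SImode move of its bit pattern
;; 0x3f800000, which the ordinary SImode constant handling then loads or
;; splits as usual.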
6030 (define_split
6031 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6032 (match_operand:SF 1 "immediate_operand" ""))]
6033 "TARGET_32BIT
6034 && reload_completed
6035 && GET_CODE (operands[1]) == CONST_DOUBLE"
6036 [(set (match_dup 2) (match_dup 3))]
6037 "
6038 operands[2] = gen_lowpart (SImode, operands[0]);
6039 operands[3] = gen_lowpart (SImode, operands[1]);
6040 if (operands[2] == 0 || operands[3] == 0)
6041 FAIL;
6042 "
6043 )
6044
6045 (define_insn "*arm_movsf_soft_insn"
6046 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6047 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6048 "TARGET_ARM
6049 && TARGET_SOFT_FLOAT
6050 && (GET_CODE (operands[0]) != MEM
6051 || register_operand (operands[1], SFmode))"
6052 "@
6053 mov%?\\t%0, %1
6054 ldr%?\\t%0, %1\\t%@ float
6055 str%?\\t%1, %0\\t%@ float"
6056 [(set_attr "length" "4,4,4")
6057 (set_attr "predicable" "yes")
6058 (set_attr "type" "*,load1,store1")
6059 (set_attr "pool_range" "*,4096,*")
6060 (set_attr "neg_pool_range" "*,4084,*")]
6061 )
6062
6063 ;;; ??? This should have alternatives for constants.
6064 (define_insn "*thumb1_movsf_insn"
6065 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6066 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6067 "TARGET_THUMB1
6068 && ( register_operand (operands[0], SFmode)
6069 || register_operand (operands[1], SFmode))"
6070 "@
6071 add\\t%0, %1, #0
6072 ldmia\\t%1, {%0}
6073 stmia\\t%0, {%1}
6074 ldr\\t%0, %1
6075 str\\t%1, %0
6076 mov\\t%0, %1
6077 mov\\t%0, %1"
6078 [(set_attr "length" "2")
6079 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6080 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6081 )
6082
6083 (define_expand "movdf"
6084 [(set (match_operand:DF 0 "general_operand" "")
6085 (match_operand:DF 1 "general_operand" ""))]
6086 "TARGET_EITHER"
6087 "
6088 if (TARGET_32BIT)
6089 {
6090 if (GET_CODE (operands[0]) == MEM)
6091 operands[1] = force_reg (DFmode, operands[1]);
6092 }
6093 else /* TARGET_THUMB */
6094 {
6095 if (can_create_pseudo_p ())
6096 {
6097 if (GET_CODE (operands[0]) != REG)
6098 operands[1] = force_reg (DFmode, operands[1]);
6099 }
6100 }
6101 "
6102 )
6103
6104 ;; Reloading a DFmode value stored in integer registers to memory can require
6105 ;; a scratch register.
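;; For instance (registers hypothetical), storing a soft-float DFmode value
;; held in {r0, r1} to an address of the form "r4 + large offset" first
;; computes base plus offset into the scratch register and then stores
;; through it.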
6106 (define_expand "reload_outdf"
6107 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6108 (match_operand:DF 1 "s_register_operand" "r")
6109 (match_operand:SI 2 "s_register_operand" "=&r")]
6110 "TARGET_32BIT"
6111 "
6112 {
6113 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6114
6115 if (code == REG)
6116 operands[2] = XEXP (operands[0], 0);
6117 else if (code == POST_INC || code == PRE_DEC)
6118 {
6119 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6120 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6121 emit_insn (gen_movdi (operands[0], operands[1]));
6122 DONE;
6123 }
6124 else if (code == PRE_INC)
6125 {
6126 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6127
6128 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6129 operands[2] = reg;
6130 }
6131 else if (code == POST_DEC)
6132 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6133 else
6134 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6135 XEXP (XEXP (operands[0], 0), 1)));
6136
6137 emit_insn (gen_rtx_SET (VOIDmode,
6138 replace_equiv_address (operands[0], operands[2]),
6139 operands[1]));
6140
6141 if (code == POST_DEC)
6142 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6143
6144 DONE;
6145 }"
6146 )
6147
6148 (define_insn "*movdf_soft_insn"
6149 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6150 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6151 "TARGET_ARM && TARGET_SOFT_FLOAT
6152 && ( register_operand (operands[0], DFmode)
6153 || register_operand (operands[1], DFmode))"
6154 "*
6155 switch (which_alternative)
6156 {
6157 case 0:
6158 case 1:
6159 case 2:
6160 return \"#\";
6161 default:
6162 return output_move_double (operands);
6163 }
6164 "
6165 [(set_attr "length" "8,12,16,8,8")
6166 (set_attr "type" "*,*,*,load2,store2")
6167 (set_attr "pool_range" "1020")
6168 (set_attr "neg_pool_range" "1008")]
6169 )
6170
6171 ;;; ??? This should have alternatives for constants.
6172 ;;; ??? This was originally identical to the movdi_insn pattern.
6173 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6174 ;;; thumb_reorg with a memory reference.
6175 (define_insn "*thumb_movdf_insn"
6176 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6177 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6178 "TARGET_THUMB1
6179 && ( register_operand (operands[0], DFmode)
6180 || register_operand (operands[1], DFmode))"
6181 "*
6182 switch (which_alternative)
6183 {
6184 default:
6185 case 0:
6186 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6187 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6188 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6189 case 1:
6190 return \"ldmia\\t%1, {%0, %H0}\";
6191 case 2:
6192 return \"stmia\\t%0, {%1, %H1}\";
6193 case 3:
6194 return thumb_load_double_from_address (operands);
6195 case 4:
6196 operands[2] = gen_rtx_MEM (SImode,
6197 plus_constant (XEXP (operands[0], 0), 4));
6198 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6199 return \"\";
6200 case 5:
6201 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6202 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6203 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6204 }
6205 "
6206 [(set_attr "length" "4,2,2,6,4,4")
6207 (set_attr "type" "*,load2,store2,load2,store2,*")
6208 (set_attr "pool_range" "*,*,*,1020,*,*")]
6209 )
6210
6211 (define_expand "movxf"
6212 [(set (match_operand:XF 0 "general_operand" "")
6213 (match_operand:XF 1 "general_operand" ""))]
6214 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6215 "
6216 if (GET_CODE (operands[0]) == MEM)
6217 operands[1] = force_reg (XFmode, operands[1]);
6218 "
6219 )
6220
6221 \f
6222
6223 ;; load- and store-multiple insns
6224 ;; The ARM can load/store any set of registers, provided that they are in
6225 ;; ascending order; but expressing that is beyond GCC, so stick with what it knows.
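;; For illustration (register choice hypothetical), a single
;;      ldmia   r0, {r4, r5, r6}
;; loads r4, r5 and r6 from [r0], [r0, #4] and [r0, #8] respectively, which
;; is essentially the parallel of sets described by the *ldmsi3 pattern below.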
6226
6227 (define_expand "load_multiple"
6228 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6229 (match_operand:SI 1 "" ""))
6230 (use (match_operand:SI 2 "" ""))])]
6231 "TARGET_32BIT"
6232 {
6233 HOST_WIDE_INT offset = 0;
6234
6235 /* Support only fixed point registers. */
6236 if (GET_CODE (operands[2]) != CONST_INT
6237 || INTVAL (operands[2]) > 14
6238 || INTVAL (operands[2]) < 2
6239 || GET_CODE (operands[1]) != MEM
6240 || GET_CODE (operands[0]) != REG
6241 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6242 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6243 FAIL;
6244
6245 operands[3]
6246 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6247 force_reg (SImode, XEXP (operands[1], 0)),
6248 TRUE, FALSE, operands[1], &offset);
6249 })
6250
6251 ;; Load multiple with write-back
6252
6253 (define_insn "*ldmsi_postinc4"
6254 [(match_parallel 0 "load_multiple_operation"
6255 [(set (match_operand:SI 1 "s_register_operand" "=r")
6256 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6257 (const_int 16)))
6258 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6259 (mem:SI (match_dup 2)))
6260 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6261 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6262 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6263 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6264 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6265 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6266 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6267 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6268 [(set_attr "type" "load4")
6269 (set_attr "predicable" "yes")]
6270 )
6271
6272 (define_insn "*ldmsi_postinc4_thumb1"
6273 [(match_parallel 0 "load_multiple_operation"
6274 [(set (match_operand:SI 1 "s_register_operand" "=l")
6275 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6276 (const_int 16)))
6277 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6278 (mem:SI (match_dup 2)))
6279 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6280 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6281 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6282 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6283 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6284 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6285 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6286 "ldmia\\t%1!, {%3, %4, %5, %6}"
6287 [(set_attr "type" "load4")]
6288 )
6289
6290 (define_insn "*ldmsi_postinc3"
6291 [(match_parallel 0 "load_multiple_operation"
6292 [(set (match_operand:SI 1 "s_register_operand" "=r")
6293 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6294 (const_int 12)))
6295 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6296 (mem:SI (match_dup 2)))
6297 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6298 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6299 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6300 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6301 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6302 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6303 [(set_attr "type" "load3")
6304 (set_attr "predicable" "yes")]
6305 )
6306
6307 (define_insn "*ldmsi_postinc2"
6308 [(match_parallel 0 "load_multiple_operation"
6309 [(set (match_operand:SI 1 "s_register_operand" "=r")
6310 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6311 (const_int 8)))
6312 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6313 (mem:SI (match_dup 2)))
6314 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6315 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6316 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6317 "ldm%(ia%)\\t%1!, {%3, %4}"
6318 [(set_attr "type" "load2")
6319 (set_attr "predicable" "yes")]
6320 )
6321
6322 ;; Ordinary load multiple
6323
6324 (define_insn "*ldmsi4"
6325 [(match_parallel 0 "load_multiple_operation"
6326 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6327 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6328 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6329 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6330 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6331 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6332 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6333 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6334 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6335 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6336 [(set_attr "type" "load4")
6337 (set_attr "predicable" "yes")]
6338 )
6339
6340 (define_insn "*ldmsi3"
6341 [(match_parallel 0 "load_multiple_operation"
6342 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6343 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6344 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6345 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6346 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6347 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6348 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6349 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6350 [(set_attr "type" "load3")
6351 (set_attr "predicable" "yes")]
6352 )
6353
6354 (define_insn "*ldmsi2"
6355 [(match_parallel 0 "load_multiple_operation"
6356 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6357 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6358 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6359 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6360 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6361 "ldm%(ia%)\\t%1, {%2, %3}"
6362 [(set_attr "type" "load2")
6363 (set_attr "predicable" "yes")]
6364 )
6365
6366 (define_expand "store_multiple"
6367 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6368 (match_operand:SI 1 "" ""))
6369 (use (match_operand:SI 2 "" ""))])]
6370 "TARGET_32BIT"
6371 {
6372 HOST_WIDE_INT offset = 0;
6373
6374 /* Support only fixed point registers. */
6375 if (GET_CODE (operands[2]) != CONST_INT
6376 || INTVAL (operands[2]) > 14
6377 || INTVAL (operands[2]) < 2
6378 || GET_CODE (operands[1]) != REG
6379 || GET_CODE (operands[0]) != MEM
6380 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6381 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6382 FAIL;
6383
6384 operands[3]
6385 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6386 force_reg (SImode, XEXP (operands[0], 0)),
6387 TRUE, FALSE, operands[0], &offset);
6388 })
6389
6390 ;; Store multiple with write-back
6391
6392 (define_insn "*stmsi_postinc4"
6393 [(match_parallel 0 "store_multiple_operation"
6394 [(set (match_operand:SI 1 "s_register_operand" "=r")
6395 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6396 (const_int 16)))
6397 (set (mem:SI (match_dup 2))
6398 (match_operand:SI 3 "arm_hard_register_operand" ""))
6399 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6400 (match_operand:SI 4 "arm_hard_register_operand" ""))
6401 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6402 (match_operand:SI 5 "arm_hard_register_operand" ""))
6403 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6404 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6405 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6406 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6407 [(set_attr "predicable" "yes")
6408 (set_attr "type" "store4")]
6409 )
6410
6411 (define_insn "*stmsi_postinc4_thumb1"
6412 [(match_parallel 0 "store_multiple_operation"
6413 [(set (match_operand:SI 1 "s_register_operand" "=l")
6414 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6415 (const_int 16)))
6416 (set (mem:SI (match_dup 2))
6417 (match_operand:SI 3 "arm_hard_register_operand" ""))
6418 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6419 (match_operand:SI 4 "arm_hard_register_operand" ""))
6420 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6421 (match_operand:SI 5 "arm_hard_register_operand" ""))
6422 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6423 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6424 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6425 "stmia\\t%1!, {%3, %4, %5, %6}"
6426 [(set_attr "type" "store4")]
6427 )
6428
6429 (define_insn "*stmsi_postinc3"
6430 [(match_parallel 0 "store_multiple_operation"
6431 [(set (match_operand:SI 1 "s_register_operand" "=r")
6432 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6433 (const_int 12)))
6434 (set (mem:SI (match_dup 2))
6435 (match_operand:SI 3 "arm_hard_register_operand" ""))
6436 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6437 (match_operand:SI 4 "arm_hard_register_operand" ""))
6438 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6439 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6440 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6441 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6442 [(set_attr "predicable" "yes")
6443 (set_attr "type" "store3")]
6444 )
6445
6446 (define_insn "*stmsi_postinc2"
6447 [(match_parallel 0 "store_multiple_operation"
6448 [(set (match_operand:SI 1 "s_register_operand" "=r")
6449 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6450 (const_int 8)))
6451 (set (mem:SI (match_dup 2))
6452 (match_operand:SI 3 "arm_hard_register_operand" ""))
6453 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6454 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6455 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6456 "stm%(ia%)\\t%1!, {%3, %4}"
6457 [(set_attr "predicable" "yes")
6458 (set_attr "type" "store2")]
6459 )
6460
6461 ;; Ordinary store multiple
6462
6463 (define_insn "*stmsi4"
6464 [(match_parallel 0 "store_multiple_operation"
6465 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6466 (match_operand:SI 2 "arm_hard_register_operand" ""))
6467 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6468 (match_operand:SI 3 "arm_hard_register_operand" ""))
6469 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6470 (match_operand:SI 4 "arm_hard_register_operand" ""))
6471 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6472 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6473 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6474 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6475 [(set_attr "predicable" "yes")
6476 (set_attr "type" "store4")]
6477 )
6478
6479 (define_insn "*stmsi3"
6480 [(match_parallel 0 "store_multiple_operation"
6481 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6482 (match_operand:SI 2 "arm_hard_register_operand" ""))
6483 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6484 (match_operand:SI 3 "arm_hard_register_operand" ""))
6485 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6486 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6487 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6488 "stm%(ia%)\\t%1, {%2, %3, %4}"
6489 [(set_attr "predicable" "yes")
6490 (set_attr "type" "store3")]
6491 )
6492
6493 (define_insn "*stmsi2"
6494 [(match_parallel 0 "store_multiple_operation"
6495 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6496 (match_operand:SI 2 "arm_hard_register_operand" ""))
6497 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6498 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6499 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6500 "stm%(ia%)\\t%1, {%2, %3}"
6501 [(set_attr "predicable" "yes")
6502 (set_attr "type" "store2")]
6503 )
6504
6505 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6506 ;; We could let this apply to smaller blocks as well, but it clobbers so
6507 ;; many registers that there is probably a better way for those.
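;; Roughly speaking (scratch registers chosen arbitrarily), a 12-byte
;; word-aligned copy on Thumb-1 ends up as a pair of multiple transfers,
;;      ldmia   r1!, {r3, r4, r5}
;;      stmia   r0!, {r3, r4, r5}
;; leaving both pointers just past the copied block, as the movmem12b
;; pattern below describes.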
6508
6509 (define_expand "movmemqi"
6510 [(match_operand:BLK 0 "general_operand" "")
6511 (match_operand:BLK 1 "general_operand" "")
6512 (match_operand:SI 2 "const_int_operand" "")
6513 (match_operand:SI 3 "const_int_operand" "")]
6514 "TARGET_EITHER"
6515 "
6516 if (TARGET_32BIT)
6517 {
6518 if (arm_gen_movmemqi (operands))
6519 DONE;
6520 FAIL;
6521 }
6522 else /* TARGET_THUMB1 */
6523 {
6524 if ( INTVAL (operands[3]) != 4
6525 || INTVAL (operands[2]) > 48)
6526 FAIL;
6527
6528 thumb_expand_movmemqi (operands);
6529 DONE;
6530 }
6531 "
6532 )
6533
6534 ;; Thumb block-move insns
6535
6536 (define_insn "movmem12b"
6537 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6538 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6539 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6540 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6541 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6542 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6543 (set (match_operand:SI 0 "register_operand" "=l")
6544 (plus:SI (match_dup 2) (const_int 12)))
6545 (set (match_operand:SI 1 "register_operand" "=l")
6546 (plus:SI (match_dup 3) (const_int 12)))
6547 (clobber (match_scratch:SI 4 "=&l"))
6548 (clobber (match_scratch:SI 5 "=&l"))
6549 (clobber (match_scratch:SI 6 "=&l"))]
6550 "TARGET_THUMB1"
6551 "* return thumb_output_move_mem_multiple (3, operands);"
6552 [(set_attr "length" "4")
6553 ; This isn't entirely accurate: the insn loads as well, but in terms of
6554 ; scheduling the following insn it is better to consider it as a store.
6555 (set_attr "type" "store3")]
6556 )
6557
6558 (define_insn "movmem8b"
6559 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6560 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6561 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6562 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6563 (set (match_operand:SI 0 "register_operand" "=l")
6564 (plus:SI (match_dup 2) (const_int 8)))
6565 (set (match_operand:SI 1 "register_operand" "=l")
6566 (plus:SI (match_dup 3) (const_int 8)))
6567 (clobber (match_scratch:SI 4 "=&l"))
6568 (clobber (match_scratch:SI 5 "=&l"))]
6569 "TARGET_THUMB1"
6570 "* return thumb_output_move_mem_multiple (2, operands);"
6571 [(set_attr "length" "4")
6572 ; This isn't entirely accurate: the insn loads as well, but in terms of
6573 ; scheduling the following insn it is better to consider it as a store.
6574 (set_attr "type" "store2")]
6575 )
6576
6577 \f
6578
6579 ;; Compare & branch insns
6580 ;; The range calculations are as follows:
6581 ;; For forward branches, the address calculation returns the address of
6582 ;; the next instruction. This is 2 beyond the branch instruction.
6583 ;; For backward branches, the address calculation returns the address of
6584 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6585 ;; instruction for the shortest sequence, and 4 before the branch instruction
6586 ;; if we have to jump around an unconditional branch.
6587 ;; To the basic branch range the PC offset must be added (this is +4).
6588 ;; So for forward branches we have
6589 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6590 ;; And for backward branches we have
6591 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6592 ;;
6593 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6594 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
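;; Worked example for the conditional 'b<cond>' case: 254 - 2 + 4 = 256
;; forwards and -256 - (-2) + 4 = -250 backwards, which is the window the
;; length attributes of the branch patterns below test against.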
6595
6596 (define_expand "cbranchsi4"
6597 [(set (pc) (if_then_else
6598 (match_operator 0 "arm_comparison_operator"
6599 [(match_operand:SI 1 "s_register_operand" "")
6600 (match_operand:SI 2 "nonmemory_operand" "")])
6601 (label_ref (match_operand 3 "" ""))
6602 (pc)))]
6603 "TARGET_THUMB1 || TARGET_32BIT"
6604 "
6605 if (!TARGET_THUMB1)
6606 {
6607 if (!arm_add_operand (operands[2], SImode))
6608 operands[2] = force_reg (SImode, operands[2]);
6609 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6610 operands[3]));
6611 DONE;
6612 }
6613 if (thumb1_cmpneg_operand (operands[2], SImode))
6614 {
6615 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6616 operands[3], operands[0]));
6617 DONE;
6618 }
6619 if (!thumb1_cmp_operand (operands[2], SImode))
6620 operands[2] = force_reg (SImode, operands[2]);
6621 ")
6622
6623 (define_expand "cbranchsf4"
6624 [(set (pc) (if_then_else
6625 (match_operator 0 "arm_comparison_operator"
6626 [(match_operand:SF 1 "s_register_operand" "")
6627 (match_operand:SF 2 "arm_float_compare_operand" "")])
6628 (label_ref (match_operand 3 "" ""))
6629 (pc)))]
6630 "TARGET_32BIT && TARGET_HARD_FLOAT"
6631 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6632 operands[3])); DONE;"
6633 )
6634
6635 (define_expand "cbranchdf4"
6636 [(set (pc) (if_then_else
6637 (match_operator 0 "arm_comparison_operator"
6638 [(match_operand:DF 1 "s_register_operand" "")
6639 (match_operand:DF 2 "arm_float_compare_operand" "")])
6640 (label_ref (match_operand 3 "" ""))
6641 (pc)))]
6642 "TARGET_32BIT && TARGET_HARD_FLOAT"
6643 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6644 operands[3])); DONE;"
6645 )
6646
6647 ;; This uses the Cirrus DI compare instruction.
6648 (define_expand "cbranchdi4"
6649 [(set (pc) (if_then_else
6650 (match_operator 0 "arm_comparison_operator"
6651 [(match_operand:DI 1 "cirrus_fp_register" "")
6652 (match_operand:DI 2 "cirrus_fp_register" "")])
6653 (label_ref (match_operand 3 "" ""))
6654 (pc)))]
6655 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6656 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6657 operands[3])); DONE;"
6658 )
6659
6660 (define_insn "*cbranchsi4_insn"
6661 [(set (pc) (if_then_else
6662 (match_operator 0 "arm_comparison_operator"
6663 [(match_operand:SI 1 "s_register_operand" "l,*h")
6664 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6665 (label_ref (match_operand 3 "" ""))
6666 (pc)))]
6667 "TARGET_THUMB1"
6668 "*
6669 output_asm_insn (\"cmp\\t%1, %2\", operands);
6670
6671 switch (get_attr_length (insn))
6672 {
6673 case 4: return \"b%d0\\t%l3\";
6674 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6675 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6676 }
6677 "
6678 [(set (attr "far_jump")
6679 (if_then_else
6680 (eq_attr "length" "8")
6681 (const_string "yes")
6682 (const_string "no")))
6683 (set (attr "length")
6684 (if_then_else
6685 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6686 (le (minus (match_dup 3) (pc)) (const_int 256)))
6687 (const_int 4)
6688 (if_then_else
6689 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6690 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6691 (const_int 6)
6692 (const_int 8))))]
6693 )
6694
6695 (define_insn "cbranchsi4_scratch"
6696 [(set (pc) (if_then_else
6697 (match_operator 4 "arm_comparison_operator"
6698 [(match_operand:SI 1 "s_register_operand" "l,0")
6699 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6700 (label_ref (match_operand 3 "" ""))
6701 (pc)))
6702 (clobber (match_scratch:SI 0 "=l,l"))]
6703 "TARGET_THUMB1"
6704 "*
6705 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6706
6707 switch (get_attr_length (insn))
6708 {
6709 case 4: return \"b%d4\\t%l3\";
6710 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6711 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6712 }
6713 "
6714 [(set (attr "far_jump")
6715 (if_then_else
6716 (eq_attr "length" "8")
6717 (const_string "yes")
6718 (const_string "no")))
6719 (set (attr "length")
6720 (if_then_else
6721 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6722 (le (minus (match_dup 3) (pc)) (const_int 256)))
6723 (const_int 4)
6724 (if_then_else
6725 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6726 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6727 (const_int 6)
6728 (const_int 8))))]
6729 )
6730 (define_insn "*movsi_cbranchsi4"
6731 [(set (pc)
6732 (if_then_else
6733 (match_operator 3 "arm_comparison_operator"
6734 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6735 (const_int 0)])
6736 (label_ref (match_operand 2 "" ""))
6737 (pc)))
6738 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6739 (match_dup 1))]
6740 "TARGET_THUMB1"
6741 "*{
6742 if (which_alternative == 0)
6743 output_asm_insn (\"cmp\t%0, #0\", operands);
6744 else if (which_alternative == 1)
6745 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6746 else
6747 {
6748 output_asm_insn (\"cmp\t%1, #0\", operands);
6749 if (which_alternative == 2)
6750 output_asm_insn (\"mov\t%0, %1\", operands);
6751 else
6752 output_asm_insn (\"str\t%1, %0\", operands);
6753 }
6754 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6755 {
6756 case 4: return \"b%d3\\t%l2\";
6757 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6758 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6759 }
6760 }"
6761 [(set (attr "far_jump")
6762 (if_then_else
6763 (ior (and (gt (symbol_ref ("which_alternative"))
6764 (const_int 1))
6765 (eq_attr "length" "8"))
6766 (eq_attr "length" "10"))
6767 (const_string "yes")
6768 (const_string "no")))
6769 (set (attr "length")
6770 (if_then_else
6771 (le (symbol_ref ("which_alternative"))
6772 (const_int 1))
6773 (if_then_else
6774 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6775 (le (minus (match_dup 2) (pc)) (const_int 256)))
6776 (const_int 4)
6777 (if_then_else
6778 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6779 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6780 (const_int 6)
6781 (const_int 8)))
6782 (if_then_else
6783 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6784 (le (minus (match_dup 2) (pc)) (const_int 256)))
6785 (const_int 6)
6786 (if_then_else
6787 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6788 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6789 (const_int 8)
6790 (const_int 10)))))]
6791 )
6792
6793 (define_insn "*negated_cbranchsi4"
6794 [(set (pc)
6795 (if_then_else
6796 (match_operator 0 "equality_operator"
6797 [(match_operand:SI 1 "s_register_operand" "l")
6798 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6799 (label_ref (match_operand 3 "" ""))
6800 (pc)))]
6801 "TARGET_THUMB1"
6802 "*
6803 output_asm_insn (\"cmn\\t%1, %2\", operands);
6804 switch (get_attr_length (insn))
6805 {
6806 case 4: return \"b%d0\\t%l3\";
6807 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6808 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6809 }
6810 "
6811 [(set (attr "far_jump")
6812 (if_then_else
6813 (eq_attr "length" "8")
6814 (const_string "yes")
6815 (const_string "no")))
6816 (set (attr "length")
6817 (if_then_else
6818 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6819 (le (minus (match_dup 3) (pc)) (const_int 256)))
6820 (const_int 4)
6821 (if_then_else
6822 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6823 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6824 (const_int 6)
6825 (const_int 8))))]
6826 )
6827
6828 (define_insn "*tbit_cbranch"
6829 [(set (pc)
6830 (if_then_else
6831 (match_operator 0 "equality_operator"
6832 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6833 (const_int 1)
6834 (match_operand:SI 2 "const_int_operand" "i"))
6835 (const_int 0)])
6836 (label_ref (match_operand 3 "" ""))
6837 (pc)))
6838 (clobber (match_scratch:SI 4 "=l"))]
6839 "TARGET_THUMB1"
6840 "*
6841 {
6842 rtx op[3];
6843 op[0] = operands[4];
6844 op[1] = operands[1];
6845 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6846
6847 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6848 switch (get_attr_length (insn))
6849 {
6850 case 4: return \"b%d0\\t%l3\";
6851 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6852 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6853 }
6854 }"
6855 [(set (attr "far_jump")
6856 (if_then_else
6857 (eq_attr "length" "8")
6858 (const_string "yes")
6859 (const_string "no")))
6860 (set (attr "length")
6861 (if_then_else
6862 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6863 (le (minus (match_dup 3) (pc)) (const_int 256)))
6864 (const_int 4)
6865 (if_then_else
6866 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6867 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6868 (const_int 6)
6869 (const_int 8))))]
6870 )
6871
6872 (define_insn "*tlobits_cbranch"
6873 [(set (pc)
6874 (if_then_else
6875 (match_operator 0 "equality_operator"
6876 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6877 (match_operand:SI 2 "const_int_operand" "i")
6878 (const_int 0))
6879 (const_int 0)])
6880 (label_ref (match_operand 3 "" ""))
6881 (pc)))
6882 (clobber (match_scratch:SI 4 "=l"))]
6883 "TARGET_THUMB1"
6884 "*
6885 {
6886 rtx op[3];
6887 op[0] = operands[4];
6888 op[1] = operands[1];
6889 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6890
6891 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6892 switch (get_attr_length (insn))
6893 {
6894 case 4: return \"b%d0\\t%l3\";
6895 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6896 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6897 }
6898 }"
6899 [(set (attr "far_jump")
6900 (if_then_else
6901 (eq_attr "length" "8")
6902 (const_string "yes")
6903 (const_string "no")))
6904 (set (attr "length")
6905 (if_then_else
6906 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6907 (le (minus (match_dup 3) (pc)) (const_int 256)))
6908 (const_int 4)
6909 (if_then_else
6910 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6911 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6912 (const_int 6)
6913 (const_int 8))))]
6914 )
6915
6916 (define_insn "*tstsi3_cbranch"
6917 [(set (pc)
6918 (if_then_else
6919 (match_operator 3 "equality_operator"
6920 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6921 (match_operand:SI 1 "s_register_operand" "l"))
6922 (const_int 0)])
6923 (label_ref (match_operand 2 "" ""))
6924 (pc)))]
6925 "TARGET_THUMB1"
6926 "*
6927 {
6928 output_asm_insn (\"tst\\t%0, %1\", operands);
6929 switch (get_attr_length (insn))
6930 {
6931 case 4: return \"b%d3\\t%l2\";
6932 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6933 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6934 }
6935 }"
6936 [(set (attr "far_jump")
6937 (if_then_else
6938 (eq_attr "length" "8")
6939 (const_string "yes")
6940 (const_string "no")))
6941 (set (attr "length")
6942 (if_then_else
6943 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6944 (le (minus (match_dup 2) (pc)) (const_int 256)))
6945 (const_int 4)
6946 (if_then_else
6947 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6948 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6949 (const_int 6)
6950 (const_int 8))))]
6951 )
6952
6953 (define_insn "*andsi3_cbranch"
6954 [(set (pc)
6955 (if_then_else
6956 (match_operator 5 "equality_operator"
6957 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6958 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6959 (const_int 0)])
6960 (label_ref (match_operand 4 "" ""))
6961 (pc)))
6962 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6963 (and:SI (match_dup 2) (match_dup 3)))
6964 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6965 "TARGET_THUMB1"
6966 "*
6967 {
6968 if (which_alternative == 0)
6969 output_asm_insn (\"and\\t%0, %3\", operands);
6970 else if (which_alternative == 1)
6971 {
6972 output_asm_insn (\"and\\t%1, %3\", operands);
6973 output_asm_insn (\"mov\\t%0, %1\", operands);
6974 }
6975 else
6976 {
6977 output_asm_insn (\"and\\t%1, %3\", operands);
6978 output_asm_insn (\"str\\t%1, %0\", operands);
6979 }
6980
6981 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6982 {
6983 case 4: return \"b%d5\\t%l4\";
6984 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6985 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6986 }
6987 }"
6988 [(set (attr "far_jump")
6989 (if_then_else
6990 (ior (and (eq (symbol_ref ("which_alternative"))
6991 (const_int 0))
6992 (eq_attr "length" "8"))
6993 (eq_attr "length" "10"))
6994 (const_string "yes")
6995 (const_string "no")))
6996 (set (attr "length")
6997 (if_then_else
6998 (eq (symbol_ref ("which_alternative"))
6999 (const_int 0))
7000 (if_then_else
7001 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7002 (le (minus (match_dup 4) (pc)) (const_int 256)))
7003 (const_int 4)
7004 (if_then_else
7005 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7006 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7007 (const_int 6)
7008 (const_int 8)))
7009 (if_then_else
7010 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7011 (le (minus (match_dup 4) (pc)) (const_int 256)))
7012 (const_int 6)
7013 (if_then_else
7014 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7015 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7016 (const_int 8)
7017 (const_int 10)))))]
7018 )
7019
7020 (define_insn "*orrsi3_cbranch_scratch"
7021 [(set (pc)
7022 (if_then_else
7023 (match_operator 4 "equality_operator"
7024 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7025 (match_operand:SI 2 "s_register_operand" "l"))
7026 (const_int 0)])
7027 (label_ref (match_operand 3 "" ""))
7028 (pc)))
7029 (clobber (match_scratch:SI 0 "=l"))]
7030 "TARGET_THUMB1"
7031 "*
7032 {
7033 output_asm_insn (\"orr\\t%0, %2\", operands);
7034 switch (get_attr_length (insn))
7035 {
7036 case 4: return \"b%d4\\t%l3\";
7037 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7038 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7039 }
7040 }"
7041 [(set (attr "far_jump")
7042 (if_then_else
7043 (eq_attr "length" "8")
7044 (const_string "yes")
7045 (const_string "no")))
7046 (set (attr "length")
7047 (if_then_else
7048 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7049 (le (minus (match_dup 3) (pc)) (const_int 256)))
7050 (const_int 4)
7051 (if_then_else
7052 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7053 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7054 (const_int 6)
7055 (const_int 8))))]
7056 )
7057
7058 (define_insn "*orrsi3_cbranch"
7059 [(set (pc)
7060 (if_then_else
7061 (match_operator 5 "equality_operator"
7062 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7063 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7064 (const_int 0)])
7065 (label_ref (match_operand 4 "" ""))
7066 (pc)))
7067 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7068 (ior:SI (match_dup 2) (match_dup 3)))
7069 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7070 "TARGET_THUMB1"
7071 "*
7072 {
7073 if (which_alternative == 0)
7074 output_asm_insn (\"orr\\t%0, %3\", operands);
7075 else if (which_alternative == 1)
7076 {
7077 output_asm_insn (\"orr\\t%1, %3\", operands);
7078 output_asm_insn (\"mov\\t%0, %1\", operands);
7079 }
7080 else
7081 {
7082 output_asm_insn (\"orr\\t%1, %3\", operands);
7083 output_asm_insn (\"str\\t%1, %0\", operands);
7084 }
7085
7086 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7087 {
7088 case 4: return \"b%d5\\t%l4\";
7089 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7090 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7091 }
7092 }"
7093 [(set (attr "far_jump")
7094 (if_then_else
7095 (ior (and (eq (symbol_ref ("which_alternative"))
7096 (const_int 0))
7097 (eq_attr "length" "8"))
7098 (eq_attr "length" "10"))
7099 (const_string "yes")
7100 (const_string "no")))
7101 (set (attr "length")
7102 (if_then_else
7103 (eq (symbol_ref ("which_alternative"))
7104 (const_int 0))
7105 (if_then_else
7106 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7107 (le (minus (match_dup 4) (pc)) (const_int 256)))
7108 (const_int 4)
7109 (if_then_else
7110 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7111 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7112 (const_int 6)
7113 (const_int 8)))
7114 (if_then_else
7115 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7116 (le (minus (match_dup 4) (pc)) (const_int 256)))
7117 (const_int 6)
7118 (if_then_else
7119 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7120 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7121 (const_int 8)
7122 (const_int 10)))))]
7123 )
7124
7125 (define_insn "*xorsi3_cbranch_scratch"
7126 [(set (pc)
7127 (if_then_else
7128 (match_operator 4 "equality_operator"
7129 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7130 (match_operand:SI 2 "s_register_operand" "l"))
7131 (const_int 0)])
7132 (label_ref (match_operand 3 "" ""))
7133 (pc)))
7134 (clobber (match_scratch:SI 0 "=l"))]
7135 "TARGET_THUMB1"
7136 "*
7137 {
7138 output_asm_insn (\"eor\\t%0, %2\", operands);
7139 switch (get_attr_length (insn))
7140 {
7141 case 4: return \"b%d4\\t%l3\";
7142 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7143 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7144 }
7145 }"
7146 [(set (attr "far_jump")
7147 (if_then_else
7148 (eq_attr "length" "8")
7149 (const_string "yes")
7150 (const_string "no")))
7151 (set (attr "length")
7152 (if_then_else
7153 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7154 (le (minus (match_dup 3) (pc)) (const_int 256)))
7155 (const_int 4)
7156 (if_then_else
7157 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7158 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7159 (const_int 6)
7160 (const_int 8))))]
7161 )
7162
7163 (define_insn "*xorsi3_cbranch"
7164 [(set (pc)
7165 (if_then_else
7166 (match_operator 5 "equality_operator"
7167 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7168 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7169 (const_int 0)])
7170 (label_ref (match_operand 4 "" ""))
7171 (pc)))
7172 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7173 (xor:SI (match_dup 2) (match_dup 3)))
7174 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7175 "TARGET_THUMB1"
7176 "*
7177 {
7178 if (which_alternative == 0)
7179 output_asm_insn (\"eor\\t%0, %3\", operands);
7180 else if (which_alternative == 1)
7181 {
7182 output_asm_insn (\"eor\\t%1, %3\", operands);
7183 output_asm_insn (\"mov\\t%0, %1\", operands);
7184 }
7185 else
7186 {
7187 output_asm_insn (\"eor\\t%1, %3\", operands);
7188 output_asm_insn (\"str\\t%1, %0\", operands);
7189 }
7190
7191 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7192 {
7193 case 4: return \"b%d5\\t%l4\";
7194 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7195 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7196 }
7197 }"
7198 [(set (attr "far_jump")
7199 (if_then_else
7200 (ior (and (eq (symbol_ref ("which_alternative"))
7201 (const_int 0))
7202 (eq_attr "length" "8"))
7203 (eq_attr "length" "10"))
7204 (const_string "yes")
7205 (const_string "no")))
7206 (set (attr "length")
7207 (if_then_else
7208 (eq (symbol_ref ("which_alternative"))
7209 (const_int 0))
7210 (if_then_else
7211 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7212 (le (minus (match_dup 4) (pc)) (const_int 256)))
7213 (const_int 4)
7214 (if_then_else
7215 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7216 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7217 (const_int 6)
7218 (const_int 8)))
7219 (if_then_else
7220 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7221 (le (minus (match_dup 4) (pc)) (const_int 256)))
7222 (const_int 6)
7223 (if_then_else
7224 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7225 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7226 (const_int 8)
7227 (const_int 10)))))]
7228 )
7229
7230 (define_insn "*bicsi3_cbranch_scratch"
7231 [(set (pc)
7232 (if_then_else
7233 (match_operator 4 "equality_operator"
7234 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7235 (match_operand:SI 1 "s_register_operand" "0"))
7236 (const_int 0)])
7237 (label_ref (match_operand 3 "" ""))
7238 (pc)))
7239 (clobber (match_scratch:SI 0 "=l"))]
7240 "TARGET_THUMB1"
7241 "*
7242 {
7243 output_asm_insn (\"bic\\t%0, %2\", operands);
7244 switch (get_attr_length (insn))
7245 {
7246 case 4: return \"b%d4\\t%l3\";
7247 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7248 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7249 }
7250 }"
7251 [(set (attr "far_jump")
7252 (if_then_else
7253 (eq_attr "length" "8")
7254 (const_string "yes")
7255 (const_string "no")))
7256 (set (attr "length")
7257 (if_then_else
7258 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7259 (le (minus (match_dup 3) (pc)) (const_int 256)))
7260 (const_int 4)
7261 (if_then_else
7262 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7263 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7264 (const_int 6)
7265 (const_int 8))))]
7266 )
7267
7268 (define_insn "*bicsi3_cbranch"
7269 [(set (pc)
7270 (if_then_else
7271 (match_operator 5 "equality_operator"
7272 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7273 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7274 (const_int 0)])
7275 (label_ref (match_operand 4 "" ""))
7276 (pc)))
7277 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7278 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7279 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7280 "TARGET_THUMB1"
7281 "*
7282 {
7283 if (which_alternative == 0)
7284 output_asm_insn (\"bic\\t%0, %3\", operands);
7285 else if (which_alternative <= 2)
7286 {
7287 output_asm_insn (\"bic\\t%1, %3\", operands);
7288 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7289 conditions again, since we're only testing for equality. */
7290 output_asm_insn (\"mov\\t%0, %1\", operands);
7291 }
7292 else
7293 {
7294 output_asm_insn (\"bic\\t%1, %3\", operands);
7295 output_asm_insn (\"str\\t%1, %0\", operands);
7296 }
7297
7298 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7299 {
7300 case 4: return \"b%d5\\t%l4\";
7301 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7302 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7303 }
7304 }"
7305 [(set (attr "far_jump")
7306 (if_then_else
7307 (ior (and (eq (symbol_ref ("which_alternative"))
7308 (const_int 0))
7309 (eq_attr "length" "8"))
7310 (eq_attr "length" "10"))
7311 (const_string "yes")
7312 (const_string "no")))
7313 (set (attr "length")
7314 (if_then_else
7315 (eq (symbol_ref ("which_alternative"))
7316 (const_int 0))
7317 (if_then_else
7318 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7319 (le (minus (match_dup 4) (pc)) (const_int 256)))
7320 (const_int 4)
7321 (if_then_else
7322 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7323 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7324 (const_int 6)
7325 (const_int 8)))
7326 (if_then_else
7327 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7328 (le (minus (match_dup 4) (pc)) (const_int 256)))
7329 (const_int 6)
7330 (if_then_else
7331 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7332 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7333 (const_int 8)
7334 (const_int 10)))))]
7335 )
7336
7337 (define_insn "*cbranchne_decr1"
7338 [(set (pc)
7339 (if_then_else (match_operator 3 "equality_operator"
7340 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7341 (const_int 0)])
7342 (label_ref (match_operand 4 "" ""))
7343 (pc)))
7344 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7345 (plus:SI (match_dup 2) (const_int -1)))
7346 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7347 "TARGET_THUMB1"
7348 "*
7349 {
7350 rtx cond[2];
7351 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7352 ? GEU : LTU),
7353 VOIDmode, operands[2], const1_rtx);
7354 cond[1] = operands[4];
7355
7356 if (which_alternative == 0)
7357 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7358 else if (which_alternative == 1)
7359 {
7360 /* We must provide an alternative for a hi reg because reload
7361 cannot handle output reloads on a jump instruction, but we
7362 can't subtract into that. Fortunately a mov from lo to hi
7363 does not clobber the condition codes. */
7364 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7365 output_asm_insn (\"mov\\t%0, %1\", operands);
7366 }
7367 else
7368 {
7369 /* Similarly, but the target is memory. */
7370 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7371 output_asm_insn (\"str\\t%1, %0\", operands);
7372 }
7373
7374 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7375 {
7376 case 4:
7377 output_asm_insn (\"b%d0\\t%l1\", cond);
7378 return \"\";
7379 case 6:
7380 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7381 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7382 default:
7383 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7384 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7385 }
7386 }
7387 "
7388 [(set (attr "far_jump")
7389 (if_then_else
7390 (ior (and (eq (symbol_ref ("which_alternative"))
7391 (const_int 0))
7392 (eq_attr "length" "8"))
7393 (eq_attr "length" "10"))
7394 (const_string "yes")
7395 (const_string "no")))
7396 (set_attr_alternative "length"
7397 [
7398 ;; Alternative 0
7399 (if_then_else
7400 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7401 (le (minus (match_dup 4) (pc)) (const_int 256)))
7402 (const_int 4)
7403 (if_then_else
7404 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7405 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7406 (const_int 6)
7407 (const_int 8)))
7408 ;; Alternative 1
7409 (if_then_else
7410 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7411 (le (minus (match_dup 4) (pc)) (const_int 256)))
7412 (const_int 6)
7413 (if_then_else
7414 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7415 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7416 (const_int 8)
7417 (const_int 10)))
7418 ;; Alternative 2
7419 (if_then_else
7420 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7421 (le (minus (match_dup 4) (pc)) (const_int 256)))
7422 (const_int 6)
7423 (if_then_else
7424 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7425 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7426 (const_int 8)
7427 (const_int 10)))
7428 ;; Alternative 3
7429 (if_then_else
7430 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7431 (le (minus (match_dup 4) (pc)) (const_int 256)))
7432 (const_int 6)
7433 (if_then_else
7434 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7435 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7436 (const_int 8)
7437 (const_int 10)))])]
7438 )
7439
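;; A sketch of the comparison trick used above, assuming alternative 0 of the
;; NE form with hypothetical registers r0/r2 and label L1.  The decrement
;; itself sets the flags, so the equality test against the pre-decrement
;; value is recovered from the carry:
;;
;;   sub  r0, r2, #1
;;   bcs  L1            @ carry set <=> no borrow <=> r2 >= 1 <=> r2 != 0
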
7440 (define_insn "*addsi3_cbranch"
7441 [(set (pc)
7442 (if_then_else
7443 (match_operator 4 "arm_comparison_operator"
7444 [(plus:SI
7445 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7446 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7447 (const_int 0)])
7448 (label_ref (match_operand 5 "" ""))
7449 (pc)))
7450 (set
7451 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7452 (plus:SI (match_dup 2) (match_dup 3)))
7453 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7454 "TARGET_THUMB1
7455 && (GET_CODE (operands[4]) == EQ
7456 || GET_CODE (operands[4]) == NE
7457 || GET_CODE (operands[4]) == GE
7458 || GET_CODE (operands[4]) == LT)"
7459 "*
7460 {
7461 rtx cond[3];
7462
7463
7464 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7465 cond[1] = operands[2];
7466 cond[2] = operands[3];
7467
7468 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7469 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7470 else
7471 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7472
7473 if (which_alternative >= 3
7474 && which_alternative < 4)
7475 output_asm_insn (\"mov\\t%0, %1\", operands);
7476 else if (which_alternative >= 4)
7477 output_asm_insn (\"str\\t%1, %0\", operands);
7478
7479 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7480 {
7481 case 4:
7482 return \"b%d4\\t%l5\";
7483 case 6:
7484 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7485 default:
7486 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7487 }
7488 }
7489 "
7490 [(set (attr "far_jump")
7491 (if_then_else
7492 (ior (and (lt (symbol_ref ("which_alternative"))
7493 (const_int 3))
7494 (eq_attr "length" "8"))
7495 (eq_attr "length" "10"))
7496 (const_string "yes")
7497 (const_string "no")))
7498 (set (attr "length")
7499 (if_then_else
7500 (lt (symbol_ref ("which_alternative"))
7501 (const_int 3))
7502 (if_then_else
7503 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7504 (le (minus (match_dup 5) (pc)) (const_int 256)))
7505 (const_int 4)
7506 (if_then_else
7507 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7508 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7509 (const_int 6)
7510 (const_int 8)))
7511 (if_then_else
7512 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7513 (le (minus (match_dup 5) (pc)) (const_int 256)))
7514 (const_int 6)
7515 (if_then_else
7516 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7517 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7518 (const_int 8)
7519 (const_int 10)))))]
7520 )
7521
7522 (define_insn "*addsi3_cbranch_scratch"
7523 [(set (pc)
7524 (if_then_else
7525 (match_operator 3 "arm_comparison_operator"
7526 [(plus:SI
7527 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7528 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7529 (const_int 0)])
7530 (label_ref (match_operand 4 "" ""))
7531 (pc)))
7532 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7533 "TARGET_THUMB1
7534 && (GET_CODE (operands[3]) == EQ
7535 || GET_CODE (operands[3]) == NE
7536 || GET_CODE (operands[3]) == GE
7537 || GET_CODE (operands[3]) == LT)"
7538 "*
7539 {
7540 switch (which_alternative)
7541 {
7542 case 0:
7543 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7544 break;
7545 case 1:
7546 output_asm_insn (\"cmn\t%1, %2\", operands);
7547 break;
7548 case 2:
7549 if (INTVAL (operands[2]) < 0)
7550 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7551 else
7552 output_asm_insn (\"add\t%0, %1, %2\", operands);
7553 break;
7554 case 3:
7555 if (INTVAL (operands[2]) < 0)
7556 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7557 else
7558 output_asm_insn (\"add\t%0, %0, %2\", operands);
7559 break;
7560 }
7561
7562 switch (get_attr_length (insn))
7563 {
7564 case 4:
7565 return \"b%d3\\t%l4\";
7566 case 6:
7567 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7568 default:
7569 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7570 }
7571 }
7572 "
7573 [(set (attr "far_jump")
7574 (if_then_else
7575 (eq_attr "length" "8")
7576 (const_string "yes")
7577 (const_string "no")))
7578 (set (attr "length")
7579 (if_then_else
7580 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7581 (le (minus (match_dup 4) (pc)) (const_int 256)))
7582 (const_int 4)
7583 (if_then_else
7584 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7585 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7586 (const_int 6)
7587 (const_int 8))))]
7588 )
7589
7590 (define_insn "*subsi3_cbranch"
7591 [(set (pc)
7592 (if_then_else
7593 (match_operator 4 "arm_comparison_operator"
7594 [(minus:SI
7595 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7596 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7597 (const_int 0)])
7598 (label_ref (match_operand 5 "" ""))
7599 (pc)))
7600 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7601 (minus:SI (match_dup 2) (match_dup 3)))
7602 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7603 "TARGET_THUMB1
7604 && (GET_CODE (operands[4]) == EQ
7605 || GET_CODE (operands[4]) == NE
7606 || GET_CODE (operands[4]) == GE
7607 || GET_CODE (operands[4]) == LT)"
7608 "*
7609 {
7610 if (which_alternative == 0)
7611 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7612 else if (which_alternative == 1)
7613 {
7614 /* We must provide an alternative for a hi reg because reload
7615 cannot handle output reloads on a jump instruction, but we
7616 can't subtract into that. Fortunately a mov from lo to hi
7617 does not clobber the condition codes. */
7618 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7619 output_asm_insn (\"mov\\t%0, %1\", operands);
7620 }
7621 else
7622 {
7623 /* Similarly, but the target is memory. */
7624 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7625 output_asm_insn (\"str\\t%1, %0\", operands);
7626 }
7627
7628 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7629 {
7630 case 4:
7631 return \"b%d4\\t%l5\";
7632 case 6:
7633 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7634 default:
7635 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7636 }
7637 }
7638 "
7639 [(set (attr "far_jump")
7640 (if_then_else
7641 (ior (and (eq (symbol_ref ("which_alternative"))
7642 (const_int 0))
7643 (eq_attr "length" "8"))
7644 (eq_attr "length" "10"))
7645 (const_string "yes")
7646 (const_string "no")))
7647 (set (attr "length")
7648 (if_then_else
7649 (eq (symbol_ref ("which_alternative"))
7650 (const_int 0))
7651 (if_then_else
7652 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7653 (le (minus (match_dup 5) (pc)) (const_int 256)))
7654 (const_int 4)
7655 (if_then_else
7656 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7657 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7658 (const_int 6)
7659 (const_int 8)))
7660 (if_then_else
7661 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7662 (le (minus (match_dup 5) (pc)) (const_int 256)))
7663 (const_int 6)
7664 (if_then_else
7665 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7666 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7667 (const_int 8)
7668 (const_int 10)))))]
7669 )
7670
7671 (define_insn "*subsi3_cbranch_scratch"
7672 [(set (pc)
7673 (if_then_else
7674 (match_operator 0 "arm_comparison_operator"
7675 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7676 (match_operand:SI 2 "nonmemory_operand" "l"))
7677 (const_int 0)])
7678 (label_ref (match_operand 3 "" ""))
7679 (pc)))]
7680 "TARGET_THUMB1
7681 && (GET_CODE (operands[0]) == EQ
7682 || GET_CODE (operands[0]) == NE
7683 || GET_CODE (operands[0]) == GE
7684 || GET_CODE (operands[0]) == LT)"
7685 "*
7686 output_asm_insn (\"cmp\\t%1, %2\", operands);
7687 switch (get_attr_length (insn))
7688 {
7689 case 4: return \"b%d0\\t%l3\";
7690 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7691 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7692 }
7693 "
7694 [(set (attr "far_jump")
7695 (if_then_else
7696 (eq_attr "length" "8")
7697 (const_string "yes")
7698 (const_string "no")))
7699 (set (attr "length")
7700 (if_then_else
7701 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7702 (le (minus (match_dup 3) (pc)) (const_int 256)))
7703 (const_int 4)
7704 (if_then_else
7705 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7706 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7707 (const_int 6)
7708 (const_int 8))))]
7709 )
7710
7711 ;; Comparison and test insns
7712
7713 (define_insn "*arm_cmpsi_insn"
7714 [(set (reg:CC CC_REGNUM)
7715 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7716 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7717 "TARGET_32BIT"
7718 "@
7719 cmp%?\\t%0, %1
7720 cmn%?\\t%0, #%n1"
7721 [(set_attr "conds" "set")]
7722 )
7723
7724 (define_insn "*arm_cmpsi_shiftsi"
7725 [(set (reg:CC CC_REGNUM)
7726 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7727 (match_operator:SI 3 "shift_operator"
7728 [(match_operand:SI 1 "s_register_operand" "r")
7729 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7730 "TARGET_ARM"
7731 "cmp%?\\t%0, %1%S3"
7732 [(set_attr "conds" "set")
7733 (set_attr "shift" "1")
7734 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7735 (const_string "alu_shift")
7736 (const_string "alu_shift_reg")))]
7737 )
7738
7739 (define_insn "*arm_cmpsi_shiftsi_swp"
7740 [(set (reg:CC_SWP CC_REGNUM)
7741 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7742 [(match_operand:SI 1 "s_register_operand" "r")
7743 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7744 (match_operand:SI 0 "s_register_operand" "r")))]
7745 "TARGET_ARM"
7746 "cmp%?\\t%0, %1%S3"
7747 [(set_attr "conds" "set")
7748 (set_attr "shift" "1")
7749 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7750 (const_string "alu_shift")
7751 (const_string "alu_shift_reg")))]
7752 )
7753
7754 (define_insn "*arm_cmpsi_negshiftsi_si"
7755 [(set (reg:CC_Z CC_REGNUM)
7756 (compare:CC_Z
7757 (neg:SI (match_operator:SI 1 "shift_operator"
7758 [(match_operand:SI 2 "s_register_operand" "r")
7759 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7760 (match_operand:SI 0 "s_register_operand" "r")))]
7761 "TARGET_ARM"
7762 "cmn%?\\t%0, %2%S1"
7763 [(set_attr "conds" "set")
7764 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7765 (const_string "alu_shift")
7766 (const_string "alu_shift_reg")))]
7767 )
7768
7769 ;; Cirrus SF compare instruction
7770 (define_insn "*cirrus_cmpsf"
7771 [(set (reg:CCFP CC_REGNUM)
7772 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7773 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7774 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7775 "cfcmps%?\\tr15, %V0, %V1"
7776 [(set_attr "type" "mav_farith")
7777 (set_attr "cirrus" "compare")]
7778 )
7779
7780 ;; Cirrus DF compare instruction
7781 (define_insn "*cirrus_cmpdf"
7782 [(set (reg:CCFP CC_REGNUM)
7783 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7784 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7785 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7786 "cfcmpd%?\\tr15, %V0, %V1"
7787 [(set_attr "type" "mav_farith")
7788 (set_attr "cirrus" "compare")]
7789 )
7790
7791 (define_insn "*cirrus_cmpdi"
7792 [(set (reg:CC CC_REGNUM)
7793 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7794 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7795 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7796 "cfcmp64%?\\tr15, %V0, %V1"
7797 [(set_attr "type" "mav_farith")
7798 (set_attr "cirrus" "compare")]
7799 )
7800
7801 ; This insn allows redundant compares to be removed by cse.  Nothing should
7802 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7803 ; is deleted later on.  The match_dup will match the mode here, so that
7804 ; mode changes of the condition codes aren't lost by this even though we don't
7805 ; specify what they are.
7806
7807 (define_insn "*deleted_compare"
7808 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7809 "TARGET_32BIT"
7810 "\\t%@ deleted compare"
7811 [(set_attr "conds" "set")
7812 (set_attr "length" "0")]
7813 )
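
;; For instance, a compare made redundant by cse can degenerate into
;; (set (reg:CC CC_REGNUM) (reg:CC CC_REGNUM)); that matches this pattern
;; and, in the unlikely event it survives until output, emits only the
;; "@ deleted compare" comment above.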
7814
7815 \f
7816 ;; Conditional branch insns
7817
7818 (define_expand "cbranch_cc"
7819 [(set (pc)
7820 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7821 (match_operand 2 "" "")])
7822 (label_ref (match_operand 3 "" ""))
7823 (pc)))]
7824 "TARGET_32BIT"
7825 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7826 operands[1], operands[2]);
7827 operands[2] = const0_rtx;"
7828 )
7829
7830 ;;
7831 ;; Patterns to match conditional branch insns.
7832 ;;
7833
7834 (define_insn "*arm_cond_branch"
7835 [(set (pc)
7836 (if_then_else (match_operator 1 "arm_comparison_operator"
7837 [(match_operand 2 "cc_register" "") (const_int 0)])
7838 (label_ref (match_operand 0 "" ""))
7839 (pc)))]
7840 "TARGET_32BIT"
7841 "*
7842 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7843 {
7844 arm_ccfsm_state += 2;
7845 return \"\";
7846 }
7847 return \"b%d1\\t%l0\";
7848 "
7849 [(set_attr "conds" "use")
7850 (set_attr "type" "branch")]
7851 )
7852
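;; A minimal sketch of the conditional execution handled above (and by the
;; reversed pattern below), using hypothetical code: when the final-prescan
;; logic in arm.c decides that the insns skipped by a short forward branch
;; can be predicated instead, it sets arm_ccfsm_state, the branch emits
;; nothing, and
;;
;;   cmp r0, #0 ; beq .L1 ; add r1, r1, #1 ; .L1:
;;
;; comes out as
;;
;;   cmp r0, #0 ; addne r1, r1, #1
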
7853 (define_insn "*arm_cond_branch_reversed"
7854 [(set (pc)
7855 (if_then_else (match_operator 1 "arm_comparison_operator"
7856 [(match_operand 2 "cc_register" "") (const_int 0)])
7857 (pc)
7858 (label_ref (match_operand 0 "" ""))))]
7859 "TARGET_32BIT"
7860 "*
7861 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7862 {
7863 arm_ccfsm_state += 2;
7864 return \"\";
7865 }
7866 return \"b%D1\\t%l0\";
7867 "
7868 [(set_attr "conds" "use")
7869 (set_attr "type" "branch")]
7870 )
7871
7872 \f
7873
7874 ; scc insns
7875
7876 (define_expand "cstore_cc"
7877 [(set (match_operand:SI 0 "s_register_operand" "")
7878 (match_operator:SI 1 "" [(match_operand 2 "" "")
7879 (match_operand 3 "" "")]))]
7880 "TARGET_32BIT"
7881 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7882 operands[2], operands[3]);
7883 operands[3] = const0_rtx;"
7884 )
7885
7886 (define_insn "*mov_scc"
7887 [(set (match_operand:SI 0 "s_register_operand" "=r")
7888 (match_operator:SI 1 "arm_comparison_operator"
7889 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7890 "TARGET_ARM"
7891 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7892 [(set_attr "conds" "use")
7893 (set_attr "length" "8")]
7894 )
7895
7896 (define_insn "*mov_negscc"
7897 [(set (match_operand:SI 0 "s_register_operand" "=r")
7898 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7899 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7900 "TARGET_ARM"
7901 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7902 [(set_attr "conds" "use")
7903 (set_attr "length" "8")]
7904 )
7905
7906 (define_insn "*mov_notscc"
7907 [(set (match_operand:SI 0 "s_register_operand" "=r")
7908 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7909 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7910 "TARGET_ARM"
7911 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7912 [(set_attr "conds" "use")
7913 (set_attr "length" "8")]
7914 )
7915
7916 (define_expand "cstoresi4"
7917 [(set (match_operand:SI 0 "s_register_operand" "")
7918 (match_operator:SI 1 "arm_comparison_operator"
7919 [(match_operand:SI 2 "s_register_operand" "")
7920 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7921 "TARGET_32BIT || TARGET_THUMB1"
7922 "{
7923 rtx op3, scratch, scratch2;
7924
7925 if (!TARGET_THUMB1)
7926 {
7927 if (!arm_add_operand (operands[3], SImode))
7928 operands[3] = force_reg (SImode, operands[3]);
7929 emit_insn (gen_cstore_cc (operands[0], operands[1],
7930 operands[2], operands[3]));
7931 DONE;
7932 }
7933
7934 if (operands[3] == const0_rtx)
7935 {
7936 switch (GET_CODE (operands[1]))
7937 {
7938 case EQ:
7939 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7940 break;
7941
7942 case NE:
7943 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7944 break;
7945
7946 case LE:
7947 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7948 NULL_RTX, 0, OPTAB_WIDEN);
7949 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7950 NULL_RTX, 0, OPTAB_WIDEN);
7951 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7952 operands[0], 1, OPTAB_WIDEN);
7953 break;
7954
7955 case GE:
7956 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7957 NULL_RTX, 1);
7958 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7959 			operands[0], 1, OPTAB_WIDEN);
7960 break;
7961
7962 case GT:
7963 scratch = expand_binop (SImode, ashr_optab, operands[2],
7964 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7965 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7966 NULL_RTX, 0, OPTAB_WIDEN);
7967 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7968 0, OPTAB_WIDEN);
7969 break;
7970
7971 /* LT is handled by generic code. No need for unsigned with 0. */
7972 default:
7973 FAIL;
7974 }
7975 DONE;
7976 }
7977
7978 switch (GET_CODE (operands[1]))
7979 {
7980 case EQ:
7981 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7982 NULL_RTX, 0, OPTAB_WIDEN);
7983 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7984 break;
7985
7986 case NE:
7987 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7988 NULL_RTX, 0, OPTAB_WIDEN);
7989 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7990 break;
7991
7992 case LE:
7993 op3 = force_reg (SImode, operands[3]);
7994
7995 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7996 NULL_RTX, 1, OPTAB_WIDEN);
7997 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7998 NULL_RTX, 0, OPTAB_WIDEN);
7999 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8000 op3, operands[2]));
8001 break;
8002
8003 case GE:
8004 op3 = operands[3];
8005 if (!thumb1_cmp_operand (op3, SImode))
8006 op3 = force_reg (SImode, op3);
8007 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8008 NULL_RTX, 0, OPTAB_WIDEN);
8009 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8010 NULL_RTX, 1, OPTAB_WIDEN);
8011 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8012 operands[2], op3));
8013 break;
8014
8015 case LEU:
8016 op3 = force_reg (SImode, operands[3]);
8017 scratch = force_reg (SImode, const0_rtx);
8018 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8019 op3, operands[2]));
8020 break;
8021
8022 case GEU:
8023 op3 = operands[3];
8024 if (!thumb1_cmp_operand (op3, SImode))
8025 op3 = force_reg (SImode, op3);
8026 scratch = force_reg (SImode, const0_rtx);
8027 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8028 operands[2], op3));
8029 break;
8030
8031 case LTU:
8032 op3 = operands[3];
8033 if (!thumb1_cmp_operand (op3, SImode))
8034 op3 = force_reg (SImode, op3);
8035 scratch = gen_reg_rtx (SImode);
8036 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8037 emit_insn (gen_negsi2 (operands[0], scratch));
8038 break;
8039
8040 case GTU:
8041 op3 = force_reg (SImode, operands[3]);
8042 scratch = gen_reg_rtx (SImode);
8043 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8044 emit_insn (gen_negsi2 (operands[0], scratch));
8045 break;
8046
8047 /* No good sequences for GT, LT. */
8048 default:
8049 FAIL;
8050 }
8051 DONE;
8052 }")
8053
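;; A worked example of one of the Thumb-1 sequences generated above, with
;; hypothetical registers: for the unsigned less-than case (LTU),
;; r0 = (r1 < r2), cstoresi_nltu_thumb1 followed by the final negate emits
;;
;;   cmp  r1, r2        @ C := no borrow, i.e. (r1 >= r2) unsigned
;;   sbc  r3, r3, r3    @ r3 := (r1 < r2) ? -1 : 0
;;   neg  r0, r3        @ r0 := (r1 < r2) ? 1 : 0
;;
;; GTU is the same sequence with the operands of the compare swapped.
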
8054 (define_expand "cstoresf4"
8055 [(set (match_operand:SI 0 "s_register_operand" "")
8056 (match_operator:SI 1 "arm_comparison_operator"
8057 [(match_operand:SF 2 "s_register_operand" "")
8058 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8059 "TARGET_32BIT && TARGET_HARD_FLOAT"
8060 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8061 operands[2], operands[3])); DONE;"
8062 )
8063
8064 (define_expand "cstoredf4"
8065 [(set (match_operand:SI 0 "s_register_operand" "")
8066 (match_operator:SI 1 "arm_comparison_operator"
8067 [(match_operand:DF 2 "s_register_operand" "")
8068 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8069 "TARGET_32BIT && TARGET_HARD_FLOAT"
8070 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8071 operands[2], operands[3])); DONE;"
8072 )
8073
8074 ;; This expander uses the Cirrus DI compare instruction.
8075 (define_expand "cstoredi4"
8076 [(set (match_operand:SI 0 "s_register_operand" "")
8077 (match_operator:SI 1 "arm_comparison_operator"
8078 [(match_operand:DI 2 "cirrus_fp_register" "")
8079 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8080 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8081 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8082 operands[2], operands[3])); DONE;"
8083 )
8084
8085
8086 (define_expand "cstoresi_eq0_thumb1"
8087 [(parallel
8088 [(set (match_operand:SI 0 "s_register_operand" "")
8089 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8090 (const_int 0)))
8091 (clobber (match_dup:SI 2))])]
8092 "TARGET_THUMB1"
8093 "operands[2] = gen_reg_rtx (SImode);"
8094 )
8095
8096 (define_expand "cstoresi_ne0_thumb1"
8097 [(parallel
8098 [(set (match_operand:SI 0 "s_register_operand" "")
8099 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8100 (const_int 0)))
8101 (clobber (match_dup:SI 2))])]
8102 "TARGET_THUMB1"
8103 "operands[2] = gen_reg_rtx (SImode);"
8104 )
8105
8106 (define_insn "*cstoresi_eq0_thumb1_insn"
8107 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8108 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8109 (const_int 0)))
8110 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8111 "TARGET_THUMB1"
8112 "@
8113 neg\\t%0, %1\;adc\\t%0, %0, %1
8114 neg\\t%2, %1\;adc\\t%0, %1, %2"
8115 [(set_attr "length" "4")]
8116 )
8117
8118 (define_insn "*cstoresi_ne0_thumb1_insn"
8119 [(set (match_operand:SI 0 "s_register_operand" "=l")
8120 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8121 (const_int 0)))
8122 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8123 "TARGET_THUMB1"
8124 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8125 [(set_attr "length" "4")]
8126 )
8127
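;; A sketch of the carry tricks used by the two patterns above, with
;; hypothetical registers (r0 = result, r1 = input, r2 = scratch):
;;
;;   eq0:  neg r0, r1        @ C := (r1 == 0)
;;         adc r0, r0, r1    @ r0 := (0 - r1) + r1 + C  =  (r1 == 0)
;;
;;   ne0:  sub r2, r1, #1    @ C := (r1 != 0), r2 := r1 - 1
;;         sbc r0, r1, r2    @ r0 := r1 - r2 - !C       =  (r1 != 0)
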
8128 (define_insn "cstoresi_nltu_thumb1"
8129 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8130 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8131 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8132 "TARGET_THUMB1"
8133 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8134 [(set_attr "length" "4")]
8135 )
8136
8137 ;; Used as part of the expansion of the Thumb-1 LE/GE/LEU/GEU cstore sequences above; see the sketch below.
8138 (define_insn "thumb1_addsi3_addgeu"
8139 [(set (match_operand:SI 0 "s_register_operand" "=l")
8140 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8141 (match_operand:SI 2 "s_register_operand" "l"))
8142 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8143 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8144 "TARGET_THUMB1"
8145 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8146 [(set_attr "length" "4")]
8147 )
8148
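;; The helper above computes %0 = %1 + %2 + (%3 >= %4 unsigned) through the
;; carry flag.  The unsigned LEU/GEU cases of "cstoresi4" feed it zeros; the
;; signed LE/GE cases feed it the shifted-out sign bits, relying on the
;; identity
;;
;;   (a <= b signed) = (a >>logical 31) + (b >>arith 31) + (b >= a unsigned)
;;
;; e.g. a = -1, b = 0 gives 1 + 0 + 0 = 1, while a = 1, b = -1 gives
;; 0 + (-1) + 1 = 0.
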
8149 \f
8150 ;; Conditional move insns
8151
8152 (define_expand "movsicc"
8153 [(set (match_operand:SI 0 "s_register_operand" "")
8154 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8155 (match_operand:SI 2 "arm_not_operand" "")
8156 (match_operand:SI 3 "arm_not_operand" "")))]
8157 "TARGET_32BIT"
8158 "
8159 {
8160 enum rtx_code code = GET_CODE (operands[1]);
8161 rtx ccreg;
8162
8163 if (code == UNEQ || code == LTGT)
8164 FAIL;
8165
8166 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8167 XEXP (operands[1], 1));
8168 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8169 }"
8170 )
8171
8172 (define_expand "movsfcc"
8173 [(set (match_operand:SF 0 "s_register_operand" "")
8174 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8175 (match_operand:SF 2 "s_register_operand" "")
8176 (match_operand:SF 3 "nonmemory_operand" "")))]
8177 "TARGET_32BIT && TARGET_HARD_FLOAT"
8178 "
8179 {
8180 enum rtx_code code = GET_CODE (operands[1]);
8181 rtx ccreg;
8182
8183 if (code == UNEQ || code == LTGT)
8184 FAIL;
8185
8186 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8187         Otherwise, ensure it is a valid FP add operand.  */
8188 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8189 || (!arm_float_add_operand (operands[3], SFmode)))
8190 operands[3] = force_reg (SFmode, operands[3]);
8191
8192 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8193 XEXP (operands[1], 1));
8194 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8195 }"
8196 )
8197
8198 (define_expand "movdfcc"
8199 [(set (match_operand:DF 0 "s_register_operand" "")
8200 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8201 (match_operand:DF 2 "s_register_operand" "")
8202 (match_operand:DF 3 "arm_float_add_operand" "")))]
8203 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8204 "
8205 {
8206 enum rtx_code code = GET_CODE (operands[1]);
8207 rtx ccreg;
8208
8209 if (code == UNEQ || code == LTGT)
8210 FAIL;
8211
8212 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8213 XEXP (operands[1], 1));
8214 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8215 }"
8216 )
8217
8218 (define_insn "*movsicc_insn"
8219 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8220 (if_then_else:SI
8221 (match_operator 3 "arm_comparison_operator"
8222 [(match_operand 4 "cc_register" "") (const_int 0)])
8223 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8224 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8225 "TARGET_ARM"
8226 "@
8227 mov%D3\\t%0, %2
8228 mvn%D3\\t%0, #%B2
8229 mov%d3\\t%0, %1
8230 mvn%d3\\t%0, #%B1
8231 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8232 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8233 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8234 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8235 [(set_attr "length" "4,4,4,4,8,8,8,8")
8236 (set_attr "conds" "use")]
8237 )
8238
8239 (define_insn "*movsfcc_soft_insn"
8240 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8241 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8242 [(match_operand 4 "cc_register" "") (const_int 0)])
8243 (match_operand:SF 1 "s_register_operand" "0,r")
8244 (match_operand:SF 2 "s_register_operand" "r,0")))]
8245 "TARGET_ARM && TARGET_SOFT_FLOAT"
8246 "@
8247 mov%D3\\t%0, %2
8248 mov%d3\\t%0, %1"
8249 [(set_attr "conds" "use")]
8250 )
8251
8252 \f
8253 ;; Jump and linkage insns
8254
8255 (define_expand "jump"
8256 [(set (pc)
8257 (label_ref (match_operand 0 "" "")))]
8258 "TARGET_EITHER"
8259 ""
8260 )
8261
8262 (define_insn "*arm_jump"
8263 [(set (pc)
8264 (label_ref (match_operand 0 "" "")))]
8265 "TARGET_32BIT"
8266 "*
8267 {
8268 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8269 {
8270 arm_ccfsm_state += 2;
8271 return \"\";
8272 }
8273 return \"b%?\\t%l0\";
8274 }
8275 "
8276 [(set_attr "predicable" "yes")]
8277 )
8278
8279 (define_insn "*thumb_jump"
8280 [(set (pc)
8281 (label_ref (match_operand 0 "" "")))]
8282 "TARGET_THUMB1"
8283 "*
8284 if (get_attr_length (insn) == 2)
8285 return \"b\\t%l0\";
8286 return \"bl\\t%l0\\t%@ far jump\";
8287 "
8288 [(set (attr "far_jump")
8289 (if_then_else
8290 (eq_attr "length" "4")
8291 (const_string "yes")
8292 (const_string "no")))
8293 (set (attr "length")
8294 (if_then_else
8295 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8296 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8297 (const_int 2)
8298 (const_int 4)))]
8299 )
8300
8301 (define_expand "call"
8302 [(parallel [(call (match_operand 0 "memory_operand" "")
8303 (match_operand 1 "general_operand" ""))
8304 (use (match_operand 2 "" ""))
8305 (clobber (reg:SI LR_REGNUM))])]
8306 "TARGET_EITHER"
8307 "
8308 {
8309 rtx callee, pat;
8310
8311 /* In an untyped call, we can get NULL for operand 2. */
8312 if (operands[2] == NULL_RTX)
8313 operands[2] = const0_rtx;
8314
8315 /* Decide if we should generate indirect calls by loading the
8316 32-bit address of the callee into a register before performing the
8317 branch and link. */
8318 callee = XEXP (operands[0], 0);
8319 if (GET_CODE (callee) == SYMBOL_REF
8320 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8321 : !REG_P (callee))
8322 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8323
8324 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8325 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8326 DONE;
8327 }"
8328 )
8329
8330 (define_expand "call_internal"
8331 [(parallel [(call (match_operand 0 "memory_operand" "")
8332 (match_operand 1 "general_operand" ""))
8333 (use (match_operand 2 "" ""))
8334 (clobber (reg:SI LR_REGNUM))])])
8335
8336 (define_insn "*call_reg_armv5"
8337 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8338 (match_operand 1 "" ""))
8339 (use (match_operand 2 "" ""))
8340 (clobber (reg:SI LR_REGNUM))]
8341 "TARGET_ARM && arm_arch5"
8342 "blx%?\\t%0"
8343 [(set_attr "type" "call")]
8344 )
8345
8346 (define_insn "*call_reg_arm"
8347 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8348 (match_operand 1 "" ""))
8349 (use (match_operand 2 "" ""))
8350 (clobber (reg:SI LR_REGNUM))]
8351 "TARGET_ARM && !arm_arch5"
8352 "*
8353 return output_call (operands);
8354 "
8355 ;; The length is the worst case; normally only two instructions are needed (see the sketch after this pattern).
8356 [(set_attr "length" "12")
8357 (set_attr "type" "call")]
8358 )
8359
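;; A sketch of what output_call typically emits for the pattern above,
;; assuming the callee address sits in a hypothetical register r3 and no
;; interworking is needed:
;;
;;   mov  lr, pc
;;   mov  pc, r3        @ "bx r3" when interworking is required
;;
;; The 12-byte length is the worst case; it leaves room for an extra move,
;; for instance when the callee address must first be copied out of LR.
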
8360 (define_insn "*call_mem"
8361 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8362 (match_operand 1 "" ""))
8363 (use (match_operand 2 "" ""))
8364 (clobber (reg:SI LR_REGNUM))]
8365 "TARGET_ARM"
8366 "*
8367 return output_call_mem (operands);
8368 "
8369 [(set_attr "length" "12")
8370 (set_attr "type" "call")]
8371 )
8372
8373 (define_insn "*call_reg_thumb1_v5"
8374 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8375 (match_operand 1 "" ""))
8376 (use (match_operand 2 "" ""))
8377 (clobber (reg:SI LR_REGNUM))]
8378 "TARGET_THUMB1 && arm_arch5"
8379 "blx\\t%0"
8380 [(set_attr "length" "2")
8381 (set_attr "type" "call")]
8382 )
8383
8384 (define_insn "*call_reg_thumb1"
8385 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8386 (match_operand 1 "" ""))
8387 (use (match_operand 2 "" ""))
8388 (clobber (reg:SI LR_REGNUM))]
8389 "TARGET_THUMB1 && !arm_arch5"
8390 "*
8391 {
8392 if (!TARGET_CALLER_INTERWORKING)
8393 return thumb_call_via_reg (operands[0]);
8394 else if (operands[1] == const0_rtx)
8395 return \"bl\\t%__interwork_call_via_%0\";
8396 else if (frame_pointer_needed)
8397 return \"bl\\t%__interwork_r7_call_via_%0\";
8398 else
8399 return \"bl\\t%__interwork_r11_call_via_%0\";
8400 }"
8401 [(set_attr "type" "call")]
8402 )
8403
8404 (define_expand "call_value"
8405 [(parallel [(set (match_operand 0 "" "")
8406 (call (match_operand 1 "memory_operand" "")
8407 (match_operand 2 "general_operand" "")))
8408 (use (match_operand 3 "" ""))
8409 (clobber (reg:SI LR_REGNUM))])]
8410 "TARGET_EITHER"
8411 "
8412 {
8413 rtx pat, callee;
8414
8415   /* In an untyped call, we can get NULL for operand 3.  */
8416 if (operands[3] == 0)
8417 operands[3] = const0_rtx;
8418
8419 /* Decide if we should generate indirect calls by loading the
8420 32-bit address of the callee into a register before performing the
8421 branch and link. */
8422 callee = XEXP (operands[1], 0);
8423 if (GET_CODE (callee) == SYMBOL_REF
8424 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8425 : !REG_P (callee))
8426 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8427
8428 pat = gen_call_value_internal (operands[0], operands[1],
8429 operands[2], operands[3]);
8430 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8431 DONE;
8432 }"
8433 )
8434
8435 (define_expand "call_value_internal"
8436 [(parallel [(set (match_operand 0 "" "")
8437 (call (match_operand 1 "memory_operand" "")
8438 (match_operand 2 "general_operand" "")))
8439 (use (match_operand 3 "" ""))
8440 (clobber (reg:SI LR_REGNUM))])])
8441
8442 (define_insn "*call_value_reg_armv5"
8443 [(set (match_operand 0 "" "")
8444 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8445 (match_operand 2 "" "")))
8446 (use (match_operand 3 "" ""))
8447 (clobber (reg:SI LR_REGNUM))]
8448 "TARGET_ARM && arm_arch5"
8449 "blx%?\\t%1"
8450 [(set_attr "type" "call")]
8451 )
8452
8453 (define_insn "*call_value_reg_arm"
8454 [(set (match_operand 0 "" "")
8455 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8456 (match_operand 2 "" "")))
8457 (use (match_operand 3 "" ""))
8458 (clobber (reg:SI LR_REGNUM))]
8459 "TARGET_ARM && !arm_arch5"
8460 "*
8461 return output_call (&operands[1]);
8462 "
8463 [(set_attr "length" "12")
8464 (set_attr "type" "call")]
8465 )
8466
8467 (define_insn "*call_value_mem"
8468 [(set (match_operand 0 "" "")
8469 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8470 (match_operand 2 "" "")))
8471 (use (match_operand 3 "" ""))
8472 (clobber (reg:SI LR_REGNUM))]
8473 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8474 "*
8475 return output_call_mem (&operands[1]);
8476 "
8477 [(set_attr "length" "12")
8478 (set_attr "type" "call")]
8479 )
8480
8481 (define_insn "*call_value_reg_thumb1_v5"
8482 [(set (match_operand 0 "" "")
8483 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8484 (match_operand 2 "" "")))
8485 (use (match_operand 3 "" ""))
8486 (clobber (reg:SI LR_REGNUM))]
8487 "TARGET_THUMB1 && arm_arch5"
8488 "blx\\t%1"
8489 [(set_attr "length" "2")
8490 (set_attr "type" "call")]
8491 )
8492
8493 (define_insn "*call_value_reg_thumb1"
8494 [(set (match_operand 0 "" "")
8495 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8496 (match_operand 2 "" "")))
8497 (use (match_operand 3 "" ""))
8498 (clobber (reg:SI LR_REGNUM))]
8499 "TARGET_THUMB1 && !arm_arch5"
8500 "*
8501 {
8502 if (!TARGET_CALLER_INTERWORKING)
8503 return thumb_call_via_reg (operands[1]);
8504 else if (operands[2] == const0_rtx)
8505 return \"bl\\t%__interwork_call_via_%1\";
8506 else if (frame_pointer_needed)
8507 return \"bl\\t%__interwork_r7_call_via_%1\";
8508 else
8509 return \"bl\\t%__interwork_r11_call_via_%1\";
8510 }"
8511 [(set_attr "type" "call")]
8512 )
8513
8514 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8515 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8516
8517 (define_insn "*call_symbol"
8518 [(call (mem:SI (match_operand:SI 0 "" ""))
8519 (match_operand 1 "" ""))
8520 (use (match_operand 2 "" ""))
8521 (clobber (reg:SI LR_REGNUM))]
8522 "TARGET_ARM
8523 && (GET_CODE (operands[0]) == SYMBOL_REF)
8524 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8525 "*
8526 {
8527 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8528 }"
8529 [(set_attr "type" "call")]
8530 )
8531
8532 (define_insn "*call_value_symbol"
8533 [(set (match_operand 0 "" "")
8534 (call (mem:SI (match_operand:SI 1 "" ""))
8535 (match_operand:SI 2 "" "")))
8536 (use (match_operand 3 "" ""))
8537 (clobber (reg:SI LR_REGNUM))]
8538 "TARGET_ARM
8539 && (GET_CODE (operands[1]) == SYMBOL_REF)
8540 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8541 "*
8542 {
8543 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8544 }"
8545 [(set_attr "type" "call")]
8546 )
8547
8548 (define_insn "*call_insn"
8549 [(call (mem:SI (match_operand:SI 0 "" ""))
8550 (match_operand:SI 1 "" ""))
8551 (use (match_operand 2 "" ""))
8552 (clobber (reg:SI LR_REGNUM))]
8553 "TARGET_THUMB
8554 && GET_CODE (operands[0]) == SYMBOL_REF
8555 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8556 "bl\\t%a0"
8557 [(set_attr "length" "4")
8558 (set_attr "type" "call")]
8559 )
8560
8561 (define_insn "*call_value_insn"
8562 [(set (match_operand 0 "" "")
8563 (call (mem:SI (match_operand 1 "" ""))
8564 (match_operand 2 "" "")))
8565 (use (match_operand 3 "" ""))
8566 (clobber (reg:SI LR_REGNUM))]
8567 "TARGET_THUMB
8568 && GET_CODE (operands[1]) == SYMBOL_REF
8569 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8570 "bl\\t%a1"
8571 [(set_attr "length" "4")
8572 (set_attr "type" "call")]
8573 )
8574
8575 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8576 (define_expand "sibcall"
8577 [(parallel [(call (match_operand 0 "memory_operand" "")
8578 (match_operand 1 "general_operand" ""))
8579 (return)
8580 (use (match_operand 2 "" ""))])]
8581 "TARGET_ARM"
8582 "
8583 {
8584 if (operands[2] == NULL_RTX)
8585 operands[2] = const0_rtx;
8586 }"
8587 )
8588
8589 (define_expand "sibcall_value"
8590 [(parallel [(set (match_operand 0 "" "")
8591 (call (match_operand 1 "memory_operand" "")
8592 (match_operand 2 "general_operand" "")))
8593 (return)
8594 (use (match_operand 3 "" ""))])]
8595 "TARGET_ARM"
8596 "
8597 {
8598 if (operands[3] == NULL_RTX)
8599 operands[3] = const0_rtx;
8600 }"
8601 )
8602
8603 (define_insn "*sibcall_insn"
8604 [(call (mem:SI (match_operand:SI 0 "" "X"))
8605 (match_operand 1 "" ""))
8606 (return)
8607 (use (match_operand 2 "" ""))]
8608 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8609 "*
8610 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8611 "
8612 [(set_attr "type" "call")]
8613 )
8614
8615 (define_insn "*sibcall_value_insn"
8616 [(set (match_operand 0 "" "")
8617 (call (mem:SI (match_operand:SI 1 "" "X"))
8618 (match_operand 2 "" "")))
8619 (return)
8620 (use (match_operand 3 "" ""))]
8621 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8622 "*
8623 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8624 "
8625 [(set_attr "type" "call")]
8626 )
8627
8628 ;; Often the return insn will be the same as a load from memory, so set the type attribute accordingly.
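;; For example (illustrative only), output_return_instruction typically
;; produces a multi-register load of the form
;;	ldmfd	sp!, {r4, r5, fp, pc}
;; which is why the return patterns below use the "load1" type and a
;; worst-case length of 12.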
8629 (define_insn "return"
8630 [(return)]
8631 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8632 "*
8633 {
8634 if (arm_ccfsm_state == 2)
8635 {
8636 arm_ccfsm_state += 2;
8637 return \"\";
8638 }
8639 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8640 }"
8641 [(set_attr "type" "load1")
8642 (set_attr "length" "12")
8643 (set_attr "predicable" "yes")]
8644 )
8645
8646 (define_insn "*cond_return"
8647 [(set (pc)
8648 (if_then_else (match_operator 0 "arm_comparison_operator"
8649 [(match_operand 1 "cc_register" "") (const_int 0)])
8650 (return)
8651 (pc)))]
8652 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8653 "*
8654 {
8655 if (arm_ccfsm_state == 2)
8656 {
8657 arm_ccfsm_state += 2;
8658 return \"\";
8659 }
8660 return output_return_instruction (operands[0], TRUE, FALSE);
8661 }"
8662 [(set_attr "conds" "use")
8663 (set_attr "length" "12")
8664 (set_attr "type" "load1")]
8665 )
8666
8667 (define_insn "*cond_return_inverted"
8668 [(set (pc)
8669 (if_then_else (match_operator 0 "arm_comparison_operator"
8670 [(match_operand 1 "cc_register" "") (const_int 0)])
8671 (pc)
8672 (return)))]
8673 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8674 "*
8675 {
8676 if (arm_ccfsm_state == 2)
8677 {
8678 arm_ccfsm_state += 2;
8679 return \"\";
8680 }
8681 return output_return_instruction (operands[0], TRUE, TRUE);
8682 }"
8683 [(set_attr "conds" "use")
8684 (set_attr "length" "12")
8685 (set_attr "type" "load1")]
8686 )
8687
8688 ;; Generate a sequence of instructions to determine if the processor is
8689 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8690 ;; mask.
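;; For illustration, the expansion below works roughly as follows (a sketch
;; of the intent only):
;;	teq	r0, r0		@ sets Z
;;	teq	pc, pc		@ Z remains set only in 32-bit mode
;; followed by a conditional move that selects -1 (all bits of the return
;; address are significant) when Z is set, and 0x03fffffc (the word-aligned
;; 26-bit PC field) otherwise.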
8691
8692 (define_expand "return_addr_mask"
8693 [(set (match_dup 1)
8694 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8695 (const_int 0)))
8696 (set (match_operand:SI 0 "s_register_operand" "")
8697 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8698 (const_int -1)
8699 (const_int 67108860)))] ; 0x03fffffc
8700 "TARGET_ARM"
8701 "
8702 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8703 ")
8704
8705 (define_insn "*check_arch2"
8706 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8707 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8708 (const_int 0)))]
8709 "TARGET_ARM"
8710 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8711 [(set_attr "length" "8")
8712 (set_attr "conds" "set")]
8713 )
8714
8715 ;; Call subroutine returning any type.
8716
8717 (define_expand "untyped_call"
8718 [(parallel [(call (match_operand 0 "" "")
8719 (const_int 0))
8720 (match_operand 1 "" "")
8721 (match_operand 2 "" "")])]
8722 "TARGET_EITHER"
8723 "
8724 {
8725 int i;
8726 rtx par = gen_rtx_PARALLEL (VOIDmode,
8727 rtvec_alloc (XVECLEN (operands[2], 0)));
8728 rtx addr = gen_reg_rtx (Pmode);
8729 rtx mem;
8730 int size = 0;
8731
8732 emit_move_insn (addr, XEXP (operands[1], 0));
8733 mem = change_address (operands[1], BLKmode, addr);
8734
8735 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8736 {
8737 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8738
8739 /* Default code only uses r0 as a return value, but we could
8740 be using anything up to 4 registers. */
8741 if (REGNO (src) == R0_REGNUM)
8742 src = gen_rtx_REG (TImode, R0_REGNUM);
8743
8744 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8745 GEN_INT (size));
8746 size += GET_MODE_SIZE (GET_MODE (src));
8747 }
8748
8749 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8750 const0_rtx));
8751
8752 size = 0;
8753
8754 for (i = 0; i < XVECLEN (par, 0); i++)
8755 {
8756 HOST_WIDE_INT offset = 0;
8757 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8758
8759 if (size != 0)
8760 emit_move_insn (addr, plus_constant (addr, size));
8761
8762 mem = change_address (mem, GET_MODE (reg), NULL);
8763 if (REGNO (reg) == R0_REGNUM)
8764 {
8765 /* On Thumb we have to use a write-back instruction. */
8766 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8767 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8768 size = TARGET_ARM ? 16 : 0;
8769 }
8770 else
8771 {
8772 emit_move_insn (mem, reg);
8773 size = GET_MODE_SIZE (GET_MODE (reg));
8774 }
8775 }
8776
8777 /* The optimizer does not know that the call sets the function value
8778 registers we stored in the result block. We avoid problems by
8779 claiming that all hard registers are used and clobbered at this
8780 point. */
8781 emit_insn (gen_blockage ());
8782
8783 DONE;
8784 }"
8785 )
8786
8787 (define_expand "untyped_return"
8788 [(match_operand:BLK 0 "memory_operand" "")
8789 (match_operand 1 "" "")]
8790 "TARGET_EITHER"
8791 "
8792 {
8793 int i;
8794 rtx addr = gen_reg_rtx (Pmode);
8795 rtx mem;
8796 int size = 0;
8797
8798 emit_move_insn (addr, XEXP (operands[0], 0));
8799 mem = change_address (operands[0], BLKmode, addr);
8800
8801 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8802 {
8803 HOST_WIDE_INT offset = 0;
8804 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8805
8806 if (size != 0)
8807 emit_move_insn (addr, plus_constant (addr, size));
8808
8809 mem = change_address (mem, GET_MODE (reg), NULL);
8810 if (REGNO (reg) == R0_REGNUM)
8811 {
8812 /* On Thumb we have to use a write-back instruction. */
8813 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8814 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8815 size = TARGET_ARM ? 16 : 0;
8816 }
8817 else
8818 {
8819 emit_move_insn (reg, mem);
8820 size = GET_MODE_SIZE (GET_MODE (reg));
8821 }
8822 }
8823
8824 /* Emit USE insns before the return. */
8825 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8826 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8827
8828 /* Construct the return. */
8829 expand_naked_return ();
8830
8831 DONE;
8832 }"
8833 )
8834
8835 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8836 ;; all of memory. This blocks insns from being moved across this point.
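;; For example, the untyped_call expander above emits gen_blockage () after
;; copying the function-value registers into the result block, so that those
;; stores are not moved or deleted even though the optimizer cannot see that
;; the call set the registers involved.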
8837
8838 (define_insn "blockage"
8839 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8840 "TARGET_EITHER"
8841 ""
8842 [(set_attr "length" "0")
8843 (set_attr "type" "block")]
8844 )
8845
8846 (define_expand "casesi"
8847 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8848 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8849 (match_operand:SI 2 "const_int_operand" "") ; total range
8850 (match_operand:SI 3 "" "") ; table label
8851 (match_operand:SI 4 "" "")] ; Out of range label
8852 "TARGET_32BIT || optimize_size || flag_pic"
8853 "
8854 {
8855 enum insn_code code;
8856 if (operands[1] != const0_rtx)
8857 {
8858 rtx reg = gen_reg_rtx (SImode);
8859
8860 emit_insn (gen_addsi3 (reg, operands[0],
8861 GEN_INT (-INTVAL (operands[1]))));
8862 operands[0] = reg;
8863 }
8864
8865 if (TARGET_ARM)
8866 code = CODE_FOR_arm_casesi_internal;
8867 else if (TARGET_THUMB1)
8868 code = CODE_FOR_thumb1_casesi_internal_pic;
8869 else if (flag_pic)
8870 code = CODE_FOR_thumb2_casesi_internal_pic;
8871 else
8872 code = CODE_FOR_thumb2_casesi_internal;
8873
8874 if (!insn_data[(int) code].operand[1].predicate (operands[2], SImode))
8875 operands[2] = force_reg (SImode, operands[2]);
8876
8877 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8878 operands[3], operands[4]));
8879 DONE;
8880 }"
8881 )
8882
8883 ;; The USE in this pattern is needed to tell flow analysis that this is
8884 ;; a CASESI insn. It has no other purpose.
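;; For illustration (a sketch; r0, #N and .Ldefault are placeholder names),
;; the non-PIC dispatch emitted by this pattern looks like:
;;	cmp	r0, #N			@ index against the table bound
;;	ldrls	pc, [pc, r0, asl #2]	@ in range: load target from table
;;	b	.Ldefault		@ out of range: default label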
8885 (define_insn "arm_casesi_internal"
8886 [(parallel [(set (pc)
8887 (if_then_else
8888 (leu (match_operand:SI 0 "s_register_operand" "r")
8889 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8890 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8891 (label_ref (match_operand 2 "" ""))))
8892 (label_ref (match_operand 3 "" ""))))
8893 (clobber (reg:CC CC_REGNUM))
8894 (use (label_ref (match_dup 2)))])]
8895 "TARGET_ARM"
8896 "*
8897 if (flag_pic)
8898 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8899 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8900 "
8901 [(set_attr "conds" "clob")
8902 (set_attr "length" "12")]
8903 )
8904
8905 (define_expand "thumb1_casesi_internal_pic"
8906 [(match_operand:SI 0 "s_register_operand" "")
8907 (match_operand:SI 1 "thumb1_cmp_operand" "")
8908 (match_operand 2 "" "")
8909 (match_operand 3 "" "")]
8910 "TARGET_THUMB1"
8911 {
8912 rtx reg0;
8913 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8914 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8915 operands[3]));
8916 reg0 = gen_rtx_REG (SImode, 0);
8917 emit_move_insn (reg0, operands[0]);
8918 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8919 DONE;
8920 }
8921 )
8922
8923 (define_insn "thumb1_casesi_dispatch"
8924 [(parallel [(set (pc) (unspec [(reg:SI 0)
8925 (label_ref (match_operand 0 "" ""))
8926 ;; (label_ref (match_operand 1 "" ""))
8927 ]
8928 UNSPEC_THUMB1_CASESI))
8929 (clobber (reg:SI IP_REGNUM))
8930 (clobber (reg:SI LR_REGNUM))])]
8931 "TARGET_THUMB1"
8932 "* return thumb1_output_casesi(operands);"
8933 [(set_attr "length" "4")]
8934 )
8935
8936 (define_expand "indirect_jump"
8937 [(set (pc)
8938 (match_operand:SI 0 "s_register_operand" ""))]
8939 "TARGET_EITHER"
8940 "
8941 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8942 address and use bx. */
8943 if (TARGET_THUMB2)
8944 {
8945 rtx tmp;
8946 tmp = gen_reg_rtx (SImode);
8947 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT (1)));
8948 operands[0] = tmp;
8949 }
8950 "
8951 )
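;; For example (a sketch only), an indirect jump through r3 in Thumb-2 mode
;; expands to roughly:
;;	orr	r2, r3, #1	@ copy into a scratch with bit 0 set
;;	bx	r2		@ bx keeps the processor in Thumb state
;; whereas ARM mode simply uses "mov pc, r3" (*arm_indirect_jump below).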
8952
8953 ;; NB Never uses BX.
8954 (define_insn "*arm_indirect_jump"
8955 [(set (pc)
8956 (match_operand:SI 0 "s_register_operand" "r"))]
8957 "TARGET_ARM"
8958 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8959 [(set_attr "predicable" "yes")]
8960 )
8961
8962 (define_insn "*load_indirect_jump"
8963 [(set (pc)
8964 (match_operand:SI 0 "memory_operand" "m"))]
8965 "TARGET_ARM"
8966 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8967 [(set_attr "type" "load1")
8968 (set_attr "pool_range" "4096")
8969 (set_attr "neg_pool_range" "4084")
8970 (set_attr "predicable" "yes")]
8971 )
8972
8973 ;; NB Never uses BX.
8974 (define_insn "*thumb1_indirect_jump"
8975 [(set (pc)
8976 (match_operand:SI 0 "register_operand" "l*r"))]
8977 "TARGET_THUMB1"
8978 "mov\\tpc, %0"
8979 [(set_attr "conds" "clob")
8980 (set_attr "length" "2")]
8981 )
8982
8983 \f
8984 ;; Misc insns
8985
8986 (define_insn "nop"
8987 [(const_int 0)]
8988 "TARGET_EITHER"
8989 "*
8990 if (TARGET_UNIFIED_ASM)
8991 return \"nop\";
8992 if (TARGET_ARM)
8993 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8994 return \"mov\\tr8, r8\";
8995 "
8996 [(set (attr "length")
8997 (if_then_else (eq_attr "is_thumb" "yes")
8998 (const_int 2)
8999 (const_int 4)))]
9000 )
9001
9002 \f
9003 ;; Patterns to allow combination of arithmetic, cond code and shifts
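;; For example (illustrative only), the *arith_shiftsi pattern below matches
;; RTL of the form (plus:SI (ashift:SI r4 (const_int 2)) r2) and emits a
;; single combined instruction such as
;;	add	r0, r2, r4, asl #2
;; rather than a separate shift followed by the arithmetic operation.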
9004
9005 (define_insn "*arith_shiftsi"
9006 [(set (match_operand:SI 0 "s_register_operand" "=r")
9007 (match_operator:SI 1 "shiftable_operator"
9008 [(match_operator:SI 3 "shift_operator"
9009 [(match_operand:SI 4 "s_register_operand" "r")
9010 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9011 (match_operand:SI 2 "s_register_operand" "r")]))]
9012 "TARGET_ARM"
9013 "%i1%?\\t%0, %2, %4%S3"
9014 [(set_attr "predicable" "yes")
9015 (set_attr "shift" "4")
9016 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9017 (const_string "alu_shift")
9018 (const_string "alu_shift_reg")))]
9019 )
9020
9021 (define_split
9022 [(set (match_operand:SI 0 "s_register_operand" "")
9023 (match_operator:SI 1 "shiftable_operator"
9024 [(match_operator:SI 2 "shiftable_operator"
9025 [(match_operator:SI 3 "shift_operator"
9026 [(match_operand:SI 4 "s_register_operand" "")
9027 (match_operand:SI 5 "reg_or_int_operand" "")])
9028 (match_operand:SI 6 "s_register_operand" "")])
9029 (match_operand:SI 7 "arm_rhs_operand" "")]))
9030 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9031 "TARGET_ARM"
9032 [(set (match_dup 8)
9033 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9034 (match_dup 6)]))
9035 (set (match_dup 0)
9036 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9037 "")
9038
9039 (define_insn "*arith_shiftsi_compare0"
9040 [(set (reg:CC_NOOV CC_REGNUM)
9041 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9042 [(match_operator:SI 3 "shift_operator"
9043 [(match_operand:SI 4 "s_register_operand" "r")
9044 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9045 (match_operand:SI 2 "s_register_operand" "r")])
9046 (const_int 0)))
9047 (set (match_operand:SI 0 "s_register_operand" "=r")
9048 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9049 (match_dup 2)]))]
9050 "TARGET_ARM"
9051 "%i1%.\\t%0, %2, %4%S3"
9052 [(set_attr "conds" "set")
9053 (set_attr "shift" "4")
9054 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9055 (const_string "alu_shift")
9056 (const_string "alu_shift_reg")))]
9057 )
9058
9059 (define_insn "*arith_shiftsi_compare0_scratch"
9060 [(set (reg:CC_NOOV CC_REGNUM)
9061 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9062 [(match_operator:SI 3 "shift_operator"
9063 [(match_operand:SI 4 "s_register_operand" "r")
9064 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9065 (match_operand:SI 2 "s_register_operand" "r")])
9066 (const_int 0)))
9067 (clobber (match_scratch:SI 0 "=r"))]
9068 "TARGET_ARM"
9069 "%i1%.\\t%0, %2, %4%S3"
9070 [(set_attr "conds" "set")
9071 (set_attr "shift" "4")
9072 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9073 (const_string "alu_shift")
9074 (const_string "alu_shift_reg")))]
9075 )
9076
9077 (define_insn "*sub_shiftsi"
9078 [(set (match_operand:SI 0 "s_register_operand" "=r")
9079 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9080 (match_operator:SI 2 "shift_operator"
9081 [(match_operand:SI 3 "s_register_operand" "r")
9082 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9083 "TARGET_ARM"
9084 "sub%?\\t%0, %1, %3%S2"
9085 [(set_attr "predicable" "yes")
9086 (set_attr "shift" "3")
9087 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9088 (const_string "alu_shift")
9089 (const_string "alu_shift_reg")))]
9090 )
9091
9092 (define_insn "*sub_shiftsi_compare0"
9093 [(set (reg:CC_NOOV CC_REGNUM)
9094 (compare:CC_NOOV
9095 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9096 (match_operator:SI 2 "shift_operator"
9097 [(match_operand:SI 3 "s_register_operand" "r")
9098 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9099 (const_int 0)))
9100 (set (match_operand:SI 0 "s_register_operand" "=r")
9101 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9102 (match_dup 4)])))]
9103 "TARGET_ARM"
9104 "sub%.\\t%0, %1, %3%S2"
9105 [(set_attr "conds" "set")
9106 (set_attr "shift" "3")
9107 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9108 (const_string "alu_shift")
9109 (const_string "alu_shift_reg")))]
9110 )
9111
9112 (define_insn "*sub_shiftsi_compare0_scratch"
9113 [(set (reg:CC_NOOV CC_REGNUM)
9114 (compare:CC_NOOV
9115 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9116 (match_operator:SI 2 "shift_operator"
9117 [(match_operand:SI 3 "s_register_operand" "r")
9118 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9119 (const_int 0)))
9120 (clobber (match_scratch:SI 0 "=r"))]
9121 "TARGET_ARM"
9122 "sub%.\\t%0, %1, %3%S2"
9123 [(set_attr "conds" "set")
9124 (set_attr "shift" "3")
9125 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9126 (const_string "alu_shift")
9127 (const_string "alu_shift_reg")))]
9128 )
9129
9130 \f
9131
9132 (define_insn "*and_scc"
9133 [(set (match_operand:SI 0 "s_register_operand" "=r")
9134 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9135 [(match_operand 3 "cc_register" "") (const_int 0)])
9136 (match_operand:SI 2 "s_register_operand" "r")))]
9137 "TARGET_ARM"
9138 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9139 [(set_attr "conds" "use")
9140 (set_attr "length" "8")]
9141 )
9142
9143 (define_insn "*ior_scc"
9144 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9145 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9146 [(match_operand 3 "cc_register" "") (const_int 0)])
9147 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9148 "TARGET_ARM"
9149 "@
9150 orr%d2\\t%0, %1, #1
9151 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9152 [(set_attr "conds" "use")
9153 (set_attr "length" "4,8")]
9154 )
9155
9156 (define_insn "*compare_scc"
9157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9158 (match_operator:SI 1 "arm_comparison_operator"
9159 [(match_operand:SI 2 "s_register_operand" "r,r")
9160 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9161 (clobber (reg:CC CC_REGNUM))]
9162 "TARGET_ARM"
9163 "*
9164 if (operands[3] == const0_rtx)
9165 {
9166 if (GET_CODE (operands[1]) == LT)
9167 return \"mov\\t%0, %2, lsr #31\";
9168
9169 if (GET_CODE (operands[1]) == GE)
9170 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9171
9172 if (GET_CODE (operands[1]) == EQ)
9173 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9174 }
9175
9176 if (GET_CODE (operands[1]) == NE)
9177 {
9178 if (which_alternative == 1)
9179 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9180 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9181 }
9182 if (which_alternative == 1)
9183 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9184 else
9185 output_asm_insn (\"cmp\\t%2, %3\", operands);
9186 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9187 "
9188 [(set_attr "conds" "clob")
9189 (set_attr "length" "12")]
9190 )
9191
9192 (define_insn "*cond_move"
9193 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9194 (if_then_else:SI (match_operator 3 "equality_operator"
9195 [(match_operator 4 "arm_comparison_operator"
9196 [(match_operand 5 "cc_register" "") (const_int 0)])
9197 (const_int 0)])
9198 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9199 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9200 "TARGET_ARM"
9201 "*
9202 if (GET_CODE (operands[3]) == NE)
9203 {
9204 if (which_alternative != 1)
9205 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9206 if (which_alternative != 0)
9207 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9208 return \"\";
9209 }
9210 if (which_alternative != 0)
9211 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9212 if (which_alternative != 1)
9213 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9214 return \"\";
9215 "
9216 [(set_attr "conds" "use")
9217 (set_attr "length" "4,4,8")]
9218 )
9219
9220 (define_insn "*cond_arith"
9221 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9222 (match_operator:SI 5 "shiftable_operator"
9223 [(match_operator:SI 4 "arm_comparison_operator"
9224 [(match_operand:SI 2 "s_register_operand" "r,r")
9225 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9226 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9227 (clobber (reg:CC CC_REGNUM))]
9228 "TARGET_ARM"
9229 "*
9230 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9231 return \"%i5\\t%0, %1, %2, lsr #31\";
9232
9233 output_asm_insn (\"cmp\\t%2, %3\", operands);
9234 if (GET_CODE (operands[5]) == AND)
9235 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9236 else if (GET_CODE (operands[5]) == MINUS)
9237 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9238 else if (which_alternative != 0)
9239 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9240 return \"%i5%d4\\t%0, %1, #1\";
9241 "
9242 [(set_attr "conds" "clob")
9243 (set_attr "length" "12")]
9244 )
9245
9246 (define_insn "*cond_sub"
9247 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9248 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9249 (match_operator:SI 4 "arm_comparison_operator"
9250 [(match_operand:SI 2 "s_register_operand" "r,r")
9251 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9252 (clobber (reg:CC CC_REGNUM))]
9253 "TARGET_ARM"
9254 "*
9255 output_asm_insn (\"cmp\\t%2, %3\", operands);
9256 if (which_alternative != 0)
9257 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9258 return \"sub%d4\\t%0, %1, #1\";
9259 "
9260 [(set_attr "conds" "clob")
9261 (set_attr "length" "8,12")]
9262 )
9263
9264 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9265 (define_insn "*cmp_ite0"
9266 [(set (match_operand 6 "dominant_cc_register" "")
9267 (compare
9268 (if_then_else:SI
9269 (match_operator 4 "arm_comparison_operator"
9270 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9271 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9272 (match_operator:SI 5 "arm_comparison_operator"
9273 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9274 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9275 (const_int 0))
9276 (const_int 0)))]
9277 "TARGET_ARM"
9278 "*
9279 {
9280 static const char * const opcodes[4][2] =
9281 {
9282 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9283 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9284 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9285 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9286 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9287 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9288 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9289 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9290 };
9291 int swap =
9292 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9293
9294 return opcodes[which_alternative][swap];
9295 }"
9296 [(set_attr "conds" "set")
9297 (set_attr "length" "8")]
9298 )
9299
9300 (define_insn "*cmp_ite1"
9301 [(set (match_operand 6 "dominant_cc_register" "")
9302 (compare
9303 (if_then_else:SI
9304 (match_operator 4 "arm_comparison_operator"
9305 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9306 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9307 (match_operator:SI 5 "arm_comparison_operator"
9308 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9309 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9310 (const_int 1))
9311 (const_int 0)))]
9312 "TARGET_ARM"
9313 "*
9314 {
9315 static const char * const opcodes[4][2] =
9316 {
9317 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9318 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9319 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9320 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9321 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9322 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9323 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9324 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9325 };
9326 int swap =
9327 comparison_dominates_p (GET_CODE (operands[5]),
9328 reverse_condition (GET_CODE (operands[4])));
9329
9330 return opcodes[which_alternative][swap];
9331 }"
9332 [(set_attr "conds" "set")
9333 (set_attr "length" "8")]
9334 )
9335
9336 (define_insn "*cmp_and"
9337 [(set (match_operand 6 "dominant_cc_register" "")
9338 (compare
9339 (and:SI
9340 (match_operator 4 "arm_comparison_operator"
9341 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9342 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9343 (match_operator:SI 5 "arm_comparison_operator"
9344 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9345 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9346 (const_int 0)))]
9347 "TARGET_ARM"
9348 "*
9349 {
9350 static const char *const opcodes[4][2] =
9351 {
9352 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9353 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9354 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9355 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9356 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9357 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9358 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9359 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9360 };
9361 int swap =
9362 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9363
9364 return opcodes[which_alternative][swap];
9365 }"
9366 [(set_attr "conds" "set")
9367 (set_attr "predicable" "no")
9368 (set_attr "length" "8")]
9369 )
9370
9371 (define_insn "*cmp_ior"
9372 [(set (match_operand 6 "dominant_cc_register" "")
9373 (compare
9374 (ior:SI
9375 (match_operator 4 "arm_comparison_operator"
9376 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9377 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9378 (match_operator:SI 5 "arm_comparison_operator"
9379 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9380 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9381 (const_int 0)))]
9382 "TARGET_ARM"
9383 "*
9384 {
9385 static const char *const opcodes[4][2] =
9386 {
9387 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9388 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9389 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9390 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9391 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9392 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9393 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9394 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9395 };
9396 int swap =
9397 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9398
9399 return opcodes[which_alternative][swap];
9400 }
9401 "
9402 [(set_attr "conds" "set")
9403 (set_attr "length" "8")]
9404 )
9405
9406 (define_insn_and_split "*ior_scc_scc"
9407 [(set (match_operand:SI 0 "s_register_operand" "=r")
9408 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9409 [(match_operand:SI 1 "s_register_operand" "r")
9410 (match_operand:SI 2 "arm_add_operand" "rIL")])
9411 (match_operator:SI 6 "arm_comparison_operator"
9412 [(match_operand:SI 4 "s_register_operand" "r")
9413 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9414 (clobber (reg:CC CC_REGNUM))]
9415 "TARGET_ARM
9416 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9417 != CCmode)"
9418 "#"
9419 "TARGET_ARM && reload_completed"
9420 [(set (match_dup 7)
9421 (compare
9422 (ior:SI
9423 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9424 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9425 (const_int 0)))
9426 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9427 "operands[7]
9428 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9429 DOM_CC_X_OR_Y),
9430 CC_REGNUM);"
9431 [(set_attr "conds" "clob")
9432 (set_attr "length" "16")])
9433
9434 ; If the above pattern is followed by a CMP insn, then the compare is
9435 ; redundant, since we can rework the conditional instruction that follows.
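; For example (a sketch, not literal output), for code like
;	r = (a == 0) | (b == 5);  if (r) ...
; the ior of the two condition results already leaves the combined test in
; the dominance CC register, so a following comparison of r against zero
; adds no information and the conditional code after it can use that CC
; result directly.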
9436 (define_insn_and_split "*ior_scc_scc_cmp"
9437 [(set (match_operand 0 "dominant_cc_register" "")
9438 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9439 [(match_operand:SI 1 "s_register_operand" "r")
9440 (match_operand:SI 2 "arm_add_operand" "rIL")])
9441 (match_operator:SI 6 "arm_comparison_operator"
9442 [(match_operand:SI 4 "s_register_operand" "r")
9443 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9444 (const_int 0)))
9445 (set (match_operand:SI 7 "s_register_operand" "=r")
9446 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9447 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9448 "TARGET_ARM"
9449 "#"
9450 "TARGET_ARM && reload_completed"
9451 [(set (match_dup 0)
9452 (compare
9453 (ior:SI
9454 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9455 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9456 (const_int 0)))
9457 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9458 ""
9459 [(set_attr "conds" "set")
9460 (set_attr "length" "16")])
9461
9462 (define_insn_and_split "*and_scc_scc"
9463 [(set (match_operand:SI 0 "s_register_operand" "=r")
9464 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9465 [(match_operand:SI 1 "s_register_operand" "r")
9466 (match_operand:SI 2 "arm_add_operand" "rIL")])
9467 (match_operator:SI 6 "arm_comparison_operator"
9468 [(match_operand:SI 4 "s_register_operand" "r")
9469 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9470 (clobber (reg:CC CC_REGNUM))]
9471 "TARGET_ARM
9472 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9473 != CCmode)"
9474 "#"
9475 "TARGET_ARM && reload_completed
9476 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9477 != CCmode)"
9478 [(set (match_dup 7)
9479 (compare
9480 (and:SI
9481 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9482 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9483 (const_int 0)))
9484 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9485 "operands[7]
9486 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9487 DOM_CC_X_AND_Y),
9488 CC_REGNUM);"
9489 [(set_attr "conds" "clob")
9490 (set_attr "length" "16")])
9491
9492 ; If the above pattern is followed by a CMP insn, then the compare is
9493 ; redundant, since we can rework the conditional instruction that follows.
9494 (define_insn_and_split "*and_scc_scc_cmp"
9495 [(set (match_operand 0 "dominant_cc_register" "")
9496 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9497 [(match_operand:SI 1 "s_register_operand" "r")
9498 (match_operand:SI 2 "arm_add_operand" "rIL")])
9499 (match_operator:SI 6 "arm_comparison_operator"
9500 [(match_operand:SI 4 "s_register_operand" "r")
9501 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9502 (const_int 0)))
9503 (set (match_operand:SI 7 "s_register_operand" "=r")
9504 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9505 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9506 "TARGET_ARM"
9507 "#"
9508 "TARGET_ARM && reload_completed"
9509 [(set (match_dup 0)
9510 (compare
9511 (and:SI
9512 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9513 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9514 (const_int 0)))
9515 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9516 ""
9517 [(set_attr "conds" "set")
9518 (set_attr "length" "16")])
9519
9520 ;; If there is no dominance in the comparison, then we can still save an
9521 ;; instruction in the AND case, since we know that the second compare
9522 ;; only needs to zero the value if its condition is false (if it is true,
9523 ;; then the value is already correct).
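;; For example (illustrative only), for r = (a < b) & (c >= d) the split
;; below computes the first comparison into r with an ordinary scc sequence,
;; performs the second comparison, and then conditionally clears r when that
;; second test is false, instead of materialising a second full scc value
;; and ANDing the two together.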
9524 (define_insn_and_split "*and_scc_scc_nodom"
9525 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9526 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9527 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9528 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9529 (match_operator:SI 6 "arm_comparison_operator"
9530 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9531 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9532 (clobber (reg:CC CC_REGNUM))]
9533 "TARGET_ARM
9534 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9535 == CCmode)"
9536 "#"
9537 "TARGET_ARM && reload_completed"
9538 [(parallel [(set (match_dup 0)
9539 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9540 (clobber (reg:CC CC_REGNUM))])
9541 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9542 (set (match_dup 0)
9543 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9544 (match_dup 0)
9545 (const_int 0)))]
9546 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9547 operands[4], operands[5]),
9548 CC_REGNUM);
9549 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9550 operands[5]);"
9551 [(set_attr "conds" "clob")
9552 (set_attr "length" "20")])
9553
9554 (define_split
9555 [(set (reg:CC_NOOV CC_REGNUM)
9556 (compare:CC_NOOV (ior:SI
9557 (and:SI (match_operand:SI 0 "s_register_operand" "")
9558 (const_int 1))
9559 (match_operator:SI 1 "arm_comparison_operator"
9560 [(match_operand:SI 2 "s_register_operand" "")
9561 (match_operand:SI 3 "arm_add_operand" "")]))
9562 (const_int 0)))
9563 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9564 "TARGET_ARM"
9565 [(set (match_dup 4)
9566 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9567 (match_dup 0)))
9568 (set (reg:CC_NOOV CC_REGNUM)
9569 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9570 (const_int 0)))]
9571 "")
9572
9573 (define_split
9574 [(set (reg:CC_NOOV CC_REGNUM)
9575 (compare:CC_NOOV (ior:SI
9576 (match_operator:SI 1 "arm_comparison_operator"
9577 [(match_operand:SI 2 "s_register_operand" "")
9578 (match_operand:SI 3 "arm_add_operand" "")])
9579 (and:SI (match_operand:SI 0 "s_register_operand" "")
9580 (const_int 1)))
9581 (const_int 0)))
9582 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9583 "TARGET_ARM"
9584 [(set (match_dup 4)
9585 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9586 (match_dup 0)))
9587 (set (reg:CC_NOOV CC_REGNUM)
9588 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9589 (const_int 0)))]
9590 "")
9591 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness.
9592
9593 (define_insn "*negscc"
9594 [(set (match_operand:SI 0 "s_register_operand" "=r")
9595 (neg:SI (match_operator 3 "arm_comparison_operator"
9596 [(match_operand:SI 1 "s_register_operand" "r")
9597 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9598 (clobber (reg:CC CC_REGNUM))]
9599 "TARGET_ARM"
9600 "*
9601 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9602 return \"mov\\t%0, %1, asr #31\";
9603
9604 if (GET_CODE (operands[3]) == NE)
9605 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9606
9607 output_asm_insn (\"cmp\\t%1, %2\", operands);
9608 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9609 return \"mvn%d3\\t%0, #0\";
9610 "
9611 [(set_attr "conds" "clob")
9612 (set_attr "length" "12")]
9613 )
9614
9615 (define_insn "movcond"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9617 (if_then_else:SI
9618 (match_operator 5 "arm_comparison_operator"
9619 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9620 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9621 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9622 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9623 (clobber (reg:CC CC_REGNUM))]
9624 "TARGET_ARM"
9625 "*
9626 if (GET_CODE (operands[5]) == LT
9627 && (operands[4] == const0_rtx))
9628 {
9629 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9630 {
9631 if (operands[2] == const0_rtx)
9632 return \"and\\t%0, %1, %3, asr #31\";
9633 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9634 }
9635 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9636 {
9637 if (operands[1] == const0_rtx)
9638 return \"bic\\t%0, %2, %3, asr #31\";
9639 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9640 }
9641 /* The only case that falls through to here is when both ops 1 & 2
9642 are constants. */
9643 }
9644
9645 if (GET_CODE (operands[5]) == GE
9646 && (operands[4] == const0_rtx))
9647 {
9648 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9649 {
9650 if (operands[2] == const0_rtx)
9651 return \"bic\\t%0, %1, %3, asr #31\";
9652 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9653 }
9654 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9655 {
9656 if (operands[1] == const0_rtx)
9657 return \"and\\t%0, %2, %3, asr #31\";
9658 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9659 }
9660 /* The only case that falls through to here is when both ops 1 & 2
9661 are constants. */
9662 }
9663 if (GET_CODE (operands[4]) == CONST_INT
9664 && !const_ok_for_arm (INTVAL (operands[4])))
9665 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9666 else
9667 output_asm_insn (\"cmp\\t%3, %4\", operands);
9668 if (which_alternative != 0)
9669 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9670 if (which_alternative != 1)
9671 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9672 return \"\";
9673 "
9674 [(set_attr "conds" "clob")
9675 (set_attr "length" "8,8,12")]
9676 )
9677
9678 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9679
9680 (define_insn "*ifcompare_plus_move"
9681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9682 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9683 [(match_operand:SI 4 "s_register_operand" "r,r")
9684 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9685 (plus:SI
9686 (match_operand:SI 2 "s_register_operand" "r,r")
9687 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9688 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9689 (clobber (reg:CC CC_REGNUM))]
9690 "TARGET_ARM"
9691 "#"
9692 [(set_attr "conds" "clob")
9693 (set_attr "length" "8,12")]
9694 )
9695
9696 (define_insn "*if_plus_move"
9697 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9698 (if_then_else:SI
9699 (match_operator 4 "arm_comparison_operator"
9700 [(match_operand 5 "cc_register" "") (const_int 0)])
9701 (plus:SI
9702 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9703 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9704 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9705 "TARGET_ARM"
9706 "@
9707 add%d4\\t%0, %2, %3
9708 sub%d4\\t%0, %2, #%n3
9709 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9710 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9711 [(set_attr "conds" "use")
9712 (set_attr "length" "4,4,8,8")
9713 (set_attr "type" "*,*,*,*")]
9714 )
9715
9716 (define_insn "*ifcompare_move_plus"
9717 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9718 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9719 [(match_operand:SI 4 "s_register_operand" "r,r")
9720 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9721 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9722 (plus:SI
9723 (match_operand:SI 2 "s_register_operand" "r,r")
9724 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9725 (clobber (reg:CC CC_REGNUM))]
9726 "TARGET_ARM"
9727 "#"
9728 [(set_attr "conds" "clob")
9729 (set_attr "length" "8,12")]
9730 )
9731
9732 (define_insn "*if_move_plus"
9733 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9734 (if_then_else:SI
9735 (match_operator 4 "arm_comparison_operator"
9736 [(match_operand 5 "cc_register" "") (const_int 0)])
9737 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9738 (plus:SI
9739 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9740 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9741 "TARGET_ARM"
9742 "@
9743 add%D4\\t%0, %2, %3
9744 sub%D4\\t%0, %2, #%n3
9745 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9746 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9747 [(set_attr "conds" "use")
9748 (set_attr "length" "4,4,8,8")
9749 (set_attr "type" "*,*,*,*")]
9750 )
9751
9752 (define_insn "*ifcompare_arith_arith"
9753 [(set (match_operand:SI 0 "s_register_operand" "=r")
9754 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9755 [(match_operand:SI 5 "s_register_operand" "r")
9756 (match_operand:SI 6 "arm_add_operand" "rIL")])
9757 (match_operator:SI 8 "shiftable_operator"
9758 [(match_operand:SI 1 "s_register_operand" "r")
9759 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9760 (match_operator:SI 7 "shiftable_operator"
9761 [(match_operand:SI 3 "s_register_operand" "r")
9762 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9763 (clobber (reg:CC CC_REGNUM))]
9764 "TARGET_ARM"
9765 "#"
9766 [(set_attr "conds" "clob")
9767 (set_attr "length" "12")]
9768 )
9769
9770 (define_insn "*if_arith_arith"
9771 [(set (match_operand:SI 0 "s_register_operand" "=r")
9772 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9773 [(match_operand 8 "cc_register" "") (const_int 0)])
9774 (match_operator:SI 6 "shiftable_operator"
9775 [(match_operand:SI 1 "s_register_operand" "r")
9776 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9777 (match_operator:SI 7 "shiftable_operator"
9778 [(match_operand:SI 3 "s_register_operand" "r")
9779 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9780 "TARGET_ARM"
9781 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9782 [(set_attr "conds" "use")
9783 (set_attr "length" "8")]
9784 )
9785
9786 (define_insn "*ifcompare_arith_move"
9787 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9788 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9789 [(match_operand:SI 2 "s_register_operand" "r,r")
9790 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9791 (match_operator:SI 7 "shiftable_operator"
9792 [(match_operand:SI 4 "s_register_operand" "r,r")
9793 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9794 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9795 (clobber (reg:CC CC_REGNUM))]
9796 "TARGET_ARM"
9797 "*
9798 /* If we have an operation where (op x 0) is the identity operation, the
9799 conditional operator is LT or GE, we are comparing against zero, and
9800 everything is in registers, then we can do this in two instructions.  */
9801 if (operands[3] == const0_rtx
9802 && GET_CODE (operands[7]) != AND
9803 && GET_CODE (operands[5]) == REG
9804 && GET_CODE (operands[1]) == REG
9805 && REGNO (operands[1]) == REGNO (operands[4])
9806 && REGNO (operands[4]) != REGNO (operands[0]))
9807 {
9808 if (GET_CODE (operands[6]) == LT)
9809 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9810 else if (GET_CODE (operands[6]) == GE)
9811 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9812 }
9813 if (GET_CODE (operands[3]) == CONST_INT
9814 && !const_ok_for_arm (INTVAL (operands[3])))
9815 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9816 else
9817 output_asm_insn (\"cmp\\t%2, %3\", operands);
9818 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9819 if (which_alternative != 0)
9820 return \"mov%D6\\t%0, %1\";
9821 return \"\";
9822 "
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "8,12")]
9825 )
9826
9827 (define_insn "*if_arith_move"
9828 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9829 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9830 [(match_operand 6 "cc_register" "") (const_int 0)])
9831 (match_operator:SI 5 "shiftable_operator"
9832 [(match_operand:SI 2 "s_register_operand" "r,r")
9833 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9834 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9835 "TARGET_ARM"
9836 "@
9837 %I5%d4\\t%0, %2, %3
9838 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9839 [(set_attr "conds" "use")
9840 (set_attr "length" "4,8")
9841 (set_attr "type" "*,*")]
9842 )
9843
9844 (define_insn "*ifcompare_move_arith"
9845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9846 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9847 [(match_operand:SI 4 "s_register_operand" "r,r")
9848 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9849 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9850 (match_operator:SI 7 "shiftable_operator"
9851 [(match_operand:SI 2 "s_register_operand" "r,r")
9852 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9853 (clobber (reg:CC CC_REGNUM))]
9854 "TARGET_ARM"
9855 "*
9856 /* If we have an operation where (op x 0) is the identity operation, the
9857 conditional operator is LT or GE, we are comparing against zero, and
9858 everything is in registers, then we can do this in two instructions.  */
9859 if (operands[5] == const0_rtx
9860 && GET_CODE (operands[7]) != AND
9861 && GET_CODE (operands[3]) == REG
9862 && GET_CODE (operands[1]) == REG
9863 && REGNO (operands[1]) == REGNO (operands[2])
9864 && REGNO (operands[2]) != REGNO (operands[0]))
9865 {
9866 if (GET_CODE (operands[6]) == GE)
9867 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9868 else if (GET_CODE (operands[6]) == LT)
9869 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9870 }
9871
9872 if (GET_CODE (operands[5]) == CONST_INT
9873 && !const_ok_for_arm (INTVAL (operands[5])))
9874 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9875 else
9876 output_asm_insn (\"cmp\\t%4, %5\", operands);
9877
9878 if (which_alternative != 0)
9879 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9880 return \"%I7%D6\\t%0, %2, %3\";
9881 "
9882 [(set_attr "conds" "clob")
9883 (set_attr "length" "8,12")]
9884 )
9885
9886 (define_insn "*if_move_arith"
9887 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9888 (if_then_else:SI
9889 (match_operator 4 "arm_comparison_operator"
9890 [(match_operand 6 "cc_register" "") (const_int 0)])
9891 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9892 (match_operator:SI 5 "shiftable_operator"
9893 [(match_operand:SI 2 "s_register_operand" "r,r")
9894 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9895 "TARGET_ARM"
9896 "@
9897 %I5%D4\\t%0, %2, %3
9898 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9899 [(set_attr "conds" "use")
9900 (set_attr "length" "4,8")
9901 (set_attr "type" "*,*")]
9902 )
9903
9904 (define_insn "*ifcompare_move_not"
9905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9906 (if_then_else:SI
9907 (match_operator 5 "arm_comparison_operator"
9908 [(match_operand:SI 3 "s_register_operand" "r,r")
9909 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9910 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9911 (not:SI
9912 (match_operand:SI 2 "s_register_operand" "r,r"))))
9913 (clobber (reg:CC CC_REGNUM))]
9914 "TARGET_ARM"
9915 "#"
9916 [(set_attr "conds" "clob")
9917 (set_attr "length" "8,12")]
9918 )
9919
9920 (define_insn "*if_move_not"
9921 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9922 (if_then_else:SI
9923 (match_operator 4 "arm_comparison_operator"
9924 [(match_operand 3 "cc_register" "") (const_int 0)])
9925 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9926 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9927 "TARGET_ARM"
9928 "@
9929 mvn%D4\\t%0, %2
9930 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9931 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9932 [(set_attr "conds" "use")
9933 (set_attr "length" "4,8,8")]
9934 )
9935
9936 (define_insn "*ifcompare_not_move"
9937 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9938 (if_then_else:SI
9939 (match_operator 5 "arm_comparison_operator"
9940 [(match_operand:SI 3 "s_register_operand" "r,r")
9941 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9942 (not:SI
9943 (match_operand:SI 2 "s_register_operand" "r,r"))
9944 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9945 (clobber (reg:CC CC_REGNUM))]
9946 "TARGET_ARM"
9947 "#"
9948 [(set_attr "conds" "clob")
9949 (set_attr "length" "8,12")]
9950 )
9951
9952 (define_insn "*if_not_move"
9953 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9954 (if_then_else:SI
9955 (match_operator 4 "arm_comparison_operator"
9956 [(match_operand 3 "cc_register" "") (const_int 0)])
9957 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9958 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9959 "TARGET_ARM"
9960 "@
9961 mvn%d4\\t%0, %2
9962 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9963 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9964 [(set_attr "conds" "use")
9965 (set_attr "length" "4,8,8")]
9966 )
9967
9968 (define_insn "*ifcompare_shift_move"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9970 (if_then_else:SI
9971 (match_operator 6 "arm_comparison_operator"
9972 [(match_operand:SI 4 "s_register_operand" "r,r")
9973 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9974 (match_operator:SI 7 "shift_operator"
9975 [(match_operand:SI 2 "s_register_operand" "r,r")
9976 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9977 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9978 (clobber (reg:CC CC_REGNUM))]
9979 "TARGET_ARM"
9980 "#"
9981 [(set_attr "conds" "clob")
9982 (set_attr "length" "8,12")]
9983 )
9984
9985 (define_insn "*if_shift_move"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9987 (if_then_else:SI
9988 (match_operator 5 "arm_comparison_operator"
9989 [(match_operand 6 "cc_register" "") (const_int 0)])
9990 (match_operator:SI 4 "shift_operator"
9991 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9992 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9993 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9994 "TARGET_ARM"
9995 "@
9996 mov%d5\\t%0, %2%S4
9997 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9998 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9999 [(set_attr "conds" "use")
10000 (set_attr "shift" "2")
10001 (set_attr "length" "4,8,8")
10002 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10003 (const_string "alu_shift")
10004 (const_string "alu_shift_reg")))]
10005 )
10006
10007 (define_insn "*ifcompare_move_shift"
10008 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10009 (if_then_else:SI
10010 (match_operator 6 "arm_comparison_operator"
10011 [(match_operand:SI 4 "s_register_operand" "r,r")
10012 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10013 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10014 (match_operator:SI 7 "shift_operator"
10015 [(match_operand:SI 2 "s_register_operand" "r,r")
10016 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10017 (clobber (reg:CC CC_REGNUM))]
10018 "TARGET_ARM"
10019 "#"
10020 [(set_attr "conds" "clob")
10021 (set_attr "length" "8,12")]
10022 )
10023
10024 (define_insn "*if_move_shift"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10026 (if_then_else:SI
10027 (match_operator 5 "arm_comparison_operator"
10028 [(match_operand 6 "cc_register" "") (const_int 0)])
10029 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10030 (match_operator:SI 4 "shift_operator"
10031 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10032 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10033 "TARGET_ARM"
10034 "@
10035 mov%D5\\t%0, %2%S4
10036 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10037 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10038 [(set_attr "conds" "use")
10039 (set_attr "shift" "2")
10040 (set_attr "length" "4,8,8")
10041 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10042 (const_string "alu_shift")
10043 (const_string "alu_shift_reg")))]
10044 )
10045
10046 (define_insn "*ifcompare_shift_shift"
10047 [(set (match_operand:SI 0 "s_register_operand" "=r")
10048 (if_then_else:SI
10049 (match_operator 7 "arm_comparison_operator"
10050 [(match_operand:SI 5 "s_register_operand" "r")
10051 (match_operand:SI 6 "arm_add_operand" "rIL")])
10052 (match_operator:SI 8 "shift_operator"
10053 [(match_operand:SI 1 "s_register_operand" "r")
10054 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10055 (match_operator:SI 9 "shift_operator"
10056 [(match_operand:SI 3 "s_register_operand" "r")
10057 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10058 (clobber (reg:CC CC_REGNUM))]
10059 "TARGET_ARM"
10060 "#"
10061 [(set_attr "conds" "clob")
10062 (set_attr "length" "12")]
10063 )
10064
10065 (define_insn "*if_shift_shift"
10066 [(set (match_operand:SI 0 "s_register_operand" "=r")
10067 (if_then_else:SI
10068 (match_operator 5 "arm_comparison_operator"
10069 [(match_operand 8 "cc_register" "") (const_int 0)])
10070 (match_operator:SI 6 "shift_operator"
10071 [(match_operand:SI 1 "s_register_operand" "r")
10072 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10073 (match_operator:SI 7 "shift_operator"
10074 [(match_operand:SI 3 "s_register_operand" "r")
10075 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10076 "TARGET_ARM"
10077 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10078 [(set_attr "conds" "use")
10079 (set_attr "shift" "1")
10080 (set_attr "length" "8")
10081 (set (attr "type") (if_then_else
10082 (and (match_operand 2 "const_int_operand" "")
10083 (match_operand 4 "const_int_operand" ""))
10084 (const_string "alu_shift")
10085 (const_string "alu_shift_reg")))]
10086 )
10087
10088 (define_insn "*ifcompare_not_arith"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r")
10090 (if_then_else:SI
10091 (match_operator 6 "arm_comparison_operator"
10092 [(match_operand:SI 4 "s_register_operand" "r")
10093 (match_operand:SI 5 "arm_add_operand" "rIL")])
10094 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10095 (match_operator:SI 7 "shiftable_operator"
10096 [(match_operand:SI 2 "s_register_operand" "r")
10097 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10098 (clobber (reg:CC CC_REGNUM))]
10099 "TARGET_ARM"
10100 "#"
10101 [(set_attr "conds" "clob")
10102 (set_attr "length" "12")]
10103 )
10104
10105 (define_insn "*if_not_arith"
10106 [(set (match_operand:SI 0 "s_register_operand" "=r")
10107 (if_then_else:SI
10108 (match_operator 5 "arm_comparison_operator"
10109 [(match_operand 4 "cc_register" "") (const_int 0)])
10110 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10111 (match_operator:SI 6 "shiftable_operator"
10112 [(match_operand:SI 2 "s_register_operand" "r")
10113 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10114 "TARGET_ARM"
10115 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10116 [(set_attr "conds" "use")
10117 (set_attr "length" "8")]
10118 )
10119
10120 (define_insn "*ifcompare_arith_not"
10121 [(set (match_operand:SI 0 "s_register_operand" "=r")
10122 (if_then_else:SI
10123 (match_operator 6 "arm_comparison_operator"
10124 [(match_operand:SI 4 "s_register_operand" "r")
10125 (match_operand:SI 5 "arm_add_operand" "rIL")])
10126 (match_operator:SI 7 "shiftable_operator"
10127 [(match_operand:SI 2 "s_register_operand" "r")
10128 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10129 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10130 (clobber (reg:CC CC_REGNUM))]
10131 "TARGET_ARM"
10132 "#"
10133 [(set_attr "conds" "clob")
10134 (set_attr "length" "12")]
10135 )
10136
10137 (define_insn "*if_arith_not"
10138 [(set (match_operand:SI 0 "s_register_operand" "=r")
10139 (if_then_else:SI
10140 (match_operator 5 "arm_comparison_operator"
10141 [(match_operand 4 "cc_register" "") (const_int 0)])
10142 (match_operator:SI 6 "shiftable_operator"
10143 [(match_operand:SI 2 "s_register_operand" "r")
10144 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10145 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10146 "TARGET_ARM"
10147 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10148 [(set_attr "conds" "use")
10149 (set_attr "length" "8")]
10150 )
10151
10152 (define_insn "*ifcompare_neg_move"
10153 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10154 (if_then_else:SI
10155 (match_operator 5 "arm_comparison_operator"
10156 [(match_operand:SI 3 "s_register_operand" "r,r")
10157 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10158 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10159 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10160 (clobber (reg:CC CC_REGNUM))]
10161 "TARGET_ARM"
10162 "#"
10163 [(set_attr "conds" "clob")
10164 (set_attr "length" "8,12")]
10165 )
10166
10167 (define_insn "*if_neg_move"
10168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10169 (if_then_else:SI
10170 (match_operator 4 "arm_comparison_operator"
10171 [(match_operand 3 "cc_register" "") (const_int 0)])
10172 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10173 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10174 "TARGET_ARM"
10175 "@
10176 rsb%d4\\t%0, %2, #0
10177 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10178 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10179 [(set_attr "conds" "use")
10180 (set_attr "length" "4,8,8")]
10181 )
10182
10183 (define_insn "*ifcompare_move_neg"
10184 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10185 (if_then_else:SI
10186 (match_operator 5 "arm_comparison_operator"
10187 [(match_operand:SI 3 "s_register_operand" "r,r")
10188 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10189 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10190 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10191 (clobber (reg:CC CC_REGNUM))]
10192 "TARGET_ARM"
10193 "#"
10194 [(set_attr "conds" "clob")
10195 (set_attr "length" "8,12")]
10196 )
10197
10198 (define_insn "*if_move_neg"
10199 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10200 (if_then_else:SI
10201 (match_operator 4 "arm_comparison_operator"
10202 [(match_operand 3 "cc_register" "") (const_int 0)])
10203 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10204 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10205 "TARGET_ARM"
10206 "@
10207 rsb%D4\\t%0, %2, #0
10208 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10209 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10210 [(set_attr "conds" "use")
10211 (set_attr "length" "4,8,8")]
10212 )
10213
10214 (define_insn "*arith_adjacentmem"
10215 [(set (match_operand:SI 0 "s_register_operand" "=r")
10216 (match_operator:SI 1 "shiftable_operator"
10217 [(match_operand:SI 2 "memory_operand" "m")
10218 (match_operand:SI 3 "memory_operand" "m")]))
10219 (clobber (match_scratch:SI 4 "=r"))]
10220 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10221 "*
10222 {
10223 rtx ldm[3];
10224 rtx arith[4];
10225 rtx base_reg;
10226 HOST_WIDE_INT val1 = 0, val2 = 0;
10227
10228 if (REGNO (operands[0]) > REGNO (operands[4]))
10229 {
10230 ldm[1] = operands[4];
10231 ldm[2] = operands[0];
10232 }
10233 else
10234 {
10235 ldm[1] = operands[0];
10236 ldm[2] = operands[4];
10237 }
10238
10239 base_reg = XEXP (operands[2], 0);
10240
10241 if (!REG_P (base_reg))
10242 {
10243 val1 = INTVAL (XEXP (base_reg, 1));
10244 base_reg = XEXP (base_reg, 0);
10245 }
10246
10247 if (!REG_P (XEXP (operands[3], 0)))
10248 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10249
10250 arith[0] = operands[0];
10251 arith[3] = operands[1];
10252
10253 if (val1 < val2)
10254 {
10255 arith[1] = ldm[1];
10256 arith[2] = ldm[2];
10257 }
10258 else
10259 {
10260 arith[1] = ldm[2];
10261 arith[2] = ldm[1];
10262 }
10263
10264 ldm[0] = base_reg;
10265   if (val1 != 0 && val2 != 0)
10266 {
10267 rtx ops[3];
10268
10269 if (val1 == 4 || val2 == 4)
10270 /* Other val must be 8, since we know they are adjacent and neither
10271 is zero. */
10272 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10273 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10274 {
10275 ldm[0] = ops[0] = operands[4];
10276 ops[1] = base_reg;
10277 ops[2] = GEN_INT (val1);
10278 output_add_immediate (ops);
10279 if (val1 < val2)
10280 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10281 else
10282 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10283 }
10284 else
10285 {
10286 /* Offset is out of range for a single add, so use two ldr. */
10287 ops[0] = ldm[1];
10288 ops[1] = base_reg;
10289 ops[2] = GEN_INT (val1);
10290 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10291 ops[0] = ldm[2];
10292 ops[2] = GEN_INT (val2);
10293 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10294 }
10295 }
10296 else if (val1 != 0)
10297 {
10298 if (val1 < val2)
10299 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10300 else
10301 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10302 }
10303 else
10304 {
10305 if (val1 < val2)
10306 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10307 else
10308 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10309 }
10310 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10311 return \"\";
10312 }"
10313 [(set_attr "length" "12")
10314 (set_attr "predicable" "yes")
10315 (set_attr "type" "load1")]
10316 )
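
;; For illustration only (assuming an addition of the two words at [r1] and
;; [r1, #4] into r0, with r4 as the scratch register), the output code above
;; would emit roughly:
;;         ldmia   r1, {r0, r4}
;;         add     r0, r0, r4
;; The more involved branches handle bases that first need adjusting, or
;; offsets too large for a single add, falling back to two separate LDRs.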
10317
10318 ; This pattern is never tried by combine, so do it as a peephole
10319
10320 (define_peephole2
10321 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10322 (match_operand:SI 1 "arm_general_register_operand" ""))
10323 (set (reg:CC CC_REGNUM)
10324 (compare:CC (match_dup 1) (const_int 0)))]
10325 "TARGET_ARM"
10326 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10327 (set (match_dup 0) (match_dup 1))])]
10328 ""
10329 )
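
;; For illustration only: a sequence such as
;;         mov     r0, r1
;;         cmp     r1, #0
;; is rewritten into a single parallel so that a compare-and-move pattern
;; elsewhere in this file can match it, typically emitting one instruction
;; of the form
;;         subs    r0, r1, #0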
10330
10331 ; Peepholes to spot possible load- and store-multiples; if the ordering is
10332 ; reversed, check that the memory references aren't volatile.
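;
; For illustration only (assuming r12 as the base and ascending registers):
;         ldr     r0, [r12]
;         ldr     r1, [r12, #4]
;         ldr     r2, [r12, #8]
;         ldr     r3, [r12, #12]
; can be merged by the first peephole below into a single
;         ldmia   r12, {r0, r1, r2, r3}
; emit_stm_seq performs the corresponding rewrite for the store peepholes.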
10333
10334 (define_peephole
10335 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10336 (match_operand:SI 4 "memory_operand" "m"))
10337 (set (match_operand:SI 1 "s_register_operand" "=rk")
10338 (match_operand:SI 5 "memory_operand" "m"))
10339 (set (match_operand:SI 2 "s_register_operand" "=rk")
10340 (match_operand:SI 6 "memory_operand" "m"))
10341 (set (match_operand:SI 3 "s_register_operand" "=rk")
10342 (match_operand:SI 7 "memory_operand" "m"))]
10343 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10344 "*
10345 return emit_ldm_seq (operands, 4);
10346 "
10347 )
10348
10349 (define_peephole
10350 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10351 (match_operand:SI 3 "memory_operand" "m"))
10352 (set (match_operand:SI 1 "s_register_operand" "=rk")
10353 (match_operand:SI 4 "memory_operand" "m"))
10354 (set (match_operand:SI 2 "s_register_operand" "=rk")
10355 (match_operand:SI 5 "memory_operand" "m"))]
10356 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10357 "*
10358 return emit_ldm_seq (operands, 3);
10359 "
10360 )
10361
10362 (define_peephole
10363 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10364 (match_operand:SI 2 "memory_operand" "m"))
10365 (set (match_operand:SI 1 "s_register_operand" "=rk")
10366 (match_operand:SI 3 "memory_operand" "m"))]
10367 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10368 "*
10369 return emit_ldm_seq (operands, 2);
10370 "
10371 )
10372
10373 (define_peephole
10374 [(set (match_operand:SI 4 "memory_operand" "=m")
10375 (match_operand:SI 0 "s_register_operand" "rk"))
10376 (set (match_operand:SI 5 "memory_operand" "=m")
10377 (match_operand:SI 1 "s_register_operand" "rk"))
10378 (set (match_operand:SI 6 "memory_operand" "=m")
10379 (match_operand:SI 2 "s_register_operand" "rk"))
10380 (set (match_operand:SI 7 "memory_operand" "=m")
10381 (match_operand:SI 3 "s_register_operand" "rk"))]
10382 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10383 "*
10384 return emit_stm_seq (operands, 4);
10385 "
10386 )
10387
10388 (define_peephole
10389 [(set (match_operand:SI 3 "memory_operand" "=m")
10390 (match_operand:SI 0 "s_register_operand" "rk"))
10391 (set (match_operand:SI 4 "memory_operand" "=m")
10392 (match_operand:SI 1 "s_register_operand" "rk"))
10393 (set (match_operand:SI 5 "memory_operand" "=m")
10394 (match_operand:SI 2 "s_register_operand" "rk"))]
10395 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10396 "*
10397 return emit_stm_seq (operands, 3);
10398 "
10399 )
10400
10401 (define_peephole
10402 [(set (match_operand:SI 2 "memory_operand" "=m")
10403 (match_operand:SI 0 "s_register_operand" "rk"))
10404 (set (match_operand:SI 3 "memory_operand" "=m")
10405 (match_operand:SI 1 "s_register_operand" "rk"))]
10406 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10407 "*
10408 return emit_stm_seq (operands, 2);
10409 "
10410 )
10411
10412 (define_split
10413 [(set (match_operand:SI 0 "s_register_operand" "")
10414 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10415 (const_int 0))
10416 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10417 [(match_operand:SI 3 "s_register_operand" "")
10418 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10419 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10420 "TARGET_ARM"
10421 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10422 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10423 (match_dup 5)))]
10424 ""
10425 )
10426
10427 ;; This split can be used because CC_Z mode implies that the following
10428 ;; branch will be an equality, or an unsigned inequality, so the sign
10429 ;; extension is not needed.
10430
10431 (define_split
10432 [(set (reg:CC_Z CC_REGNUM)
10433 (compare:CC_Z
10434 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10435 (const_int 24))
10436 (match_operand 1 "const_int_operand" "")))
10437 (clobber (match_scratch:SI 2 ""))]
10438 "TARGET_ARM
10439 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10440 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10441 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10442 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10443 "
10444 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10445 "
10446 )
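
;; For illustration only: with operand 1 equal to 0x41000000, the split above
;; loads the byte with a zero-extending LDRB and compares the result against
;; 0x41 instead, which is equivalent for the equality and unsigned tests that
;; CC_Z mode permits.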
10447 ;; ??? Check the patterns above for Thumb-2 usefulness
10448
10449 (define_expand "prologue"
10450 [(clobber (const_int 0))]
10451 "TARGET_EITHER"
10452 "if (TARGET_32BIT)
10453 arm_expand_prologue ();
10454 else
10455 thumb1_expand_prologue ();
10456 DONE;
10457 "
10458 )
10459
10460 (define_expand "epilogue"
10461 [(clobber (const_int 0))]
10462 "TARGET_EITHER"
10463 "
10464 if (crtl->calls_eh_return)
10465 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10466 if (TARGET_THUMB1)
10467 thumb1_expand_epilogue ();
10468 else if (USE_RETURN_INSN (FALSE))
10469 {
10470 emit_jump_insn (gen_return ());
10471 DONE;
10472 }
10473 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10474 gen_rtvec (1,
10475 gen_rtx_RETURN (VOIDmode)),
10476 VUNSPEC_EPILOGUE));
10477 DONE;
10478 "
10479 )
10480
10481 ;; Note - although unspec_volatiles USE all hard registers,
10482 ;; USEs are ignored after reload has completed.  Thus we need
10483 ;; to add an unspec of the link register to ensure that flow
10484 ;; does not think that it is unused by the sibcall branch that
10485 ;; will replace the standard function epilogue.
10486 (define_insn "sibcall_epilogue"
10487 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10488 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10489 "TARGET_32BIT"
10490 "*
10491 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10492 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10493 return arm_output_epilogue (next_nonnote_insn (insn));
10494 "
10495 ;; Length is absolute worst case
10496 [(set_attr "length" "44")
10497 (set_attr "type" "block")
10498 ;; We don't clobber the conditions, but the potential length of this
10499 ;; operation is sufficient to make conditionalizing the sequence
10500 ;; unlikely to be profitable.
10501 (set_attr "conds" "clob")]
10502 )
10503
10504 (define_insn "*epilogue_insns"
10505 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10506 "TARGET_EITHER"
10507 "*
10508 if (TARGET_32BIT)
10509 return arm_output_epilogue (NULL);
10510 else /* TARGET_THUMB1 */
10511 return thumb_unexpanded_epilogue ();
10512 "
10513 ; Length is absolute worst case
10514 [(set_attr "length" "44")
10515 (set_attr "type" "block")
10516 ;; We don't clobber the conditions, but the potential length of this
10517 ;; operation is sufficient to make conditionalizing the sequence
10518 ;; unlikely to be profitable.
10519 (set_attr "conds" "clob")]
10520 )
10521
10522 (define_expand "eh_epilogue"
10523 [(use (match_operand:SI 0 "register_operand" ""))
10524 (use (match_operand:SI 1 "register_operand" ""))
10525 (use (match_operand:SI 2 "register_operand" ""))]
10526 "TARGET_EITHER"
10527 "
10528 {
10529 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10530 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10531 {
10532 rtx ra = gen_rtx_REG (Pmode, 2);
10533
10534 emit_move_insn (ra, operands[2]);
10535 operands[2] = ra;
10536 }
10537 /* This is a hack -- we may have crystallized the function type too
10538 early. */
10539 cfun->machine->func_type = 0;
10540 }"
10541 )
10542
10543 ;; This split is only used during output to reduce the number of patterns
10544 ;; that need assembler instructions added to them.  We allowed the setting
10545 ;; of the conditions to be implicit during rtl generation so that
10546 ;; the conditional compare patterns would work. However, this conflicts to
10547 ;; some extent with the conditional data operations, so we have to split them
10548 ;; up again here.
10549
10550 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10551 ;; conditional execution sufficient?
10552
10553 (define_split
10554 [(set (match_operand:SI 0 "s_register_operand" "")
10555 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10556 [(match_operand 2 "" "") (match_operand 3 "" "")])
10557 (match_dup 0)
10558 (match_operand 4 "" "")))
10559 (clobber (reg:CC CC_REGNUM))]
10560 "TARGET_ARM && reload_completed"
10561 [(set (match_dup 5) (match_dup 6))
10562 (cond_exec (match_dup 7)
10563 (set (match_dup 0) (match_dup 4)))]
10564 "
10565 {
10566 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10567 operands[2], operands[3]);
10568 enum rtx_code rc = GET_CODE (operands[1]);
10569
10570 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10571 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10572 if (mode == CCFPmode || mode == CCFPEmode)
10573 rc = reverse_condition_maybe_unordered (rc);
10574 else
10575 rc = reverse_condition (rc);
10576
10577 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10578 }"
10579 )
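
;; For illustration only (assuming a signed "<" comparison and a register
;; for operand 4): the split above turns
;;         r0 = (r1 < r2) ? r0 : r3
;; into a compare followed by a conditionally executed move, with the
;; reversed condition guarding the write of the "else" value:
;;         cmp     r1, r2
;;         movge   r0, r3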
10580
10581 (define_split
10582 [(set (match_operand:SI 0 "s_register_operand" "")
10583 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10584 [(match_operand 2 "" "") (match_operand 3 "" "")])
10585 (match_operand 4 "" "")
10586 (match_dup 0)))
10587 (clobber (reg:CC CC_REGNUM))]
10588 "TARGET_ARM && reload_completed"
10589 [(set (match_dup 5) (match_dup 6))
10590 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10591 (set (match_dup 0) (match_dup 4)))]
10592 "
10593 {
10594 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10595 operands[2], operands[3]);
10596
10597 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10598 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10599 }"
10600 )
10601
10602 (define_split
10603 [(set (match_operand:SI 0 "s_register_operand" "")
10604 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10605 [(match_operand 2 "" "") (match_operand 3 "" "")])
10606 (match_operand 4 "" "")
10607 (match_operand 5 "" "")))
10608 (clobber (reg:CC CC_REGNUM))]
10609 "TARGET_ARM && reload_completed"
10610 [(set (match_dup 6) (match_dup 7))
10611 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10612 (set (match_dup 0) (match_dup 4)))
10613 (cond_exec (match_dup 8)
10614 (set (match_dup 0) (match_dup 5)))]
10615 "
10616 {
10617 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10618 operands[2], operands[3]);
10619 enum rtx_code rc = GET_CODE (operands[1]);
10620
10621 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10622 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10623 if (mode == CCFPmode || mode == CCFPEmode)
10624 rc = reverse_condition_maybe_unordered (rc);
10625 else
10626 rc = reverse_condition (rc);
10627
10628 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10629 }"
10630 )
10631
10632 (define_split
10633 [(set (match_operand:SI 0 "s_register_operand" "")
10634 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10635 [(match_operand:SI 2 "s_register_operand" "")
10636 (match_operand:SI 3 "arm_add_operand" "")])
10637 (match_operand:SI 4 "arm_rhs_operand" "")
10638 (not:SI
10639 (match_operand:SI 5 "s_register_operand" ""))))
10640 (clobber (reg:CC CC_REGNUM))]
10641 "TARGET_ARM && reload_completed"
10642 [(set (match_dup 6) (match_dup 7))
10643 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10644 (set (match_dup 0) (match_dup 4)))
10645 (cond_exec (match_dup 8)
10646 (set (match_dup 0) (not:SI (match_dup 5))))]
10647 "
10648 {
10649 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10650 operands[2], operands[3]);
10651 enum rtx_code rc = GET_CODE (operands[1]);
10652
10653 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10654 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10655 if (mode == CCFPmode || mode == CCFPEmode)
10656 rc = reverse_condition_maybe_unordered (rc);
10657 else
10658 rc = reverse_condition (rc);
10659
10660 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10661 }"
10662 )
10663
10664 (define_insn "*cond_move_not"
10665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10666 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10667 [(match_operand 3 "cc_register" "") (const_int 0)])
10668 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10669 (not:SI
10670 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10671 "TARGET_ARM"
10672 "@
10673 mvn%D4\\t%0, %2
10674 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10675 [(set_attr "conds" "use")
10676 (set_attr "length" "4,8")]
10677 )
10678
10679 ;; The next two patterns occur when an AND operation is followed by a
10680 ;; scc insn sequence
10681
10682 (define_insn "*sign_extract_onebit"
10683 [(set (match_operand:SI 0 "s_register_operand" "=r")
10684 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10685 (const_int 1)
10686 (match_operand:SI 2 "const_int_operand" "n")))
10687 (clobber (reg:CC CC_REGNUM))]
10688 "TARGET_ARM"
10689 "*
10690 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10691 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10692 return \"mvnne\\t%0, #0\";
10693 "
10694 [(set_attr "conds" "clob")
10695 (set_attr "length" "8")]
10696 )
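
;; For illustration only: extracting (signed) bit 3 of r1 into r0 with the
;; pattern above gives
;;         ands    r0, r1, #8
;;         mvnne   r0, #0
;; so r0 ends up as 0 or -1 according to the selected bit.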
10697
10698 (define_insn "*not_signextract_onebit"
10699 [(set (match_operand:SI 0 "s_register_operand" "=r")
10700 (not:SI
10701 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10702 (const_int 1)
10703 (match_operand:SI 2 "const_int_operand" "n"))))
10704 (clobber (reg:CC CC_REGNUM))]
10705 "TARGET_ARM"
10706 "*
10707 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10708 output_asm_insn (\"tst\\t%1, %2\", operands);
10709 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10710 return \"movne\\t%0, #0\";
10711 "
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "12")]
10714 )
10715 ;; ??? The above patterns need auditing for Thumb-2
10716
10717 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10718 ;; expressions. For simplicity, the first register is also in the unspec
10719 ;; part.
10720 (define_insn "*push_multi"
10721 [(match_parallel 2 "multi_register_push"
10722 [(set (match_operand:BLK 0 "memory_operand" "=m")
10723 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10724 UNSPEC_PUSH_MULT))])]
10725 "TARGET_32BIT"
10726 "*
10727 {
10728 int num_saves = XVECLEN (operands[2], 0);
10729
10730 /* For the StrongARM at least it is faster to
10731 use STR to store only a single register.
10732 In Thumb mode always use push, and the assembler will pick
10733 something appropriate. */
10734 if (num_saves == 1 && TARGET_ARM)
10735 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10736 else
10737 {
10738 int i;
10739 char pattern[100];
10740
10741 if (TARGET_ARM)
10742 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10743 else
10744 strcpy (pattern, \"push\\t{%1\");
10745
10746 for (i = 1; i < num_saves; i++)
10747 {
10748 strcat (pattern, \", %|\");
10749 strcat (pattern,
10750 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10751 }
10752
10753 strcat (pattern, \"}\");
10754 output_asm_insn (pattern, operands);
10755 }
10756
10757 return \"\";
10758 }"
10759 [(set_attr "type" "store4")]
10760 )
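
;; For illustration only: saving r4, r5 and lr with this pattern emits
;;         stmfd   sp!, {r4, r5, lr}        @ ARM
;;         push    {r4, r5, lr}             @ Thumb
;; while a single-register save in ARM mode uses the STR write-back form.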
10761
10762 (define_insn "stack_tie"
10763 [(set (mem:BLK (scratch))
10764 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10765 (match_operand:SI 1 "s_register_operand" "rk")]
10766 UNSPEC_PRLG_STK))]
10767 ""
10768 ""
10769 [(set_attr "length" "0")]
10770 )
10771
10772 ;; Similarly for the floating point registers
10773 (define_insn "*push_fp_multi"
10774 [(match_parallel 2 "multi_register_push"
10775 [(set (match_operand:BLK 0 "memory_operand" "=m")
10776 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10777 UNSPEC_PUSH_MULT))])]
10778 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10779 "*
10780 {
10781 char pattern[100];
10782
10783 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10784 output_asm_insn (pattern, operands);
10785 return \"\";
10786 }"
10787 [(set_attr "type" "f_store")]
10788 )
10789
10790 ;; Special patterns for dealing with the constant pool
10791
10792 (define_insn "align_4"
10793 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10794 "TARGET_EITHER"
10795 "*
10796 assemble_align (32);
10797 return \"\";
10798 "
10799 )
10800
10801 (define_insn "align_8"
10802 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10803 "TARGET_EITHER"
10804 "*
10805 assemble_align (64);
10806 return \"\";
10807 "
10808 )
10809
10810 (define_insn "consttable_end"
10811 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10812 "TARGET_EITHER"
10813 "*
10814 making_const_table = FALSE;
10815 return \"\";
10816 "
10817 )
10818
10819 (define_insn "consttable_1"
10820 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10821 "TARGET_THUMB1"
10822 "*
10823 making_const_table = TRUE;
10824 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10825 assemble_zeros (3);
10826 return \"\";
10827 "
10828 [(set_attr "length" "4")]
10829 )
10830
10831 (define_insn "consttable_2"
10832 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10833 "TARGET_THUMB1"
10834 "*
10835 making_const_table = TRUE;
10836 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10837 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10838 assemble_zeros (2);
10839 return \"\";
10840 "
10841 [(set_attr "length" "4")]
10842 )
10843
10844 (define_insn "consttable_4"
10845 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10846 "TARGET_EITHER"
10847 "*
10848 {
10849 rtx x = operands[0];
10850 making_const_table = TRUE;
10851 switch (GET_MODE_CLASS (GET_MODE (x)))
10852 {
10853 case MODE_FLOAT:
10854 if (GET_MODE (x) == HFmode)
10855 arm_emit_fp16_const (x);
10856 else
10857 {
10858 REAL_VALUE_TYPE r;
10859 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10860 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10861 }
10862 break;
10863 default:
10864 assemble_integer (x, 4, BITS_PER_WORD, 1);
10865 mark_symbol_refs_as_used (x);
10866 break;
10867 }
10868 return \"\";
10869 }"
10870 [(set_attr "length" "4")]
10871 )
10872
10873 (define_insn "consttable_8"
10874 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10875 "TARGET_EITHER"
10876 "*
10877 {
10878 making_const_table = TRUE;
10879 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10880 {
10881 case MODE_FLOAT:
10882 {
10883 REAL_VALUE_TYPE r;
10884 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10885 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10886 break;
10887 }
10888 default:
10889 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10890 break;
10891 }
10892 return \"\";
10893 }"
10894 [(set_attr "length" "8")]
10895 )
10896
10897 (define_insn "consttable_16"
10898 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10899 "TARGET_EITHER"
10900 "*
10901 {
10902 making_const_table = TRUE;
10903 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10904 {
10905 case MODE_FLOAT:
10906 {
10907 REAL_VALUE_TYPE r;
10908 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10909 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10910 break;
10911 }
10912 default:
10913 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10914 break;
10915 }
10916 return \"\";
10917 }"
10918 [(set_attr "length" "16")]
10919 )
10920
10921 ;; Miscellaneous Thumb patterns
10922
10923 (define_expand "tablejump"
10924 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10925 (use (label_ref (match_operand 1 "" "")))])]
10926 "TARGET_THUMB1"
10927 "
10928 if (flag_pic)
10929 {
10930 /* Hopefully, CSE will eliminate this copy. */
10931 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10932 rtx reg2 = gen_reg_rtx (SImode);
10933
10934 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10935 operands[0] = reg2;
10936 }
10937 "
10938 )
10939
10940 ;; NB never uses BX.
10941 (define_insn "*thumb1_tablejump"
10942 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10943 (use (label_ref (match_operand 1 "" "")))]
10944 "TARGET_THUMB1"
10945 "mov\\t%|pc, %0"
10946 [(set_attr "length" "2")]
10947 )
10948
10949 ;; V5 instructions.
10950
10951 (define_insn "clzsi2"
10952 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10954 "TARGET_32BIT && arm_arch5"
10955 "clz%?\\t%0, %1"
10956 [(set_attr "predicable" "yes")
10957 (set_attr "insn" "clz")])
10958
10959 (define_insn "rbitsi2"
10960 [(set (match_operand:SI 0 "s_register_operand" "=r")
10961 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10962 "TARGET_32BIT && arm_arch_thumb2"
10963 "rbit%?\\t%0, %1"
10964 [(set_attr "predicable" "yes")
10965 (set_attr "insn" "clz")])
10966
10967 (define_expand "ctzsi2"
10968 [(set (match_operand:SI 0 "s_register_operand" "")
10969 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10970 "TARGET_32BIT && arm_arch_thumb2"
10971 "
10972 {
10973 rtx tmp = gen_reg_rtx (SImode);
10974 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10975 emit_insn (gen_clzsi2 (operands[0], tmp));
10976 }
10977 DONE;
10978 "
10979 )
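
;; For illustration only: since CTZ (x) == CLZ (RBIT (x)), the expander above
;; generates a two-instruction sequence such as
;;         rbit    r3, r1
;;         clz     r0, r3
;; where r3 stands for the temporary created by gen_reg_rtx.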
10980
10981 ;; V5E instructions.
10982
10983 (define_insn "prefetch"
10984 [(prefetch (match_operand:SI 0 "address_operand" "p")
10985 (match_operand:SI 1 "" "")
10986 (match_operand:SI 2 "" ""))]
10987 "TARGET_32BIT && arm_arch5e"
10988 "pld\\t%a0")
10989
10990 ;; General predication pattern
10991
10992 (define_cond_exec
10993 [(match_operator 0 "arm_comparison_operator"
10994 [(match_operand 1 "cc_register" "")
10995 (const_int 0)])]
10996 "TARGET_32BIT"
10997 ""
10998 )
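
;; For illustration only: under this cond_exec pattern a conditionally
;; executed move guarded by an EQ test of the condition register, i.e.
;; (cond_exec (eq (reg CC_REGNUM) (const_int 0)) (set ...)), is printed
;; with the condition folded into the mnemonic, e.g.
;;         moveq   r0, r1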
10999
11000 (define_insn "prologue_use"
11001 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11002 ""
11003 "%@ %0 needed for prologue"
11004 [(set_attr "length" "0")]
11005 )
11006
11007
11008 ;; Patterns for exception handling
11009
11010 (define_expand "eh_return"
11011 [(use (match_operand 0 "general_operand" ""))]
11012 "TARGET_EITHER"
11013 "
11014 {
11015 if (TARGET_32BIT)
11016 emit_insn (gen_arm_eh_return (operands[0]));
11017 else
11018 emit_insn (gen_thumb_eh_return (operands[0]));
11019 DONE;
11020 }"
11021 )
11022
11023 ;; We can't expand this before we know where the link register is stored.
11024 (define_insn_and_split "arm_eh_return"
11025 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11026 VUNSPEC_EH_RETURN)
11027 (clobber (match_scratch:SI 1 "=&r"))]
11028 "TARGET_ARM"
11029 "#"
11030 "&& reload_completed"
11031 [(const_int 0)]
11032 "
11033 {
11034 arm_set_return_address (operands[0], operands[1]);
11035 DONE;
11036 }"
11037 )
11038
11039 (define_insn_and_split "thumb_eh_return"
11040 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11041 VUNSPEC_EH_RETURN)
11042 (clobber (match_scratch:SI 1 "=&l"))]
11043 "TARGET_THUMB1"
11044 "#"
11045 "&& reload_completed"
11046 [(const_int 0)]
11047 "
11048 {
11049 thumb_set_return_address (operands[0], operands[1]);
11050 DONE;
11051 }"
11052 )
11053
11054 \f
11055 ;; TLS support
11056
11057 (define_insn "load_tp_hard"
11058 [(set (match_operand:SI 0 "register_operand" "=r")
11059 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11060 "TARGET_HARD_TP"
11061 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11062 [(set_attr "predicable" "yes")]
11063 )
11064
11065 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11066 (define_insn "load_tp_soft"
11067 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11068 (clobber (reg:SI LR_REGNUM))
11069 (clobber (reg:SI IP_REGNUM))
11070 (clobber (reg:CC CC_REGNUM))]
11071 "TARGET_SOFT_TP"
11072 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11073 [(set_attr "conds" "clob")]
11074 )
11075
11076 (define_insn "*arm_movtas_ze"
11077 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11078 (const_int 16)
11079 (const_int 16))
11080 (match_operand:SI 1 "const_int_operand" ""))]
11081 "TARGET_32BIT"
11082 "movt%?\t%0, %c1"
11083 [(set_attr "predicable" "yes")
11084 (set_attr "length" "4")]
11085 )
11086
11087 ;; Load the FPA co-processor patterns
11088 (include "fpa.md")
11089 ;; Load the Maverick co-processor patterns
11090 (include "cirrus.md")
11091 ;; Vector bits common to IWMMXT and Neon
11092 (include "vec-common.md")
11093 ;; Load the Intel Wireless Multimedia Extension patterns
11094 (include "iwmmxt.md")
11095 ;; Load the VFP co-processor patterns
11096 (include "vfp.md")
11097 ;; Thumb-2 patterns
11098 (include "thumb2.md")
11099 ;; Neon patterns
11100 (include "neon.md")
11101