bb9e3283fc369287acb11acfa98116eba928761f
[gcc.git] / gcc / config / arm / arm.md
1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ; Last core register (same number as PC_REGNUM)
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
43 ] ; NOTE(review): these numbers look like they mirror the register
44 ) ; numbering in arm.h — confirm before changing any of them.
45 ;; 3rd operand to select_dominance_cc_mode
46 (define_constants
47 [(DOM_CC_X_AND_Y 0) ; combined condition is "X && Y"
48 (DOM_CC_NX_OR_Y 1) ; combined condition is "!X || Y"
49 (DOM_CC_X_OR_Y 2) ; combined condition is "X || Y"
50 ]
51 )
52 ;; conditional compare combination
53 ;; Each value names the pair of compare instructions (CMP or CMN) that
53 ;; implement the first and second comparison respectively.
53 (define_constants
54 [(CMP_CMP 0) ; first CMP, second CMP
55 (CMN_CMP 1) ; first CMN, second CMP
56 (CMP_CMN 2) ; first CMP, second CMN
57 (CMN_CMN 3) ; first CMN, second CMN
58 (NUM_OF_COND_CMP 4) ; number of combinations above
59 ]
60 )
61
62 ;; UNSPEC Usage:
63 ;; Note: sin and cos are no-longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
65
66 (define_c_enum "unspec" [
67 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 UNSPEC_PUSH_MULT ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
76 ; expressions.
77 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 UNSPEC_PIC_BASE ; Add PC and all but the last operand together,
81 ; The last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
89 ; register to "use".
90 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
91 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
104 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
107 ; correctly for PIC usage.
108 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from
109 ; a given symbolic address.
110 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
111 UNSPEC_RBIT ; rbit operation.
112 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
113 ; another symbolic address.
114 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
115 UNSPEC_UNALIGNED_LOAD ; Used to represent ldr/ldrh instructions that access
116 ; unaligned locations, on architectures which support
117 ; that.
118 UNSPEC_UNALIGNED_STORE ; Same for str/strh.
119 ])
120
121 ;; UNSPEC_VOLATILE Usage:
122
123 (define_c_enum "unspecv" [
124 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
125 ; insn in the code.
126 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
127 ; instruction epilogue sequence that isn't expanded
128 ; into normal RTL. Used for both normal and sibcall
129 ; epilogues.
130 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
131 ; modes from arm to thumb.
132 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
133 ; for inlined constants.
134 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
135 ; table.
136 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
137 ; an 8-bit object.
138 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
139 ; a 16-bit object.
140 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
141 ; a 32-bit object.
142 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
143 ; a 64-bit object.
144 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
145 ; a 128-bit object.
146 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
147 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
148 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
149 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
150 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
151 VUNSPEC_WCMP_GT ; Used by the iWMMXt WCMPGT instructions
152 VUNSPEC_EH_RETURN ; Use to override the return address for exception
153 ; handling.
154 VUNSPEC_SYNC_COMPARE_AND_SWAP ; Represent an atomic compare swap.
155 VUNSPEC_SYNC_LOCK ; Represent a sync_lock_test_and_set.
156 VUNSPEC_SYNC_OP ; Represent a sync_<op>
157 VUNSPEC_SYNC_NEW_OP ; Represent a sync_new_<op>
158 VUNSPEC_SYNC_OLD_OP ; Represent a sync_old_<op>
159 ])
160 \f
161 ;;---------------------------------------------------------------------------
162 ;; Attributes
163
164 ;; Processor type. This is created automatically from arm-cores.def.
165 (include "arm-tune.md")
166
167 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
168 ; generating ARM code. This is used to control the length of some insn
169 ; patterns that share the same RTL in both ARM and Thumb code.
170 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
171 
172 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
173 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
174 
175 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
176 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
177
178 ;; Operand number of an input operand that is shifted. Zero if the
179 ;; given instruction does not shift one of its input operands.
180 (define_attr "shift" "" (const_int 0))
181
182 ; Floating Point Unit. If we only have floating point emulation, then there
183 ; is no point in scheduling the floating point insns. (Well, for best
184 ; performance we should try and group them together).
185 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
186 (const (symbol_ref "arm_fpu_attr")))
187
188 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
189 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
190 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
191 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
192 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
193 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
194 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
195 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
196 (const_string "none"))
197
198 ; LENGTH of an instruction (in bytes)
199 (define_attr "length" ""
200 (cond [(not (eq_attr "sync_memory" "none"))
201 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
202 ] (const_int 4)))
203
204 ; The architecture which supports the instruction (or alternative).
205 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
206 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
207 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
208 ; arm_arch6. This attribute is used to compute attribute "enabled",
209 ; use type "any" to enable an alternative in all cases.
210 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
211 (const_string "any"))
212
213 (define_attr "arch_enabled" "no,yes"
214 (cond [(eq_attr "arch" "any")
215 (const_string "yes")
216
217 (and (eq_attr "arch" "a")
218 (match_test "TARGET_ARM"))
219 (const_string "yes")
220
221 (and (eq_attr "arch" "t")
222 (match_test "TARGET_THUMB"))
223 (const_string "yes")
224
225 (and (eq_attr "arch" "t1")
226 (match_test "TARGET_THUMB1"))
227 (const_string "yes")
228
229 (and (eq_attr "arch" "t2")
230 (match_test "TARGET_THUMB2"))
231 (const_string "yes")
232
233 (and (eq_attr "arch" "32")
234 (match_test "TARGET_32BIT"))
235 (const_string "yes")
236
237 (and (eq_attr "arch" "v6")
238 (match_test "TARGET_32BIT && arm_arch6"))
239 (const_string "yes")
240
241 (and (eq_attr "arch" "nov6")
242 (match_test "TARGET_32BIT && !arm_arch6"))
243 (const_string "yes")
244
245 (and (eq_attr "arch" "onlya8")
246 (eq_attr "tune" "cortexa8"))
247 (const_string "yes")
248
249 (and (eq_attr "arch" "nota8")
250 (not (eq_attr "tune" "cortexa8")))
251 (const_string "yes")]
252 (const_string "no")))
253
254 ; Allows an insn to disable certain alternatives for reasons other than
255 ; arch support.
256 (define_attr "insn_enabled" "no,yes"
257 (const_string "yes"))
258
259 ; Enable all alternatives that are both arch_enabled and insn_enabled.
260 (define_attr "enabled" "no,yes"
261 (if_then_else (eq_attr "insn_enabled" "yes")
262 (if_then_else (eq_attr "arch_enabled" "yes")
263 (const_string "yes")
264 (const_string "no"))
265 (const_string "no")))
266
267 ; POOL_RANGE is how far away from a constant pool entry that this insn
268 ; can be placed. If the distance is zero, then this insn will never
269 ; reference the pool.
270 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
271 ; before its address.
272 (define_attr "arm_pool_range" "" (const_int 0))
273 (define_attr "thumb2_pool_range" "" (const_int 0))
274 (define_attr "arm_neg_pool_range" "" (const_int 0))
275 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
276
277 (define_attr "pool_range" ""
278 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
279 (attr "arm_pool_range")))
280 (define_attr "neg_pool_range" ""
281 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
282 (attr "arm_neg_pool_range")))
283
284 ; An assembler sequence may clobber the condition codes without us knowing.
285 ; If such an insn references the pool, then we have no way of knowing how,
286 ; so use the most conservative value for pool_range.
287 (define_asm_attributes
288 [(set_attr "conds" "clob")
289 (set_attr "length" "4")
290 (set_attr "pool_range" "250")])
291
292 ;; The instruction used to implement a particular pattern. This
293 ;; information is used by pipeline descriptions to provide accurate
294 ;; scheduling information.
295
296 (define_attr "insn"
297 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
298 (const_string "other"))
299
300 ; TYPE attribute is used to detect floating point instructions which, if
301 ; running on a co-processor can run in parallel with other, basic instructions
302 ; If write-buffer scheduling is enabled then it can also be used in the
303 ; scheduling of writes.
304
305 ; Classification of each insn
306 ; Note: vfp.md has different meanings for some of these, and some further
307 ; types as well. See that file for details.
308 ; alu any alu instruction that doesn't hit memory or fp
309 ; regs or have a shifted source operand
310 ; alu_shift any data instruction that doesn't hit memory or fp
311 ; regs, but has a source operand shifted by a constant
312 ; alu_shift_reg any data instruction that doesn't hit memory or fp
313 ; regs, but has a source operand shifted by a register value
314 ; mult a multiply instruction
315 ; block blockage insn, this blocks all functional units
316 ; float a floating point arithmetic operation (subject to expansion)
317 ; fdivd DFmode floating point division
318 ; fdivs SFmode floating point division
319 ; fmul Floating point multiply
320 ; ffmul Fast floating point multiply
321 ; farith Floating point arithmetic (4 cycle)
322 ; ffarith Fast floating point arithmetic (2 cycle)
323 ; float_em a floating point arithmetic operation that is normally emulated
324 ; even on a machine with an fpa.
325 ; f_fpa_load a floating point load from memory. Only for the FPA.
326 ; f_fpa_store a floating point store to memory. Only for the FPA.
327 ; f_load[sd] A single/double load from memory. Used for VFP unit.
328 ; f_store[sd] A single/double store to memory. Used for VFP unit.
329 ; f_flag a transfer of co-processor flags to the CPSR
330 ; f_mem_r a transfer of a floating point register to a real reg via mem
331 ; r_mem_f the reverse of f_mem_r
332 ; f_2_r fast transfer float to arm (no memory needed)
333 ; r_2_f fast transfer arm to float
334 ; f_cvt convert floating<->integral
335 ; branch a branch
336 ; call a subroutine call
337 ; load_byte load byte(s) from memory to arm registers
338 ; load1 load 1 word from memory to arm registers
339 ; load2 load 2 words from memory to arm registers
340 ; load3 load 3 words from memory to arm registers
341 ; load4 load 4 words from memory to arm registers
342 ; store store 1 word to memory from arm registers
343 ; store2 store 2 words
344 ; store3 store 3 words
345 ; store4 store 4 (or more) words
346 ; Additions for Cirrus Maverick co-processor:
347 ; mav_farith Floating point arithmetic (4 cycle)
348 ; mav_dmult Double multiplies (7 cycle)
349 ;
350
351 (define_attr "type"
352 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
353 (if_then_else
354 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
355 (const_string "mult")
356 (const_string "alu")))
357
358 ; Load scheduling, set from the arm_ld_sched variable
359 ; initialized by arm_option_override()
360 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
361
362 ;; Classification of NEON instructions for scheduling purposes.
363 ;; Do not set this attribute and the "type" attribute together in
364 ;; any one instruction pattern.
365 (define_attr "neon_type"
366 "neon_int_1,\
367 neon_int_2,\
368 neon_int_3,\
369 neon_int_4,\
370 neon_int_5,\
371 neon_vqneg_vqabs,\
372 neon_vmov,\
373 neon_vaba,\
374 neon_vsma,\
375 neon_vaba_qqq,\
376 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
377 neon_mul_qqq_8_16_32_ddd_32,\
378 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
379 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
380 neon_mla_qqq_8_16,\
381 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
382 neon_mla_qqq_32_qqd_32_scalar,\
383 neon_mul_ddd_16_scalar_32_16_long_scalar,\
384 neon_mul_qqd_32_scalar,\
385 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
386 neon_shift_1,\
387 neon_shift_2,\
388 neon_shift_3,\
389 neon_vshl_ddd,\
390 neon_vqshl_vrshl_vqrshl_qqq,\
391 neon_vsra_vrsra,\
392 neon_fp_vadd_ddd_vabs_dd,\
393 neon_fp_vadd_qqq_vabs_qq,\
394 neon_fp_vsum,\
395 neon_fp_vmul_ddd,\
396 neon_fp_vmul_qqd,\
397 neon_fp_vmla_ddd,\
398 neon_fp_vmla_qqq,\
399 neon_fp_vmla_ddd_scalar,\
400 neon_fp_vmla_qqq_scalar,\
401 neon_fp_vrecps_vrsqrts_ddd,\
402 neon_fp_vrecps_vrsqrts_qqq,\
403 neon_bp_simple,\
404 neon_bp_2cycle,\
405 neon_bp_3cycle,\
406 neon_ldr,\
407 neon_str,\
408 neon_vld1_1_2_regs,\
409 neon_vld1_3_4_regs,\
410 neon_vld2_2_regs_vld1_vld2_all_lanes,\
411 neon_vld2_4_regs,\
412 neon_vld3_vld4,\
413 neon_vst1_1_2_regs_vst2_2_regs,\
414 neon_vst1_3_4_regs,\
415 neon_vst2_4_regs_vst3_vst4,\
416 neon_vst3_vst4,\
417 neon_vld1_vld2_lane,\
418 neon_vld3_vld4_lane,\
419 neon_vst1_vst2_lane,\
420 neon_vst3_vst4_lane,\
421 neon_vld3_vld4_all_lanes,\
422 neon_mcr,\
423 neon_mcr_2_mcrr,\
424 neon_mrc,\
425 neon_mrrc,\
426 neon_ldm_2,\
427 neon_stm_2,\
428 none"
429 (const_string "none"))
430
431 ; condition codes: this one is used by final_prescan_insn to speed up
432 ; conditionalizing instructions. It saves having to scan the rtl to see if
433 ; it uses or alters the condition codes.
434 ;
435 ; USE means that the condition codes are used by the insn in the process of
436 ; outputting code, this means (at present) that we can't use the insn in
437 ; inlined branches
438 ;
439 ; SET means that the purpose of the insn is to set the condition codes in a
440 ; well defined manner.
441 ;
442 ; CLOB means that the condition codes are altered in an undefined manner, if
443 ; they are altered at all
444 ;
445 ; UNCONDITIONAL means the instruction can not be conditionally executed and
446 ; that the instruction does not use or alter the condition codes.
447 ;
448 ; NOCOND means that the instruction does not use or alter the condition
449 ; codes but can be converted into a conditionally executed instruction.
450 
451 (define_attr "conds" "use,set,clob,unconditional,nocond"
452 (if_then_else
453 (ior (eq_attr "is_thumb1" "yes")
454 (eq_attr "type" "call"))
455 (const_string "clob")
456 (if_then_else (eq_attr "neon_type" "none")
457 (const_string "nocond")
458 (const_string "unconditional"))))
459
460 ; Predicable means that the insn can be conditionally executed based on
461 ; an automatically added predicate (additional patterns are generated by
462 ; gen...). We default to 'no' because no Thumb patterns match this rule
463 ; and not all ARM patterns do.
464 (define_attr "predicable" "no,yes" (const_string "no"))
465
466 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
467 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
468 ; suffer blockages enough to warrant modelling this (and it can adversely
469 ; affect the schedule).
470 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
471
472 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
473 ; to stall the processor. Used with model_wbuf above.
474 (define_attr "write_conflict" "no,yes"
475 (if_then_else (eq_attr "type"
476 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
477 (const_string "yes")
478 (const_string "no")))
479
480 ; Classify the insns into those that take one cycle and those that take more
481 ; than one on the main cpu execution unit.
482 (define_attr "core_cycles" "single,multi"
483 (if_then_else (eq_attr "type"
484 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
485 (const_string "single")
486 (const_string "multi")))
487
488 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
489 ;; distant label. Only applicable to Thumb code.
490 (define_attr "far_jump" "yes,no" (const_string "no"))
491
492
493 ;; The number of machine instructions this pattern expands to.
494 ;; Used for Thumb-2 conditional execution.
495 (define_attr "ce_count" "" (const_int 1))
496
497 ;;---------------------------------------------------------------------------
498 ;; Mode iterators
499
500 (include "iterators.md")
501
502 ;;---------------------------------------------------------------------------
503 ;; Predicates
504
505 (include "predicates.md")
506 (include "constraints.md")
507
508 ;;---------------------------------------------------------------------------
509 ;; Pipeline descriptions
510
511 (define_attr "tune_cortexr4" "yes,no"
512 (const (if_then_else
513 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
514 (const_string "yes")
515 (const_string "no"))))
516
517 ;; True if the generic scheduling description should be used.
518
519 (define_attr "generic_sched" "yes,no"
520 (const (if_then_else
521 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
522 (eq_attr "tune_cortexr4" "yes"))
523 (const_string "no")
524 (const_string "yes"))))
525
526 (define_attr "generic_vfp" "yes,no"
527 (const (if_then_else
528 (and (eq_attr "fpu" "vfp")
529 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
530 (eq_attr "tune_cortexr4" "no"))
531 (const_string "yes")
532 (const_string "no"))))
533
534 (include "arm-generic.md")
535 (include "arm926ejs.md")
536 (include "arm1020e.md")
537 (include "arm1026ejs.md")
538 (include "arm1136jfs.md")
539 (include "fa526.md")
540 (include "fa606te.md")
541 (include "fa626te.md")
542 (include "fmp626.md")
543 (include "fa726te.md")
544 (include "cortex-a5.md")
545 (include "cortex-a8.md")
546 (include "cortex-a9.md")
547 (include "cortex-r4.md")
548 (include "cortex-r4f.md")
549 (include "cortex-m4.md")
550 (include "cortex-m4-fpu.md")
551 (include "vfp11.md")
552
553 \f
554 ;;---------------------------------------------------------------------------
555 ;; Insn patterns
556 ;;
557 ;; Addition insns.
558
559 ;; Note: For DImode insns, there is normally no reason why operands should
560 ;; not be in the same register, what we don't want is for something being
561 ;; written to partially overlap something that is an input.
562 ;; Cirrus 64bit additions should not be split because we have native
563 ;; 64bit addition instructions.
564
565 (define_expand "adddi3"
566 [(parallel
567 [(set (match_operand:DI 0 "s_register_operand" "")
568 (plus:DI (match_operand:DI 1 "s_register_operand" "")
569 (match_operand:DI 2 "s_register_operand" "")))
570 (clobber (reg:CC CC_REGNUM))])]
571 "TARGET_EITHER"
572 "
573 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
574 {
575 if (!cirrus_fp_register (operands[0], DImode))
576 operands[0] = force_reg (DImode, operands[0]);
577 if (!cirrus_fp_register (operands[1], DImode))
578 operands[1] = force_reg (DImode, operands[1]);
579 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
580 DONE;
581 }
582
583 if (TARGET_THUMB1)
584 {
585 if (GET_CODE (operands[1]) != REG)
586 operands[1] = force_reg (DImode, operands[1]);
587 if (GET_CODE (operands[2]) != REG)
588 operands[2] = force_reg (DImode, operands[2]);
589 }
590 "
591 )
592
;; Thumb-1 64-bit add: low words with ADD (sets carry), high words with
;; ADC.  Operand 1 is tied to operand 0 ("%0"), so only operand 2 needs
;; a separate low register.
593 (define_insn "*thumb1_adddi3"
594 [(set (match_operand:DI 0 "register_operand" "=l")
595 (plus:DI (match_operand:DI 1 "register_operand" "%0")
596 (match_operand:DI 2 "register_operand" "l")))
597 (clobber (reg:CC CC_REGNUM))
598 ]
599 "TARGET_THUMB1"
600 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
601 [(set_attr "length" "4")]
602 )
603
;; 32-bit-core 64-bit add.  Emitted as "#" and split after reload into a
;; 32-bit ADDS on the low words (the parallel sets CC_C from the compare
;; of the sum against operand 1, i.e. the carry-out) followed by an ADC
;; on the high words.  Not used when the value lives in a NEON register.
604 (define_insn_and_split "*arm_adddi3"
605 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
606 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
607 (match_operand:DI 2 "s_register_operand" "r, 0")))
608 (clobber (reg:CC CC_REGNUM))]
609 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
610 "#"
611 "TARGET_32BIT && reload_completed
612 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
613 [(parallel [(set (reg:CC_C CC_REGNUM)
614 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
615 (match_dup 1)))
616 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
617 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
618 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
619 "
620 {
621 operands[3] = gen_highpart (SImode, operands[0]);
622 operands[0] = gen_lowpart (SImode, operands[0]);
623 operands[4] = gen_highpart (SImode, operands[1]);
624 operands[1] = gen_lowpart (SImode, operands[1]);
625 operands[5] = gen_highpart (SImode, operands[2]);
626 operands[2] = gen_lowpart (SImode, operands[2]);
627 }"
628 [(set_attr "conds" "clob")
629 (set_attr "length" "8")]
630 )
631
632 (define_insn_and_split "*adddi_sesidi_di"
633 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
634 (plus:DI (sign_extend:DI
635 (match_operand:SI 2 "s_register_operand" "r,r"))
636 (match_operand:DI 1 "s_register_operand" "0,r")))
637 (clobber (reg:CC CC_REGNUM))]
638 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
639 "#"
640 "TARGET_32BIT && reload_completed"
641 [(parallel [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
643 (match_dup 1)))
644 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
645 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
646 (const_int 31))
647 (match_dup 4))
648 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
649 "
650 {
651 operands[3] = gen_highpart (SImode, operands[0]);
652 operands[0] = gen_lowpart (SImode, operands[0]);
653 operands[4] = gen_highpart (SImode, operands[1]);
654 operands[1] = gen_lowpart (SImode, operands[1]);
655 operands[2] = gen_lowpart (SImode, operands[2]);
656 }"
657 [(set_attr "conds" "clob")
658 (set_attr "length" "8")]
659 )
660
661 (define_insn_and_split "*adddi_zesidi_di"
662 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
663 (plus:DI (zero_extend:DI
664 (match_operand:SI 2 "s_register_operand" "r,r"))
665 (match_operand:DI 1 "s_register_operand" "0,r")))
666 (clobber (reg:CC CC_REGNUM))]
667 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
668 "#"
669 "TARGET_32BIT && reload_completed"
670 [(parallel [(set (reg:CC_C CC_REGNUM)
671 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
672 (match_dup 1)))
673 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
674 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
675 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
676 "
677 {
678 operands[3] = gen_highpart (SImode, operands[0]);
679 operands[0] = gen_lowpart (SImode, operands[0]);
680 operands[4] = gen_highpart (SImode, operands[1]);
681 operands[1] = gen_lowpart (SImode, operands[1]);
682 operands[2] = gen_lowpart (SImode, operands[2]);
683 }"
684 [(set_attr "conds" "clob")
685 (set_attr "length" "8")]
686 )
687
;; SImode add.  On 32-bit targets a constant addend that cannot be
;; encoded directly is synthesized up front by arm_split_constant;
;; everything else falls through to the insn patterns below.
688 (define_expand "addsi3"
689 [(set (match_operand:SI 0 "s_register_operand" "")
690 (plus:SI (match_operand:SI 1 "s_register_operand" "")
691 (match_operand:SI 2 "reg_or_int_operand" "")))]
692 "TARGET_EITHER"
693 "
694 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
695 {
696 arm_split_constant (PLUS, SImode, NULL_RTX,
697 INTVAL (operands[2]), operands[0], operands[1],
698 optimize && can_create_pseudo_p ());
699 DONE;
700 }
701 "
702 )
703
704 ; If there is a scratch available, this will be faster than synthesizing the
705 ; addition.
706 (define_peephole2
707 [(match_scratch:SI 3 "r")
708 (set (match_operand:SI 0 "arm_general_register_operand" "")
709 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
710 (match_operand:SI 2 "const_int_operand" "")))]
711 "TARGET_32BIT &&
712 !(const_ok_for_arm (INTVAL (operands[2]))
713 || const_ok_for_arm (-INTVAL (operands[2])))
714 && const_ok_for_arm (~INTVAL (operands[2]))"
715 [(set (match_dup 3) (match_dup 2))
716 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
717 ""
718 )
719
720 ;; The r/r/k alternative is required when reloading the address
721 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
722 ;; put the duplicated register first, and not try the commutative version.
;; Alternatives (see the "arch" attribute): plain ADD; Thumb-2 ADDW/SUBW
;; for the 12-bit Pj/PJ immediates; SUB with the negated immediate for
;; the L constraint; and a final "#" alternative that is split into an
;; arm_split_constant sequence for arbitrary constants (?n).
723 (define_insn_and_split "*arm_addsi3"
724 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
725 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
726 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
727 "TARGET_32BIT"
728 "@
729 add%?\\t%0, %1, %2
730 add%?\\t%0, %1, %2
731 add%?\\t%0, %2, %1
732 addw%?\\t%0, %1, %2
733 addw%?\\t%0, %1, %2
734 sub%?\\t%0, %1, #%n2
735 sub%?\\t%0, %1, #%n2
736 subw%?\\t%0, %1, #%n2
737 subw%?\\t%0, %1, #%n2
738 #"
739 "TARGET_32BIT
740 && GET_CODE (operands[2]) == CONST_INT
741 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
742 && (reload_completed || !arm_eliminable_register (operands[1]))"
743 [(clobber (const_int 0))]
744 "
745 arm_split_constant (PLUS, SImode, curr_insn,
746 INTVAL (operands[2]), operands[0],
747 operands[1], 0);
748 DONE;
749 "
750 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
751 (set_attr "predicable" "yes")
752 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
753 )
754
755 (define_insn_and_split "*thumb1_addsi3"
756 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
757 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
758 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
759 "TARGET_THUMB1"
760 "*
761 static const char * const asms[] =
762 {
763 \"add\\t%0, %0, %2\",
764 \"sub\\t%0, %0, #%n2\",
765 \"add\\t%0, %1, %2\",
766 \"add\\t%0, %0, %2\",
767 \"add\\t%0, %0, %2\",
768 \"add\\t%0, %1, %2\",
769 \"add\\t%0, %1, %2\",
770 \"#\",
771 \"#\",
772 \"#\"
773 };
774 if ((which_alternative == 2 || which_alternative == 6)
775 && GET_CODE (operands[2]) == CONST_INT
776 && INTVAL (operands[2]) < 0)
777 return \"sub\\t%0, %1, #%n2\";
778 return asms[which_alternative];
779 "
780 "&& reload_completed && CONST_INT_P (operands[2])
781 && ((operands[1] != stack_pointer_rtx
782 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
783 || (operands[1] == stack_pointer_rtx
784 && INTVAL (operands[2]) > 1020))"
785 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
786 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
787 {
788 HOST_WIDE_INT offset = INTVAL (operands[2]);
789 if (operands[1] == stack_pointer_rtx)
790 offset -= 1020;
791 else
792 {
793 if (offset > 255)
794 offset = 255;
795 else if (offset < -255)
796 offset = -255;
797 }
798 operands[3] = GEN_INT (offset);
799 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
800 }
801 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
802 )
803
804 ;; Reloading and elimination of the frame pointer can
805 ;; sometimes cause this optimization to be missed.
;; Collapse "mov rN, #imm ; add rN, rN, sp" into the single Thumb-1
;; "add rN, sp, #imm" form.  Guard matches the Thumb-1 encoding limits:
;; the immediate must be word-aligned and below 1024.
806 (define_peephole2
807 [(set (match_operand:SI 0 "arm_general_register_operand" "")
808 (match_operand:SI 1 "const_int_operand" ""))
809 (set (match_dup 0)
810 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
811 "TARGET_THUMB1
812 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
813 && (INTVAL (operands[1]) & 3) == 0"
814 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
815 ""
816 )
817
;; ARM-mode add that also sets the condition codes (CC_NOOV mode).
;; Second alternative (constraint L) handles constants whose negation is
;; encodable: it emits SUB with the negated immediate via %n2.
818 (define_insn "addsi3_compare0"
819 [(set (reg:CC_NOOV CC_REGNUM)
820 (compare:CC_NOOV
821 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
822 (match_operand:SI 2 "arm_add_operand" "rI,L"))
823 (const_int 0)))
824 (set (match_operand:SI 0 "s_register_operand" "=r,r")
825 (plus:SI (match_dup 1) (match_dup 2)))]
826 "TARGET_ARM"
827 "@
828 add%.\\t%0, %1, %2
829 sub%.\\t%0, %1, #%n2"
830 [(set_attr "conds" "set")]
831 )
832
;; Flag-setting variant of the above when the sum itself is unused:
;; CMN (compare negative) or CMP with the negated immediate.
833 (define_insn "*addsi3_compare0_scratch"
834 [(set (reg:CC_NOOV CC_REGNUM)
835 (compare:CC_NOOV
836 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
837 (match_operand:SI 1 "arm_add_operand" "rI,L"))
838 (const_int 0)))]
839 "TARGET_ARM"
840 "@
841 cmn%?\\t%0, %1
842 cmp%?\\t%0, #%n1"
843 [(set_attr "conds" "set")]
844 )
845
;; Compare of a negated register against another register; implemented
;; as CMN with the operands swapped in the output template (CC_Z, so
;; only the zero-flag result is relied upon).
846 (define_insn "*compare_negsi_si"
847 [(set (reg:CC_Z CC_REGNUM)
848 (compare:CC_Z
849 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
850 (match_operand:SI 1 "s_register_operand" "r")))]
851 "TARGET_32BIT"
852 "cmn%?\\t%1, %0"
853 [(set_attr "conds" "set")]
854 )
855
856 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
857 ;; addend is a constant.
;; The insn condition requires operand 2 to be exactly the negation of
;; operand 3, so the compare against op2 and the add of op3 are a single
;; flag-setting ADDS/SUBS instruction.
858 (define_insn "*cmpsi2_addneg"
859 [(set (reg:CC CC_REGNUM)
860 (compare:CC
861 (match_operand:SI 1 "s_register_operand" "r,r")
862 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
863 (set (match_operand:SI 0 "s_register_operand" "=r,r")
864 (plus:SI (match_dup 1)
865 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
866 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
867 "@
868 add%.\\t%0, %1, %3
869 sub%.\\t%0, %1, #%n3"
870 [(set_attr "conds" "set")]
871 )
872
873 ;; Convert the sequence
874 ;; sub rd, rn, #1
875 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
876 ;; bne dest
877 ;; into
878 ;; subs rd, rn, #1
879 ;; bcs dest ((unsigned)rn >= 1)
880 ;; similarly for the beq variant using bcc.
881 ;; This is a common looping idiom (while (n--))
;; The rewrite is only valid when the flags are dead after the branch
;; (peep2_reg_dead_p), since the replacement sets different flag bits.
;; The finishing C code rebuilds operand 3 as GEU (for NE) or LTU (for
;; EQ) so the branch tests the carry flag produced by the SUBS.
882 (define_peephole2
883 [(set (match_operand:SI 0 "arm_general_register_operand" "")
884 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
885 (const_int -1)))
886 (set (match_operand 2 "cc_register" "")
887 (compare (match_dup 0) (const_int -1)))
888 (set (pc)
889 (if_then_else (match_operator 3 "equality_operator"
890 [(match_dup 2) (const_int 0)])
891 (match_operand 4 "" "")
892 (match_operand 5 "" "")))]
893 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
894 [(parallel[
895 (set (match_dup 2)
896 (compare:CC
897 (match_dup 1) (const_int 1)))
898 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
899 (set (pc)
900 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
901 (match_dup 4)
902 (match_dup 5)))]
903 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
904 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
905 ? GEU : LTU),
906 VOIDmode,
907 operands[2], const0_rtx);"
908 )
909
910 ;; The next four insns work because they compare the result with one of
911 ;; the operands, and we know that the use of the condition code is
912 ;; either GEU or LTU, so we can use the carry flag from the addition
913 ;; instead of doing the compare a second time.
;; Result-producing form: compare (op1 + op2) against op1 (CC_C mode,
;; carry flag only).  Negatable constants use SUB via %n2.
914 (define_insn "*addsi3_compare_op1"
915 [(set (reg:CC_C CC_REGNUM)
916 (compare:CC_C
917 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
918 (match_operand:SI 2 "arm_add_operand" "rI,L"))
919 (match_dup 1)))
920 (set (match_operand:SI 0 "s_register_operand" "=r,r")
921 (plus:SI (match_dup 1) (match_dup 2)))]
922 "TARGET_32BIT"
923 "@
924 add%.\\t%0, %1, %2
925 sub%.\\t%0, %1, #%n2"
926 [(set_attr "conds" "set")]
927 )
928
;; As above, but the sum is compared against the second addend.
929 (define_insn "*addsi3_compare_op2"
930 [(set (reg:CC_C CC_REGNUM)
931 (compare:CC_C
932 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
933 (match_operand:SI 2 "arm_add_operand" "rI,L"))
934 (match_dup 2)))
935 (set (match_operand:SI 0 "s_register_operand" "=r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
937 "TARGET_32BIT"
938 "@
939 add%.\\t%0, %1, %2
940 sub%.\\t%0, %1, #%n2"
941 [(set_attr "conds" "set")]
942 )
943
;; Flag-only form (sum discarded): CMN, or CMP with negated immediate,
;; comparing (op0 + op1) against op0.
944 (define_insn "*compare_addsi2_op0"
945 [(set (reg:CC_C CC_REGNUM)
946 (compare:CC_C
947 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
948 (match_operand:SI 1 "arm_add_operand" "rI,L"))
949 (match_dup 0)))]
950 "TARGET_32BIT"
951 "@
952 cmn%?\\t%0, %1
953 cmp%?\\t%0, #%n1"
954 [(set_attr "conds" "set")]
955 )
956
;; Flag-only form comparing (op0 + op1) against op1.
957 (define_insn "*compare_addsi2_op1"
958 [(set (reg:CC_C CC_REGNUM)
959 (compare:CC_C
960 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
961 (match_operand:SI 1 "arm_add_operand" "rI,L"))
962 (match_dup 1)))]
963 "TARGET_32BIT"
964 "@
965 cmn%?\\t%0, %1
966 cmp%?\\t%0, #%n1"
967 [(set_attr "conds" "set")]
968 )
969
;; Add-with-carry: op1 + op2 plus the carry deduced from the CC register
;; (the LTUGEU iterator covers both the LTU and GEU encodings of the
;; carry-in).  Emits ADC; the flags are consumed, not written.
970 (define_insn "*addsi3_carryin_<optab>"
971 [(set (match_operand:SI 0 "s_register_operand" "=r")
972 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
973 (match_operand:SI 2 "arm_rhs_operand" "rI"))
974 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
975 "TARGET_32BIT"
976 "adc%?\\t%0, %1, %2"
977 [(set_attr "conds" "use")]
978 )
979
;; Same ADC, matching the alternative RTL canonicalization where the
;; carry term appears as the first operand of the inner plus.
980 (define_insn "*addsi3_carryin_alt2_<optab>"
981 [(set (match_operand:SI 0 "s_register_operand" "=r")
982 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
983 (match_operand:SI 1 "s_register_operand" "%r"))
984 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
985 "TARGET_32BIT"
986 "adc%?\\t%0, %1, %2"
987 [(set_attr "conds" "use")]
988 )
989
;; Add-with-carry where one addend is a shifted register; uses the
;; shifter operand (%S2) of ADC.  Type attribute distinguishes
;; immediate shifts from register-specified shifts for scheduling.
990 (define_insn "*addsi3_carryin_shift_<optab>"
991 [(set (match_operand:SI 0 "s_register_operand" "=r")
992 (plus:SI (plus:SI
993 (match_operator:SI 2 "shift_operator"
994 [(match_operand:SI 3 "s_register_operand" "r")
995 (match_operand:SI 4 "reg_or_int_operand" "rM")])
996 (match_operand:SI 1 "s_register_operand" "r"))
997 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
998 "TARGET_32BIT"
999 "adc%?\\t%0, %1, %3%S2"
1000 [(set_attr "conds" "use")
1001 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1002 (const_string "alu_shift")
1003 (const_string "alu_shift_reg")))]
1004 )
1005
;; ADCS variant: consumes the incoming carry and also clobbers the
;; condition codes (flag-setting form, "adc%.").
1006 (define_insn "*addsi3_carryin_clobercc_<optab>"
1007 [(set (match_operand:SI 0 "s_register_operand" "=r")
1008 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1009 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1010 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1011 (clobber (reg:CC CC_REGNUM))]
1012 "TARGET_32BIT"
1013 "adc%.\\t%0, %1, %2"
1014 [(set_attr "conds" "set")]
1015 )
1016
;; incscc: op0 = op1 + (condition op2 on CC register op3 ? 1 : 0).
;; The expander just exposes the pattern; matching is done by the
;; target-specific insns below.
1017 (define_expand "incscc"
1018 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1019 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1020 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1021 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1022 "TARGET_32BIT"
1023 ""
1024 )
1025
;; ARM-mode incscc: conditional ADD #1.  When dest != src (second
;; alternative) a conditional MOV of the unchanged value is emitted
;; first, hence the 8-byte length.
1026 (define_insn "*arm_incscc"
1027 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1028 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1029 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1030 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1031 "TARGET_ARM"
1032 "@
1033 add%d2\\t%0, %1, #1
1034 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1035 [(set_attr "conds" "use")
1036 (set_attr "length" "4,8")]
1037 )
1038
1039 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
;; The split loads ~(x-1) into the scratch (operand 3), then emits a
;; single MVN-with-shift of it, avoiding a separate "subtract 1" insn.
1040 (define_split
1041 [(set (match_operand:SI 0 "s_register_operand" "")
1042 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1043 (match_operand:SI 2 "s_register_operand" ""))
1044 (const_int -1)))
1045 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1046 "TARGET_32BIT"
1047 [(set (match_dup 3) (match_dup 1))
1048 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1049 "
1050 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1051 ")
1052
;; Single-precision FP add.  Cirrus Maverick co-processor instructions
;; cannot take a non-register second operand, so force it into a
;; register when targeting Maverick.
1053 (define_expand "addsf3"
1054 [(set (match_operand:SF 0 "s_register_operand" "")
1055 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1056 (match_operand:SF 2 "arm_float_add_operand" "")))]
1057 "TARGET_32BIT && TARGET_HARD_FLOAT"
1058 "
1059 if (TARGET_MAVERICK
1060 && !cirrus_fp_register (operands[2], SFmode))
1061 operands[2] = force_reg (SFmode, operands[2]);
1062 ")
1063
;; Double-precision FP add; same Maverick constraint as addsf3.
;; Disabled for single-precision-only VFP (!TARGET_VFP_SINGLE).
1064 (define_expand "adddf3"
1065 [(set (match_operand:DF 0 "s_register_operand" "")
1066 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1067 (match_operand:DF 2 "arm_float_add_operand" "")))]
1068 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1069 "
1070 if (TARGET_MAVERICK
1071 && !cirrus_fp_register (operands[2], DFmode))
1072 operands[2] = force_reg (DFmode, operands[2]);
1073 ")
1074
;; 64-bit subtract.  Dispatches to the Cirrus Maverick insn when both
;; ends live in Maverick FP registers; on Thumb-1, operands must be
;; forced into registers first.
1075 (define_expand "subdi3"
1076 [(parallel
1077 [(set (match_operand:DI 0 "s_register_operand" "")
1078 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1079 (match_operand:DI 2 "s_register_operand" "")))
1080 (clobber (reg:CC CC_REGNUM))])]
1081 "TARGET_EITHER"
1082 "
1083 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1084 && TARGET_32BIT
1085 && cirrus_fp_register (operands[0], DImode)
1086 && cirrus_fp_register (operands[1], DImode))
1087 {
1088 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1089 DONE;
1090 }
1091
1092 if (TARGET_THUMB1)
1093 {
1094 if (GET_CODE (operands[1]) != REG)
1095 operands[1] = force_reg (DImode, operands[1]);
1096 if (GET_CODE (operands[2]) != REG)
1097 operands[2] = force_reg (DImode, operands[2]);
1098 }
1099 "
1100 )
1101
;; 64-bit subtract as SUBS (low word) + SBC (high word); clobbers the
;; flags to propagate the borrow.  %Q = low half, %R = high half.
1102 (define_insn "*arm_subdi3"
1103 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1104 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1105 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1106 (clobber (reg:CC CC_REGNUM))]
1107 "TARGET_32BIT && !TARGET_NEON"
1108 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1109 [(set_attr "conds" "clob")
1110 (set_attr "length" "8")]
1111 )
1112
;; Thumb-1 64-bit subtract; destination must match operand 1 ("0").
1113 (define_insn "*thumb_subdi3"
1114 [(set (match_operand:DI 0 "register_operand" "=l")
1115 (minus:DI (match_operand:DI 1 "register_operand" "0")
1116 (match_operand:DI 2 "register_operand" "l")))
1117 (clobber (reg:CC CC_REGNUM))]
1118 "TARGET_THUMB1"
1119 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1120 [(set_attr "length" "4")]
1121 )
1122
;; DI minus zero-extended SI: high word only needs the borrow (#0).
1123 (define_insn "*subdi_di_zesidi"
1124 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1125 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1126 (zero_extend:DI
1127 (match_operand:SI 2 "s_register_operand" "r,r"))))
1128 (clobber (reg:CC CC_REGNUM))]
1129 "TARGET_32BIT"
1130 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1131 [(set_attr "conds" "clob")
1132 (set_attr "length" "8")]
1133 )
1134
;; DI minus sign-extended SI: the high word subtracts the sign bits,
;; i.e. op2 arithmetically shifted right by 31.
1135 (define_insn "*subdi_di_sesidi"
1136 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1137 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1138 (sign_extend:DI
1139 (match_operand:SI 2 "s_register_operand" "r,r"))))
1140 (clobber (reg:CC CC_REGNUM))]
1141 "TARGET_32BIT"
1142 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1143 [(set_attr "conds" "clob")
1144 (set_attr "length" "8")]
1145 )
1146
;; Zero-extended SI minus DI: needs the reverse-subtract forms
;; (RSBS/RSC), so this is ARM-mode only (no Thumb-2 RSC).
1147 (define_insn "*subdi_zesidi_di"
1148 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1149 (minus:DI (zero_extend:DI
1150 (match_operand:SI 2 "s_register_operand" "r,r"))
1151 (match_operand:DI 1 "s_register_operand" "0,r")))
1152 (clobber (reg:CC CC_REGNUM))]
1153 "TARGET_ARM"
1154 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1155 [(set_attr "conds" "clob")
1156 (set_attr "length" "8")]
1157 )
1158
;; Sign-extended SI minus DI: reverse subtract with the sign word
;; supplied as op2 asr #31; ARM-mode only (uses RSC).
1159 (define_insn "*subdi_sesidi_di"
1160 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1161 (minus:DI (sign_extend:DI
1162 (match_operand:SI 2 "s_register_operand" "r,r"))
1163 (match_operand:DI 1 "s_register_operand" "0,r")))
1164 (clobber (reg:CC CC_REGNUM))]
1165 "TARGET_ARM"
1166 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1167 [(set_attr "conds" "clob")
1168 (set_attr "length" "8")]
1169 )
1170
;; Difference of two zero-extended SI values: high word is just the
;; borrow, produced here as "sbc %R0, %1, %1" (0 or -1).
1171 (define_insn "*subdi_zesidi_zesidi"
1172 [(set (match_operand:DI 0 "s_register_operand" "=r")
1173 (minus:DI (zero_extend:DI
1174 (match_operand:SI 1 "s_register_operand" "r"))
1175 (zero_extend:DI
1176 (match_operand:SI 2 "s_register_operand" "r"))))
1177 (clobber (reg:CC CC_REGNUM))]
1178 "TARGET_32BIT"
1179 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1180 [(set_attr "conds" "clob")
1181 (set_attr "length" "8")]
1182 )
1183
;; 32-bit subtract.  A constant minuend is split into encodable pieces
;; by arm_split_constant on 32-bit targets; Thumb-1 just forces it into
;; a register.
1184 (define_expand "subsi3"
1185 [(set (match_operand:SI 0 "s_register_operand" "")
1186 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1187 (match_operand:SI 2 "s_register_operand" "")))]
1188 "TARGET_EITHER"
1189 "
1190 if (GET_CODE (operands[1]) == CONST_INT)
1191 {
1192 if (TARGET_32BIT)
1193 {
1194 arm_split_constant (MINUS, SImode, NULL_RTX,
1195 INTVAL (operands[1]), operands[0],
1196 operands[2], optimize && can_create_pseudo_p ());
1197 DONE;
1198 }
1199 else /* TARGET_THUMB1 */
1200 operands[1] = force_reg (SImode, operands[1]);
1201 }
1202 "
1203 )
1204
;; Thumb-1 subtract; flag-setting (Thumb-1 SUB always sets flags).
1205 (define_insn "thumb1_subsi3_insn"
1206 [(set (match_operand:SI 0 "register_operand" "=l")
1207 (minus:SI (match_operand:SI 1 "register_operand" "l")
1208 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1209 "TARGET_THUMB1"
1210 "sub\\t%0, %1, %2"
1211 [(set_attr "length" "2")
1212 (set_attr "conds" "set")])
1213
1214 ; ??? Check Thumb-2 split length
;; 32-bit subtract insn: RSB when the constant is the minuend, SUB
;; otherwise.  Non-encodable constants (?n alternatives) are split
;; post-match via arm_split_constant.
1215 (define_insn_and_split "*arm_subsi3_insn"
1216 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1217 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1218 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1219 "TARGET_32BIT"
1220 "@
1221 rsb%?\\t%0, %2, %1
1222 sub%?\\t%0, %1, %2
1223 sub%?\\t%0, %1, %2
1224 #
1225 #"
1226 "&& ((GET_CODE (operands[1]) == CONST_INT
1227 && !const_ok_for_arm (INTVAL (operands[1])))
1228 || (GET_CODE (operands[2]) == CONST_INT
1229 && !const_ok_for_arm (INTVAL (operands[2]))))"
1230 [(clobber (const_int 0))]
1231 "
1232 arm_split_constant (MINUS, SImode, curr_insn,
1233 INTVAL (operands[1]), operands[0], operands[2], 0);
1234 DONE;
1235 "
1236 [(set_attr "length" "4,4,4,16,16")
1237 (set_attr "predicable" "yes")]
1238 )
1239
;; (const - reg) where the constant is not encodable but its bitwise
;; complement is: materialise the constant in a scratch via MVN, then
;; subtract normally.
1240 (define_peephole2
1241 [(match_scratch:SI 3 "r")
1242 (set (match_operand:SI 0 "arm_general_register_operand" "")
1243 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1244 (match_operand:SI 2 "arm_general_register_operand" "")))]
1245 "TARGET_32BIT
1246 && !const_ok_for_arm (INTVAL (operands[1]))
1247 && const_ok_for_arm (~INTVAL (operands[1]))"
1248 [(set (match_dup 3) (match_dup 1))
1249 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1250 ""
1251 )
1252
;; Flag-setting subtract compared against zero (CC_NOOV); RSB form
;; when the minuend is the immediate.
1253 (define_insn "*subsi3_compare0"
1254 [(set (reg:CC_NOOV CC_REGNUM)
1255 (compare:CC_NOOV
1256 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1257 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1258 (const_int 0)))
1259 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1260 (minus:SI (match_dup 1) (match_dup 2)))]
1261 "TARGET_32BIT"
1262 "@
1263 sub%.\\t%0, %1, %2
1264 rsb%.\\t%0, %2, %1"
1265 [(set_attr "conds" "set")]
1266 )
1267
;; Subtract whose flags are a full compare of op1 against op2 (CC
;; mode, all flags meaningful), with the difference also stored.
1268 (define_insn "*subsi3_compare"
1269 [(set (reg:CC CC_REGNUM)
1270 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1271 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1272 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1273 (minus:SI (match_dup 1) (match_dup 2)))]
1274 "TARGET_32BIT"
1275 "@
1276 sub%.\\t%0, %1, %2
1277 rsb%.\\t%0, %2, %1"
1278 [(set_attr "conds" "set")]
1279 )
1280
;; decscc: op0 = op1 - (condition op2 on CC register op3 ? 1 : 0).
1281 (define_expand "decscc"
1282 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1283 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1284 (match_operator:SI 2 "arm_comparison_operator"
1285 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1286 "TARGET_32BIT"
1287 ""
1288 )
1289
;; ARM-mode decscc: conditional SUB #1; when dest != src a conditional
;; MOV of the unchanged value precedes it (8-byte alternative).
1290 (define_insn "*arm_decscc"
1291 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1292 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1293 (match_operator:SI 2 "arm_comparison_operator"
1294 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1295 "TARGET_ARM"
1296 "@
1297 sub%d2\\t%0, %1, #1
1298 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1299 [(set_attr "conds" "use")
1300 (set_attr "length" "*,8")]
1301 )
1302
;; Single-precision FP subtract.  Cirrus Maverick requires both
;; operands in registers, so force any non-Maverick-register operand.
1303 (define_expand "subsf3"
1304 [(set (match_operand:SF 0 "s_register_operand" "")
1305 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1306 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1307 "TARGET_32BIT && TARGET_HARD_FLOAT"
1308 "
1309 if (TARGET_MAVERICK)
1310 {
1311 if (!cirrus_fp_register (operands[1], SFmode))
1312 operands[1] = force_reg (SFmode, operands[1]);
1313 if (!cirrus_fp_register (operands[2], SFmode))
1314 operands[2] = force_reg (SFmode, operands[2]);
1315 }
1316 ")
1317
;; Double-precision FP subtract; same Maverick handling, disabled for
;; single-precision-only VFP.
1318 (define_expand "subdf3"
1319 [(set (match_operand:DF 0 "s_register_operand" "")
1320 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1321 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1322 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1323 "
1324 if (TARGET_MAVERICK)
1325 {
1326 if (!cirrus_fp_register (operands[1], DFmode))
1327 operands[1] = force_reg (DFmode, operands[1]);
1328 if (!cirrus_fp_register (operands[2], DFmode))
1329 operands[2] = force_reg (DFmode, operands[2]);
1330 }
1331 ")
1332
1333 \f
1334 ;; Multiplication insns
1335
;; 32-bit multiply expander; note operand 2 is listed first inside the
;; mult, matching the insn patterns below.
1336 (define_expand "mulsi3"
1337 [(set (match_operand:SI 0 "s_register_operand" "")
1338 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1339 (match_operand:SI 1 "s_register_operand" "")))]
1340 "TARGET_EITHER"
1341 ""
1342 )
1343
1344 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; Pre-v6 MUL: the destination may not overlap the first source
;; (hardware restriction), hence the earlyclobber output.
1345 (define_insn "*arm_mulsi3"
1346 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1347 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1348 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1349 "TARGET_32BIT && !arm_arch6"
1350 "mul%?\\t%0, %2, %1"
1351 [(set_attr "insn" "mul")
1352 (set_attr "predicable" "yes")]
1353 )
1354
;; v6+ MUL: no overlap restriction, so no earlyclobber needed.
1355 (define_insn "*arm_mulsi3_v6"
1356 [(set (match_operand:SI 0 "s_register_operand" "=r")
1357 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1358 (match_operand:SI 2 "s_register_operand" "r")))]
1359 "TARGET_32BIT && arm_arch6"
1360 "mul%?\\t%0, %1, %2"
1361 [(set_attr "insn" "mul")
1362 (set_attr "predicable" "yes")]
1363 )
1364
1365 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1366 ; 1 and 2; are the same, because reload will make operand 0 match
1367 ; operand 1 without realizing that this conflicts with operand 2. We fix
1368 ; this by adding another alternative to match this case, and then `reload'
1369 ; it ourselves. This alternative must come first.
1370 (define_insn "*thumb_mulsi3"
1371 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1372 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1373 (match_operand:SI 2 "register_operand" "l,l,l")))]
1374 "TARGET_THUMB1 && !arm_arch6"
1375 "*
1376 if (which_alternative < 2)
1377 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1378 else
1379 return \"mul\\t%0, %2\";
1380 "
1381 [(set_attr "length" "4,4,2")
1382 (set_attr "insn" "mul")]
1383 )
1384
;; Thumb-1 MUL on v6: two-operand form, destination tied to one of the
;; sources in each alternative.
1385 (define_insn "*thumb_mulsi3_v6"
1386 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1387 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1388 (match_operand:SI 2 "register_operand" "l,0,0")))]
1389 "TARGET_THUMB1 && arm_arch6"
1390 "@
1391 mul\\t%0, %2
1392 mul\\t%0, %1
1393 mul\\t%0, %1"
1394 [(set_attr "length" "2")
1395 (set_attr "insn" "mul")]
1396 )
1397
;; Pre-v6 MULS: multiply and set flags; earlyclobber for the no-overlap
;; restriction (see *arm_mulsi3).
1398 (define_insn "*mulsi3_compare0"
1399 [(set (reg:CC_NOOV CC_REGNUM)
1400 (compare:CC_NOOV (mult:SI
1401 (match_operand:SI 2 "s_register_operand" "r,r")
1402 (match_operand:SI 1 "s_register_operand" "%0,r"))
1403 (const_int 0)))
1404 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1405 (mult:SI (match_dup 2) (match_dup 1)))]
1406 "TARGET_ARM && !arm_arch6"
1407 "mul%.\\t%0, %2, %1"
1408 [(set_attr "conds" "set")
1409 (set_attr "insn" "muls")]
1410 )
1411
;; v6 MULS; only enabled when optimizing for size (otherwise a MUL +
;; separate compare is preferred by the compiler).
1412 (define_insn "*mulsi3_compare0_v6"
1413 [(set (reg:CC_NOOV CC_REGNUM)
1414 (compare:CC_NOOV (mult:SI
1415 (match_operand:SI 2 "s_register_operand" "r")
1416 (match_operand:SI 1 "s_register_operand" "r"))
1417 (const_int 0)))
1418 (set (match_operand:SI 0 "s_register_operand" "=r")
1419 (mult:SI (match_dup 2) (match_dup 1)))]
1420 "TARGET_ARM && arm_arch6 && optimize_size"
1421 "mul%.\\t%0, %2, %1"
1422 [(set_attr "conds" "set")
1423 (set_attr "insn" "muls")]
1424 )
1425
;; MULS with the product discarded (scratch destination), pre-v6.
1426 (define_insn "*mulsi_compare0_scratch"
1427 [(set (reg:CC_NOOV CC_REGNUM)
1428 (compare:CC_NOOV (mult:SI
1429 (match_operand:SI 2 "s_register_operand" "r,r")
1430 (match_operand:SI 1 "s_register_operand" "%0,r"))
1431 (const_int 0)))
1432 (clobber (match_scratch:SI 0 "=&r,&r"))]
1433 "TARGET_ARM && !arm_arch6"
1434 "mul%.\\t%0, %2, %1"
1435 [(set_attr "conds" "set")
1436 (set_attr "insn" "muls")]
1437 )
1438
;; MULS with discarded product, v6 size-optimized variant.
1439 (define_insn "*mulsi_compare0_scratch_v6"
1440 [(set (reg:CC_NOOV CC_REGNUM)
1441 (compare:CC_NOOV (mult:SI
1442 (match_operand:SI 2 "s_register_operand" "r")
1443 (match_operand:SI 1 "s_register_operand" "r"))
1444 (const_int 0)))
1445 (clobber (match_scratch:SI 0 "=r"))]
1446 "TARGET_ARM && arm_arch6 && optimize_size"
1447 "mul%.\\t%0, %2, %1"
1448 [(set_attr "conds" "set")
1449 (set_attr "insn" "muls")]
1450 )
1451
1452 ;; Unnamed templates to match MLA instruction.
1453
;; Pre-v6 MLA (multiply-accumulate): dest may not overlap the sources,
;; hence the earlyclobber outputs.
1454 (define_insn "*mulsi3addsi"
1455 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1456 (plus:SI
1457 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1458 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1459 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1460 "TARGET_32BIT && !arm_arch6"
1461 "mla%?\\t%0, %2, %1, %3"
1462 [(set_attr "insn" "mla")
1463 (set_attr "predicable" "yes")]
1464 )
1465
;; v6+ MLA: no overlap restriction.
1466 (define_insn "*mulsi3addsi_v6"
1467 [(set (match_operand:SI 0 "s_register_operand" "=r")
1468 (plus:SI
1469 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1470 (match_operand:SI 1 "s_register_operand" "r"))
1471 (match_operand:SI 3 "s_register_operand" "r")))]
1472 "TARGET_32BIT && arm_arch6"
1473 "mla%?\\t%0, %2, %1, %3"
1474 [(set_attr "insn" "mla")
1475 (set_attr "predicable" "yes")]
1476 )
1477
;; Flag-setting MLAS with the sum stored.
;; NOTE(review): condition is "arm_arch6" although the constraints use
;; the pre-v6 earlyclobber style — confirm against upstream history.
1478 (define_insn "*mulsi3addsi_compare0"
1479 [(set (reg:CC_NOOV CC_REGNUM)
1480 (compare:CC_NOOV
1481 (plus:SI (mult:SI
1482 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1483 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1484 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1485 (const_int 0)))
1486 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1487 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1488 (match_dup 3)))]
1489 "TARGET_ARM && arm_arch6"
1490 "mla%.\\t%0, %2, %1, %3"
1491 [(set_attr "conds" "set")
1492 (set_attr "insn" "mlas")]
1493 )
1494
;; Flag-setting MLAS, v6 size-optimized variant.
1495 (define_insn "*mulsi3addsi_compare0_v6"
1496 [(set (reg:CC_NOOV CC_REGNUM)
1497 (compare:CC_NOOV
1498 (plus:SI (mult:SI
1499 (match_operand:SI 2 "s_register_operand" "r")
1500 (match_operand:SI 1 "s_register_operand" "r"))
1501 (match_operand:SI 3 "s_register_operand" "r"))
1502 (const_int 0)))
1503 (set (match_operand:SI 0 "s_register_operand" "=r")
1504 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1505 (match_dup 3)))]
1506 "TARGET_ARM && arm_arch6 && optimize_size"
1507 "mla%.\\t%0, %2, %1, %3"
1508 [(set_attr "conds" "set")
1509 (set_attr "insn" "mlas")]
1510 )
1511
;; Flag-setting MLAS with the sum discarded (scratch dest), pre-v6.
1512 (define_insn "*mulsi3addsi_compare0_scratch"
1513 [(set (reg:CC_NOOV CC_REGNUM)
1514 (compare:CC_NOOV
1515 (plus:SI (mult:SI
1516 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1517 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1518 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1519 (const_int 0)))
1520 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1521 "TARGET_ARM && !arm_arch6"
1522 "mla%.\\t%0, %2, %1, %3"
1523 [(set_attr "conds" "set")
1524 (set_attr "insn" "mlas")]
1525 )
1526
;; Flag-setting MLAS with discarded sum, v6 size-optimized variant.
1527 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1528 [(set (reg:CC_NOOV CC_REGNUM)
1529 (compare:CC_NOOV
1530 (plus:SI (mult:SI
1531 (match_operand:SI 2 "s_register_operand" "r")
1532 (match_operand:SI 1 "s_register_operand" "r"))
1533 (match_operand:SI 3 "s_register_operand" "r"))
1534 (const_int 0)))
1535 (clobber (match_scratch:SI 0 "=r"))]
1536 "TARGET_ARM && arm_arch6 && optimize_size"
1537 "mla%.\\t%0, %2, %1, %3"
1538 [(set_attr "conds" "set")
1539 (set_attr "insn" "mlas")]
1540 )
1541
;; MLS (multiply-subtract): op0 = op3 - op2*op1; Thumb-2 era insn
;; (arm_arch_thumb2 guard).
1542 (define_insn "*mulsi3subsi"
1543 [(set (match_operand:SI 0 "s_register_operand" "=r")
1544 (minus:SI
1545 (match_operand:SI 3 "s_register_operand" "r")
1546 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1547 (match_operand:SI 1 "s_register_operand" "r"))))]
1548 "TARGET_32BIT && arm_arch_thumb2"
1549 "mls%?\\t%0, %2, %1, %3"
1550 [(set_attr "insn" "mla")
1551 (set_attr "predicable" "yes")]
1552 )
1553
;; Signed 32x32+64 -> 64 multiply-accumulate expander (SMLAL).
1554 (define_expand "maddsidi4"
1555 [(set (match_operand:DI 0 "s_register_operand" "")
1556 (plus:DI
1557 (mult:DI
1558 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1559 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1560 (match_operand:DI 3 "s_register_operand" "")))]
1561 "TARGET_32BIT && arm_arch3m"
1562 "")
1563
;; Pre-v6 SMLAL: accumulator (op1) is tied to the destination ("0");
;; earlyclobber keeps the sources out of the destination pair.
1564 (define_insn "*mulsidi3adddi"
1565 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1566 (plus:DI
1567 (mult:DI
1568 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1569 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1570 (match_operand:DI 1 "s_register_operand" "0")))]
1571 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1572 "smlal%?\\t%Q0, %R0, %3, %2"
1573 [(set_attr "insn" "smlal")
1574 (set_attr "predicable" "yes")]
1575 )
1576
;; v6+ SMLAL: no earlyclobber needed.
1577 (define_insn "*mulsidi3adddi_v6"
1578 [(set (match_operand:DI 0 "s_register_operand" "=r")
1579 (plus:DI
1580 (mult:DI
1581 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1582 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1583 (match_operand:DI 1 "s_register_operand" "0")))]
1584 "TARGET_32BIT && arm_arch6"
1585 "smlal%?\\t%Q0, %R0, %3, %2"
1586 [(set_attr "insn" "smlal")
1587 (set_attr "predicable" "yes")]
1588 )
1589
1590 ;; 32x32->64 widening multiply.
1591 ;; As with mulsi3, the only difference between the v3-5 and v6+
1592 ;; versions of these patterns is the requirement that the output not
1593 ;; overlap the inputs, but that still means we have to have a named
1594 ;; expander and two different starred insns.
1595
;; Signed widening multiply expander (SMULL).
1596 (define_expand "mulsidi3"
1597 [(set (match_operand:DI 0 "s_register_operand" "")
1598 (mult:DI
1599 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1600 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1601 "TARGET_32BIT && arm_arch3m"
1602 ""
1603 )
1604
;; Pre-v6 SMULL: earlyclobber output (must not overlap inputs).
1605 (define_insn "*mulsidi3_nov6"
1606 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1607 (mult:DI
1608 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1609 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1610 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1611 "smull%?\\t%Q0, %R0, %1, %2"
1612 [(set_attr "insn" "smull")
1613 (set_attr "predicable" "yes")]
1614 )
1615
;; v6+ SMULL: overlap allowed.
1616 (define_insn "*mulsidi3_v6"
1617 [(set (match_operand:DI 0 "s_register_operand" "=r")
1618 (mult:DI
1619 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1620 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1621 "TARGET_32BIT && arm_arch6"
1622 "smull%?\\t%Q0, %R0, %1, %2"
1623 [(set_attr "insn" "smull")
1624 (set_attr "predicable" "yes")]
1625 )
1626
;; Unsigned widening multiply expander (UMULL).
1627 (define_expand "umulsidi3"
1628 [(set (match_operand:DI 0 "s_register_operand" "")
1629 (mult:DI
1630 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1631 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1632 "TARGET_32BIT && arm_arch3m"
1633 ""
1634 )
1635
;; Pre-v6 UMULL: earlyclobber output.
1636 (define_insn "*umulsidi3_nov6"
1637 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1638 (mult:DI
1639 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1640 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1641 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1642 "umull%?\\t%Q0, %R0, %1, %2"
1643 [(set_attr "insn" "umull")
1644 (set_attr "predicable" "yes")]
1645 )
1646
;; v6+ UMULL: overlap allowed.
1647 (define_insn "*umulsidi3_v6"
1648 [(set (match_operand:DI 0 "s_register_operand" "=r")
1649 (mult:DI
1650 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1651 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1652 "TARGET_32BIT && arm_arch6"
1653 "umull%?\\t%Q0, %R0, %1, %2"
1654 [(set_attr "insn" "umull")
1655 (set_attr "predicable" "yes")]
1656 )
1657
;; Unsigned 32x32+64 -> 64 multiply-accumulate expander (UMLAL).
1658 (define_expand "umaddsidi4"
1659 [(set (match_operand:DI 0 "s_register_operand" "")
1660 (plus:DI
1661 (mult:DI
1662 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1663 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1664 (match_operand:DI 3 "s_register_operand" "")))]
1665 "TARGET_32BIT && arm_arch3m"
1666 "")
1667
;; Pre-v6 UMLAL: accumulator tied to destination; earlyclobber keeps
;; the sources out of the destination pair.
1668 (define_insn "*umulsidi3adddi"
1669 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1670 (plus:DI
1671 (mult:DI
1672 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1673 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1674 (match_operand:DI 1 "s_register_operand" "0")))]
1675 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1676 "umlal%?\\t%Q0, %R0, %3, %2"
1677 [(set_attr "insn" "umlal")
1678 (set_attr "predicable" "yes")]
1679 )
1680
;; v6+ UMLAL: no earlyclobber needed.
1681 (define_insn "*umulsidi3adddi_v6"
1682 [(set (match_operand:DI 0 "s_register_operand" "=r")
1683 (plus:DI
1684 (mult:DI
1685 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1686 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1687 (match_operand:DI 1 "s_register_operand" "0")))]
1688 "TARGET_32BIT && arm_arch6"
1689 "umlal%?\\t%Q0, %R0, %3, %2"
1690 [(set_attr "insn" "umlal")
1691 (set_attr "predicable" "yes")]
1692 )
1693
1694 (define_expand "smulsi3_highpart"
1695 [(parallel
1696 [(set (match_operand:SI 0 "s_register_operand" "")
1697 (truncate:SI
1698 (lshiftrt:DI
1699 (mult:DI
1700 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1701 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1702 (const_int 32))))
1703 (clobber (match_scratch:SI 3 ""))])]
1704 "TARGET_32BIT && arm_arch3m"
1705 ""
1706 )
1707
1708 (define_insn "*smulsi3_highpart_nov6"
1709 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1710 (truncate:SI
1711 (lshiftrt:DI
1712 (mult:DI
1713 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1714 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1715 (const_int 32))))
1716 (clobber (match_scratch:SI 3 "=&r,&r"))]
1717 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1718 "smull%?\\t%3, %0, %2, %1"
1719 [(set_attr "insn" "smull")
1720 (set_attr "predicable" "yes")]
1721 )
1722
1723 (define_insn "*smulsi3_highpart_v6"
1724 [(set (match_operand:SI 0 "s_register_operand" "=r")
1725 (truncate:SI
1726 (lshiftrt:DI
1727 (mult:DI
1728 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1729 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1730 (const_int 32))))
1731 (clobber (match_scratch:SI 3 "=r"))]
1732 "TARGET_32BIT && arm_arch6"
1733 "smull%?\\t%3, %0, %2, %1"
1734 [(set_attr "insn" "smull")
1735 (set_attr "predicable" "yes")]
1736 )
1737
1738 (define_expand "umulsi3_highpart"
1739 [(parallel
1740 [(set (match_operand:SI 0 "s_register_operand" "")
1741 (truncate:SI
1742 (lshiftrt:DI
1743 (mult:DI
1744 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1745 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1746 (const_int 32))))
1747 (clobber (match_scratch:SI 3 ""))])]
1748 "TARGET_32BIT && arm_arch3m"
1749 ""
1750 )
1751
;; High 32 bits of an unsigned 32x32->64 multiply, pre-ARMv6 form.
;; UMULL writes the unwanted low word into scratch operand 3; the
;; earlyclobbers keep outputs distinct from inputs as pre-v6 multiplies
;; require.
1752 (define_insn "*umulsi3_highpart_nov6"
1753 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1754 (truncate:SI
1755 (lshiftrt:DI
1756 (mult:DI
1757 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1758 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1759 (const_int 32))))
1760 (clobber (match_scratch:SI 3 "=&r,&r"))]
1761 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1762 "umull%?\\t%3, %0, %2, %1"
1763 [(set_attr "insn" "umull")
1764 (set_attr "predicable" "yes")]
1765 )
1766
;; ARMv6 version of the unsigned high-part multiply; no register
;; restrictions, so no earlyclobbers are needed.
1767 (define_insn "*umulsi3_highpart_v6"
1768 [(set (match_operand:SI 0 "s_register_operand" "=r")
1769 (truncate:SI
1770 (lshiftrt:DI
1771 (mult:DI
1772 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1773 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1774 (const_int 32))))
1775 (clobber (match_scratch:SI 3 "=r"))]
1776 "TARGET_32BIT && arm_arch6"
1777 "umull%?\\t%3, %0, %2, %1"
1778 [(set_attr "insn" "umull")
1779 (set_attr "predicable" "yes")]
1780 )
1781
;; Signed 16x16->32 multiply of the bottom halfwords of both operands
;; (SMULBB).  Needs the DSP multiply extensions; the "%" marks the
;; operands as commutative.
1782 (define_insn "mulhisi3"
1783 [(set (match_operand:SI 0 "s_register_operand" "=r")
1784 (mult:SI (sign_extend:SI
1785 (match_operand:HI 1 "s_register_operand" "%r"))
1786 (sign_extend:SI
1787 (match_operand:HI 2 "s_register_operand" "r"))))]
1788 "TARGET_DSP_MULTIPLY"
1789 "smulbb%?\\t%0, %1, %2"
1790 [(set_attr "insn" "smulxy")
1791 (set_attr "predicable" "yes")]
1792 )
1793
;; SMULTB: top halfword of operand 1 (expressed as an arithmetic shift
;; right by 16) times the bottom halfword of operand 2.
1794 (define_insn "*mulhisi3tb"
1795 [(set (match_operand:SI 0 "s_register_operand" "=r")
1796 (mult:SI (ashiftrt:SI
1797 (match_operand:SI 1 "s_register_operand" "r")
1798 (const_int 16))
1799 (sign_extend:SI
1800 (match_operand:HI 2 "s_register_operand" "r"))))]
1801 "TARGET_DSP_MULTIPLY"
1802 "smultb%?\\t%0, %1, %2"
1803 [(set_attr "insn" "smulxy")
1804 (set_attr "predicable" "yes")]
1805 )
1806
;; SMULBT: bottom halfword of operand 1 times the top halfword of
;; operand 2 (arithmetic shift right by 16).
1807 (define_insn "*mulhisi3bt"
1808 [(set (match_operand:SI 0 "s_register_operand" "=r")
1809 (mult:SI (sign_extend:SI
1810 (match_operand:HI 1 "s_register_operand" "r"))
1811 (ashiftrt:SI
1812 (match_operand:SI 2 "s_register_operand" "r")
1813 (const_int 16))))]
1814 "TARGET_DSP_MULTIPLY"
1815 "smulbt%?\\t%0, %1, %2"
1816 [(set_attr "insn" "smulxy")
1817 (set_attr "predicable" "yes")]
1818 )
1819
;; SMULTT: top halfword of operand 1 times the top halfword of
;; operand 2.
1820 (define_insn "*mulhisi3tt"
1821 [(set (match_operand:SI 0 "s_register_operand" "=r")
1822 (mult:SI (ashiftrt:SI
1823 (match_operand:SI 1 "s_register_operand" "r")
1824 (const_int 16))
1825 (ashiftrt:SI
1826 (match_operand:SI 2 "s_register_operand" "r")
1827 (const_int 16))))]
1828 "TARGET_DSP_MULTIPLY"
1829 "smultt%?\\t%0, %1, %2"
1830 [(set_attr "insn" "smulxy")
1831 (set_attr "predicable" "yes")]
1832 )
1833
;; Signed 16x16 multiply-accumulate into a 32-bit register (SMLABB):
;; op0 = (bottom half of op1) * (bottom half of op2) + op3.
1834 (define_insn "maddhisi4"
1835 [(set (match_operand:SI 0 "s_register_operand" "=r")
1836 (plus:SI (mult:SI (sign_extend:SI
1837 (match_operand:HI 1 "s_register_operand" "r"))
1838 (sign_extend:SI
1839 (match_operand:HI 2 "s_register_operand" "r")))
1840 (match_operand:SI 3 "s_register_operand" "r")))]
1841 "TARGET_DSP_MULTIPLY"
1842 "smlabb%?\\t%0, %1, %2, %3"
1843 [(set_attr "insn" "smlaxy")
1844 (set_attr "predicable" "yes")]
1845 )
1846
1847 ;; Note: there is no separate maddhisi4bt pattern; this is the canonical form.
;; SMLATB: (top half of op1) * (bottom half of op2) + op3.
1848 (define_insn "*maddhisi4tb"
1849 [(set (match_operand:SI 0 "s_register_operand" "=r")
1850 (plus:SI (mult:SI (ashiftrt:SI
1851 (match_operand:SI 1 "s_register_operand" "r")
1852 (const_int 16))
1853 (sign_extend:SI
1854 (match_operand:HI 2 "s_register_operand" "r")))
1855 (match_operand:SI 3 "s_register_operand" "r")))]
1856 "TARGET_DSP_MULTIPLY"
1857 "smlatb%?\\t%0, %1, %2, %3"
1858 [(set_attr "insn" "smlaxy")
1859 (set_attr "predicable" "yes")]
1860 )
1861
;; SMLATT: (top half of op1) * (top half of op2) + op3.
1862 (define_insn "*maddhisi4tt"
1863 [(set (match_operand:SI 0 "s_register_operand" "=r")
1864 (plus:SI (mult:SI (ashiftrt:SI
1865 (match_operand:SI 1 "s_register_operand" "r")
1866 (const_int 16))
1867 (ashiftrt:SI
1868 (match_operand:SI 2 "s_register_operand" "r")
1869 (const_int 16)))
1870 (match_operand:SI 3 "s_register_operand" "r")))]
1871 "TARGET_DSP_MULTIPLY"
1872 "smlatt%?\\t%0, %1, %2, %3"
1873 [(set_attr "insn" "smlaxy")
1874 (set_attr "predicable" "yes")]
1875 )
1876
;; Widening 16x16->64 signed multiply-accumulate (SMLALBB).  The "0"
;; constraint ties the 64-bit accumulator (operand 3) to the output,
;; since SMLAL accumulates in place; %Q0/%R0 name its low/high words.
1877 (define_insn "maddhidi4"
1878 [(set (match_operand:DI 0 "s_register_operand" "=r")
1879 (plus:DI
1880 (mult:DI (sign_extend:DI
1881 (match_operand:HI 1 "s_register_operand" "r"))
1882 (sign_extend:DI
1883 (match_operand:HI 2 "s_register_operand" "r")))
1884 (match_operand:DI 3 "s_register_operand" "0")))]
1885 "TARGET_DSP_MULTIPLY"
1886 "smlalbb%?\\t%Q0, %R0, %1, %2"
1887 [(set_attr "insn" "smlalxy")
1888 (set_attr "predicable" "yes")])
1889
1890 ;; Note: there is no separate maddhidi4bt pattern; this is the canonical form.
;; SMLALTB: widening multiply-accumulate of op1's top halfword and
;; op2's bottom halfword into the 64-bit accumulator.
1891 (define_insn "*maddhidi4tb"
1892 [(set (match_operand:DI 0 "s_register_operand" "=r")
1893 (plus:DI
1894 (mult:DI (sign_extend:DI
1895 (ashiftrt:SI
1896 (match_operand:SI 1 "s_register_operand" "r")
1897 (const_int 16)))
1898 (sign_extend:DI
1899 (match_operand:HI 2 "s_register_operand" "r")))
1900 (match_operand:DI 3 "s_register_operand" "0")))]
1901 "TARGET_DSP_MULTIPLY"
1902 "smlaltb%?\\t%Q0, %R0, %1, %2"
1903 [(set_attr "insn" "smlalxy")
1904 (set_attr "predicable" "yes")])
1905
;; SMLALTT: widening multiply-accumulate of the top halfwords of both
;; operands into the 64-bit accumulator.
1906 (define_insn "*maddhidi4tt"
1907 [(set (match_operand:DI 0 "s_register_operand" "=r")
1908 (plus:DI
1909 (mult:DI (sign_extend:DI
1910 (ashiftrt:SI
1911 (match_operand:SI 1 "s_register_operand" "r")
1912 (const_int 16)))
1913 (sign_extend:DI
1914 (ashiftrt:SI
1915 (match_operand:SI 2 "s_register_operand" "r")
1916 (const_int 16))))
1917 (match_operand:DI 3 "s_register_operand" "0")))]
1918 "TARGET_DSP_MULTIPLY"
1919 "smlaltt%?\\t%Q0, %R0, %1, %2"
1920 [(set_attr "insn" "smlalxy")
1921 (set_attr "predicable" "yes")])
1922
;; Single-precision FP multiply expander (hard float only).  Cirrus
;; Maverick cannot take a non-register second operand here, so force
;; it into a register when it is not already a Cirrus FP register.
1923 (define_expand "mulsf3"
1924 [(set (match_operand:SF 0 "s_register_operand" "")
1925 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1926 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1927 "TARGET_32BIT && TARGET_HARD_FLOAT"
1928 "
1929 if (TARGET_MAVERICK
1930 && !cirrus_fp_register (operands[2], SFmode))
1931 operands[2] = force_reg (SFmode, operands[2]);
1932 ")
1933
;; Double-precision FP multiply expander.  Excluded when the VFP only
;; supports single precision; same Maverick register fix-up as mulsf3.
1934 (define_expand "muldf3"
1935 [(set (match_operand:DF 0 "s_register_operand" "")
1936 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1937 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1938 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1939 "
1940 if (TARGET_MAVERICK
1941 && !cirrus_fp_register (operands[2], DFmode))
1942 operands[2] = force_reg (DFmode, operands[2]);
1943 ")
1944 \f
1945 ;; Division insns
1946
;; Single-precision FP divide: only FPA and VFP provide a divide insn.
1947 (define_expand "divsf3"
1948 [(set (match_operand:SF 0 "s_register_operand" "")
1949 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1950 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1951 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1952 "")
1953
;; Double-precision FP divide: FPA, or a VFP with double support.
1954 (define_expand "divdf3"
1955 [(set (match_operand:DF 0 "s_register_operand" "")
1956 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1957 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1958 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1959 "")
1960 \f
1961 ;; Modulo insns
1962
;; Single-precision FP remainder: only the FPA coprocessor has it.
1963 (define_expand "modsf3"
1964 [(set (match_operand:SF 0 "s_register_operand" "")
1965 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1966 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1967 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1968 "")
1969
;; Double-precision FP remainder: FPA only.
1970 (define_expand "moddf3"
1971 [(set (match_operand:DF 0 "s_register_operand" "")
1972 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1973 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1974 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1975 "")
1976 \f
1977 ;; Boolean and,ior,xor insns
1978
1979 ;; Split up double word logical operations
1980
1981 ;; Split up simple DImode logical operations. Simply perform the logical
1982 ;; operation on the upper and lower halves of the registers.
;; match_op_dup applies the same operator (AND/IOR/XOR, operand 6) to
;; each half.  Restricted to core registers: NEON DImode values living
;; in VFP registers and iWMMXt registers must not be split this way.
1983 (define_split
1984 [(set (match_operand:DI 0 "s_register_operand" "")
1985 (match_operator:DI 6 "logical_binary_operator"
1986 [(match_operand:DI 1 "s_register_operand" "")
1987 (match_operand:DI 2 "s_register_operand" "")]))]
1988 "TARGET_32BIT && reload_completed
1989 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1990 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1991 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1992 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1993 "
1994 {
1995 operands[3] = gen_highpart (SImode, operands[0]);
1996 operands[0] = gen_lowpart (SImode, operands[0]);
1997 operands[4] = gen_highpart (SImode, operands[1]);
1998 operands[1] = gen_lowpart (SImode, operands[1]);
1999 operands[5] = gen_highpart (SImode, operands[2]);
2000 operands[2] = gen_lowpart (SImode, operands[2]);
2001 }"
2002 )
2003
;; Split a DImode logical operation (AND/IOR/XOR, operand 6) whose
;; second source is a sign-extended SImode value.  The low word of the
;; result combines operand 1's low word with operand 2 directly; the
;; high word combines operand 1's high word with operand 2's sign word
;; (operand 2 arithmetically shifted right by 31).
2004 (define_split
2005 [(set (match_operand:DI 0 "s_register_operand" "")
2006 (match_operator:DI 6 "logical_binary_operator"
2007 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2008 (match_operand:DI 1 "s_register_operand" "")]))]
2009 "TARGET_32BIT && reload_completed"
2010 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2011 (set (match_dup 3) (match_op_dup:SI 6
2012 [(ashiftrt:SI (match_dup 2) (const_int 31))
2013 (match_dup 4)]))]
2014 "
2015 {
2016 operands[3] = gen_highpart (SImode, operands[0]);
2017 operands[0] = gen_lowpart (SImode, operands[0]);
2018 operands[4] = gen_highpart (SImode, operands[1]);
2019 operands[1] = gen_lowpart (SImode, operands[1]);
    /* Operand 2 is already SImode, so it needs no decomposition; the
       gen_highpart/gen_lowpart calls previously made on operands[2]
       here were no-op dead stores (operand 5 is not referenced by the
       replacement RTL above).  */
2022 }"
2023 )
2024
2025 ;; The zero extend of operand 2 means we can just copy the high part of
2026 ;; operand1 into operand0.
;; IOR with a zero-extended SImode value: low word ORs in operand 2,
;; high word is a plain move of operand 1's high word.  High parts are
;; captured before operands 0/1 are narrowed to their low parts.
2027 (define_split
2028 [(set (match_operand:DI 0 "s_register_operand" "")
2029 (ior:DI
2030 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2031 (match_operand:DI 1 "s_register_operand" "")))]
2032 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2033 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2034 (set (match_dup 3) (match_dup 4))]
2035 "
2036 {
2037 operands[4] = gen_highpart (SImode, operands[1]);
2038 operands[3] = gen_highpart (SImode, operands[0]);
2039 operands[0] = gen_lowpart (SImode, operands[0]);
2040 operands[1] = gen_lowpart (SImode, operands[1]);
2041 }"
2042 )
2043
2044 ;; The zero extend of operand 2 means we can just copy the high part of
2045 ;; operand1 into operand0.
;; XOR analogue of the IOR split above: XOR with zero leaves the high
;; word unchanged, so it is a plain move.
2046 (define_split
2047 [(set (match_operand:DI 0 "s_register_operand" "")
2048 (xor:DI
2049 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2050 (match_operand:DI 1 "s_register_operand" "")))]
2051 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2052 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2053 (set (match_dup 3) (match_dup 4))]
2054 "
2055 {
2056 operands[4] = gen_highpart (SImode, operands[1]);
2057 operands[3] = gen_highpart (SImode, operands[0]);
2058 operands[0] = gen_lowpart (SImode, operands[0]);
2059 operands[1] = gen_lowpart (SImode, operands[1]);
2060 }"
2061 )
2062
;; DImode AND expander.  The neon_inv_logic_op2 predicate presumably
;; also admits immediates usable by NEON's inverted-logic (VBIC-style)
;; forms in addition to registers -- predicate defined elsewhere,
;; confirm there.  Matched by *anddi3_insn and the NEON patterns.
2063 (define_expand "anddi3"
2064 [(set (match_operand:DI 0 "s_register_operand" "")
2065 (and:DI (match_operand:DI 1 "s_register_operand" "")
2066 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2067 "TARGET_32BIT"
2068 ""
2069 )
2070
;; Core-register DImode AND.  Emitted only as "#": it is always broken
;; into two SImode ANDs by the generic DI logical define_split above,
;; hence length 8 (two 4-byte instructions).
2071 (define_insn "*anddi3_insn"
2072 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2073 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2074 (match_operand:DI 2 "s_register_operand" "r,r")))]
2075 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2076 "#"
2077 [(set_attr "length" "8")]
2078 )
2079
;; AND of a DImode value with a zero-extended SImode value.  After
;; reload this splits to an SImode AND for the low word plus a clear of
;; the high word (the zero-extension guarantees the high result is 0).
2080 (define_insn_and_split "*anddi_zesidi_di"
2081 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2082 (and:DI (zero_extend:DI
2083 (match_operand:SI 2 "s_register_operand" "r,r"))
2084 (match_operand:DI 1 "s_register_operand" "0,r")))]
2085 "TARGET_32BIT"
2086 "#"
2087 "TARGET_32BIT && reload_completed"
2088 ; The zero extend of operand 2 clears the high word of the output
2089 ; operand.
2090 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2091 (set (match_dup 3) (const_int 0))]
2092 "
2093 {
2094 operands[3] = gen_highpart (SImode, operands[0]);
2095 operands[0] = gen_lowpart (SImode, operands[0]);
2096 operands[1] = gen_lowpart (SImode, operands[1]);
2097 }"
2098 [(set_attr "length" "8")]
2099 )
2100
;; AND of a DImode value with a sign-extended SImode value.  Always
;; "#": the sign-extend logical define_split earlier in the file breaks
;; it into two SImode operations after reload.
2101 (define_insn "*anddi_sesdi_di"
2102 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2103 (and:DI (sign_extend:DI
2104 (match_operand:SI 2 "s_register_operand" "r,r"))
2105 (match_operand:DI 1 "s_register_operand" "0,r")))]
2106 "TARGET_32BIT"
2107 "#"
2108 [(set_attr "length" "8")]
2109 )
2110
;; SImode AND expander.  Strategy:
;;  * 32-bit targets, constant mask: AND with 255 on v6+ becomes a
;;    zero-extend-byte insn; any other constant is handled by
;;    arm_split_constant, which picks an AND/BIC sequence.
;;    NOTE(review): the 255 case calls gen_thumb2_zero_extendqisi2_v6
;;    under TARGET_32BIT, i.e. also in ARM (non-Thumb-2) mode --
;;    confirm that pattern's own condition makes this safe.
;;  * Thumb-1, non-constant: AND is two-operand, so massage the
;;    operands to share a register.
;;  * Thumb-1, constant: use BIC when the complement fits in 8 bits,
;;    an extract (extzv) or a shift-up/shift-down pair when the mask is
;;    a contiguous run of low or high bits, else load the constant.
2111 (define_expand "andsi3"
2112 [(set (match_operand:SI 0 "s_register_operand" "")
2113 (and:SI (match_operand:SI 1 "s_register_operand" "")
2114 (match_operand:SI 2 "reg_or_int_operand" "")))]
2115 "TARGET_EITHER"
2116 "
2117 if (TARGET_32BIT)
2118 {
2119 if (GET_CODE (operands[2]) == CONST_INT)
2120 {
2121 if (INTVAL (operands[2]) == 255 && arm_arch6)
2122 {
2123 operands[1] = convert_to_mode (QImode, operands[1], 1);
2124 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2125 operands[1]));
2126 }
2127 else
2128 arm_split_constant (AND, SImode, NULL_RTX,
2129 INTVAL (operands[2]), operands[0],
2130 operands[1],
2131 optimize && can_create_pseudo_p ());
2132 
2133 DONE;
2134 }
2135 }
2136 else /* TARGET_THUMB1 */
2137 {
2138 if (GET_CODE (operands[2]) != CONST_INT)
2139 {
2140 rtx tmp = force_reg (SImode, operands[2]);
2141 if (rtx_equal_p (operands[0], operands[1]))
2142 operands[2] = tmp;
2143 else
2144 {
2145 operands[2] = operands[1];
2146 operands[1] = tmp;
2147 }
2148 }
2149 else
2150 {
2151 int i;
2152 
2153 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2154 {
2155 operands[2] = force_reg (SImode,
2156 GEN_INT (~INTVAL (operands[2])));
2157 
2158 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2159 
2160 DONE;
2161 }
2162 
2163 for (i = 9; i <= 31; i++)
2164 {
2165 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2166 {
2167 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2168 const0_rtx))
2169 DONE;
2170 }
2171 else if ((((HOST_WIDE_INT) 1) << i) - 1
2172 == ~INTVAL (operands[2]))
2173 {
2174 rtx shift = GEN_INT (i);
2175 rtx reg = gen_reg_rtx (SImode);
2176 
2177 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2178 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2179 
2180 DONE;
2181 }
2182 }
2183 
2184 operands[2] = force_reg (SImode, operands[2]);
2185 }
2186 }
2187 "
2188 )
2189
2190 ; ??? Check split length for Thumb-2
;; SImode AND with three alternatives: a plain AND, a BIC with the
;; bitwise-inverted immediate (%B2), or "#" for an arbitrary constant,
;; which splits into an arm_split_constant sequence.
2191 (define_insn_and_split "*arm_andsi3_insn"
2192 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2193 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2194 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2195 "TARGET_32BIT"
2196 "@
2197 and%?\\t%0, %1, %2
2198 bic%?\\t%0, %1, #%B2
2199 #"
2200 "TARGET_32BIT
2201 && GET_CODE (operands[2]) == CONST_INT
2202 && !(const_ok_for_arm (INTVAL (operands[2]))
2203 || const_ok_for_arm (~INTVAL (operands[2])))"
2204 [(clobber (const_int 0))]
2205 "
2206 arm_split_constant (AND, SImode, curr_insn,
2207 INTVAL (operands[2]), operands[0], operands[1], 0);
2208 DONE;
2209 "
2210 [(set_attr "length" "4,4,16")
2211 (set_attr "predicable" "yes")]
2212 )
2213
;; Thumb-1 AND: two-operand form, destination tied to operand 1 ("%0"),
;; low registers only; always sets the condition codes.
2214 (define_insn "*thumb1_andsi3_insn"
2215 [(set (match_operand:SI 0 "register_operand" "=l")
2216 (and:SI (match_operand:SI 1 "register_operand" "%0")
2217 (match_operand:SI 2 "register_operand" "l")))]
2218 "TARGET_THUMB1"
2219 "and\\t%0, %2"
2220 [(set_attr "length" "2")
2221 (set_attr "conds" "set")])
2222
;; AND (or BIC with the inverted immediate) that also sets the flags
;; from a comparison of the result with zero (CC_NOOV: overflow flag
;; not meaningful for logical operations).
2223 (define_insn "*andsi3_compare0"
2224 [(set (reg:CC_NOOV CC_REGNUM)
2225 (compare:CC_NOOV
2226 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2227 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2228 (const_int 0)))
2229 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2230 (and:SI (match_dup 1) (match_dup 2)))]
2231 "TARGET_32BIT"
2232 "@
2233 and%.\\t%0, %1, %2
2234 bic%.\\t%0, %1, #%B2"
2235 [(set_attr "conds" "set")]
2236 )
2237
;; AND-with-zero comparison where only the flags are wanted: use TST
;; when the mask is directly encodable, or BIC into a scratch register
;; when only the inverted mask is ("=X" means no scratch needed).
2238 (define_insn "*andsi3_compare0_scratch"
2239 [(set (reg:CC_NOOV CC_REGNUM)
2240 (compare:CC_NOOV
2241 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2242 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2243 (const_int 0)))
2244 (clobber (match_scratch:SI 2 "=X,r"))]
2245 "TARGET_32BIT"
2246 "@
2247 tst%?\\t%0, %1
2248 bic%.\\t%2, %0, #%B1"
2249 [(set_attr "conds" "set")]
2250 )
2251
;; Test a bit-field (width = op1, start bit = op2) against zero via TST
;; with a computed mask.  The width+parity bound of 8 presumably keeps
;; the shifted mask within an encodable ARM rotated-immediate -- TODO
;; confirm against const_ok_for_arm.  The output template rewrites
;; operand 1 into the mask ((1 << width) - 1) << start.
2252 (define_insn "*zeroextractsi_compare0_scratch"
2253 [(set (reg:CC_NOOV CC_REGNUM)
2254 (compare:CC_NOOV (zero_extract:SI
2255 (match_operand:SI 0 "s_register_operand" "r")
2256 (match_operand 1 "const_int_operand" "n")
2257 (match_operand 2 "const_int_operand" "n"))
2258 (const_int 0)))]
2259 "TARGET_32BIT
2260 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2261 && INTVAL (operands[1]) > 0
2262 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2263 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2264 "*
2265 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2266 << INTVAL (operands[2]));
2267 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2268 return \"\";
2269 "
2270 [(set_attr "conds" "set")]
2271 )
2272
;; op0 = (bit-field of op1 != 0).  Splits into a flag-setting AND with
;; the field mask followed by a conditional rewrite of op0 to 1 when
;; the field was non-zero (the AND already left 0 when it was zero).
;; Same width/start bounds as the TST pattern above.
2273 (define_insn_and_split "*ne_zeroextractsi"
2274 [(set (match_operand:SI 0 "s_register_operand" "=r")
2275 (ne:SI (zero_extract:SI
2276 (match_operand:SI 1 "s_register_operand" "r")
2277 (match_operand:SI 2 "const_int_operand" "n")
2278 (match_operand:SI 3 "const_int_operand" "n"))
2279 (const_int 0)))
2280 (clobber (reg:CC CC_REGNUM))]
2281 "TARGET_32BIT
2282 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2283 && INTVAL (operands[2]) > 0
2284 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2285 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2286 "#"
2287 "TARGET_32BIT
2288 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2289 && INTVAL (operands[2]) > 0
2290 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2291 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2292 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2293 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2294 (const_int 0)))
2295 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2296 (set (match_dup 0)
2297 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2298 (match_dup 0) (const_int 1)))]
2299 "
2300 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2301 << INTVAL (operands[3]));
2302 "
2303 [(set_attr "conds" "clob")
2304 (set (attr "length")
2305 (if_then_else (eq_attr "is_thumb" "yes")
2306 (const_int 12)
2307 (const_int 8)))]
2308 )
2309
;; op0 = (low bit-field of op1 != 0), field starting at bit 0.  Instead
;; of masking, shift the field up to the top of the word with a
;; flag-setting left shift (operand 2 is rewritten to 32 - width), then
;; conditionally set op0 to 1 when non-zero.
2310 (define_insn_and_split "*ne_zeroextractsi_shifted"
2311 [(set (match_operand:SI 0 "s_register_operand" "=r")
2312 (ne:SI (zero_extract:SI
2313 (match_operand:SI 1 "s_register_operand" "r")
2314 (match_operand:SI 2 "const_int_operand" "n")
2315 (const_int 0))
2316 (const_int 0)))
2317 (clobber (reg:CC CC_REGNUM))]
2318 "TARGET_ARM"
2319 "#"
2320 "TARGET_ARM"
2321 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2322 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2323 (const_int 0)))
2324 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2325 (set (match_dup 0)
2326 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2327 (match_dup 0) (const_int 1)))]
2328 "
2329 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2330 "
2331 [(set_attr "conds" "clob")
2332 (set_attr "length" "8")]
2333 )
2334
;; op0 = (bit-field of op1 != 0) ? op4 : 0.  Like *ne_zeroextractsi but
;; the non-zero case selects op4 rather than 1.  op0 must not overlap
;; op4 because op0 is written (by the AND) before op4 is read.
2335 (define_insn_and_split "*ite_ne_zeroextractsi"
2336 [(set (match_operand:SI 0 "s_register_operand" "=r")
2337 (if_then_else:SI (ne (zero_extract:SI
2338 (match_operand:SI 1 "s_register_operand" "r")
2339 (match_operand:SI 2 "const_int_operand" "n")
2340 (match_operand:SI 3 "const_int_operand" "n"))
2341 (const_int 0))
2342 (match_operand:SI 4 "arm_not_operand" "rIK")
2343 (const_int 0)))
2344 (clobber (reg:CC CC_REGNUM))]
2345 "TARGET_ARM
2346 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2347 && INTVAL (operands[2]) > 0
2348 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2349 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2350 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2351 "#"
2352 "TARGET_ARM
2353 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2354 && INTVAL (operands[2]) > 0
2355 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2356 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2357 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2358 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2359 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2360 (const_int 0)))
2361 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2362 (set (match_dup 0)
2363 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2364 (match_dup 0) (match_dup 4)))]
2365 "
2366 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2367 << INTVAL (operands[3]));
2368 "
2369 [(set_attr "conds" "clob")
2370 (set_attr "length" "8")]
2371 )
2372
;; op0 = (low bit-field of op1 != 0) ? op3 : 0, field starting at
;; bit 0.  Uses the flag-setting left-shift trick of
;; *ne_zeroextractsi_shifted; op0 must not overlap op3.
2373 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2374 [(set (match_operand:SI 0 "s_register_operand" "=r")
2375 (if_then_else:SI (ne (zero_extract:SI
2376 (match_operand:SI 1 "s_register_operand" "r")
2377 (match_operand:SI 2 "const_int_operand" "n")
2378 (const_int 0))
2379 (const_int 0))
2380 (match_operand:SI 3 "arm_not_operand" "rIK")
2381 (const_int 0)))
2382 (clobber (reg:CC CC_REGNUM))]
2383 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2384 "#"
2385 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2386 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2387 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2388 (const_int 0)))
2389 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2390 (set (match_dup 0)
2391 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2392 (match_dup 0) (match_dup 3)))]
2393 "
2394 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2395 "
2396 [(set_attr "conds" "clob")
2397 (set_attr "length" "8")]
2398 )
2399
;; Thumb-1 zero_extract (unsigned bit-field read) as two shifts: shift
;; the field up to the top of the word, then logical-shift it back
;; down.  Operands 2/3 are rewritten from (width, start) to the two
;; shift counts (32 - width - start) and (32 - width).
2400 (define_split
2401 [(set (match_operand:SI 0 "s_register_operand" "")
2402 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2403 (match_operand:SI 2 "const_int_operand" "")
2404 (match_operand:SI 3 "const_int_operand" "")))
2405 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2406 "TARGET_THUMB1"
2407 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2408 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2409 "{
2410 HOST_WIDE_INT temp = INTVAL (operands[2]);
2411 
2412 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2413 operands[3] = GEN_INT (32 - temp);
2414 }"
2415 )
2416
2417 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
;; Combine a zero_extract feeding a shiftable operator: materialise the
;; field with an up-shift into the scratch (operand 6), then fold the
;; down-shift into the operator's shifted-operand form.
2418 (define_split
2419 [(set (match_operand:SI 0 "s_register_operand" "")
2420 (match_operator:SI 1 "shiftable_operator"
2421 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2422 (match_operand:SI 3 "const_int_operand" "")
2423 (match_operand:SI 4 "const_int_operand" ""))
2424 (match_operand:SI 5 "s_register_operand" "")]))
2425 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2426 "TARGET_ARM"
2427 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2428 (set (match_dup 0)
2429 (match_op_dup 1
2430 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2431 (match_dup 5)]))]
2432 "{
2433 HOST_WIDE_INT temp = INTVAL (operands[3]);
2434 
2435 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2436 operands[4] = GEN_INT (32 - temp);
2437 }"
2438 )
2439
;; Thumb-1 sign_extract (signed bit-field read): shift the field to the
;; top, then arithmetic-shift it back down so the sign bit propagates.
2440 (define_split
2441 [(set (match_operand:SI 0 "s_register_operand" "")
2442 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2443 (match_operand:SI 2 "const_int_operand" "")
2444 (match_operand:SI 3 "const_int_operand" "")))]
2445 "TARGET_THUMB1"
2446 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2447 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2448 "{
2449 HOST_WIDE_INT temp = INTVAL (operands[2]);
2450 
2451 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2452 operands[3] = GEN_INT (32 - temp);
2453 }"
2454 )
2455
;; Signed analogue of the zero_extract-into-operator split above: the
;; down-shift is arithmetic so the field's sign bit is replicated.
2456 (define_split
2457 [(set (match_operand:SI 0 "s_register_operand" "")
2458 (match_operator:SI 1 "shiftable_operator"
2459 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2460 (match_operand:SI 3 "const_int_operand" "")
2461 (match_operand:SI 4 "const_int_operand" ""))
2462 (match_operand:SI 5 "s_register_operand" "")]))
2463 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2464 "TARGET_ARM"
2465 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2466 (set (match_dup 0)
2467 (match_op_dup 1
2468 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2469 (match_dup 5)]))]
2470 "{
2471 HOST_WIDE_INT temp = INTVAL (operands[3]);
2472 
2473 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2474 operands[4] = GEN_INT (32 - temp);
2475 }"
2476 )
2477
2478 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2479 ;;; represented by the bitfield, then this will produce incorrect results.
2480 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2481 ;;; which have a real bit-field insert instruction, the truncation happens
2482 ;;; in the bit-field insert instruction itself. Since arm does not have a
2483 ;;; bit-field insert instruction, we would have to emit code here to truncate
2484 ;;; the value before we insert. This loses some of the advantage of having
2485 ;;; this insv pattern, so this pattern needs to be reevaluated.
2486
;; Bit-field insert expander.  Strategies, in order:
;;  * Thumb-2 with unaligned access, MEM destination, byte-aligned
;;    16/32-bit field: emit an unaligned store directly.
;;  * Thumb-2, register destination: BFC for inserting zero, a single
;;    ORR when inserting an all-ones field whose mask is encodable,
;;    else BFI.
;;  * Otherwise synthesise with and/or/shift sequences, choosing among
;;    a constant-merge form, two rotate-based tricks for fields flush
;;    with either end of the word, and a generic mask-and-combine.
;; See the ??? comment above: operand 3 is not truncated to the field
;; width, so out-of-range constant values can produce wrong results.
2487 (define_expand "insv"
2488 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2489 (match_operand 1 "general_operand" "")
2490 (match_operand 2 "general_operand" ""))
2491 (match_operand 3 "reg_or_int_operand" ""))]
2492 "TARGET_ARM || arm_arch_thumb2"
2493 "
2494 {
2495 int start_bit = INTVAL (operands[2]);
2496 int width = INTVAL (operands[1]);
2497 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2498 rtx target, subtarget;
2499 
2500 if (arm_arch_thumb2)
2501 {
2502 if (unaligned_access && MEM_P (operands[0])
2503 && s_register_operand (operands[3], GET_MODE (operands[3]))
2504 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2505 {
2506 rtx base_addr;
2507 
2508 if (BYTES_BIG_ENDIAN)
2509 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2510 - start_bit;
2511 
2512 if (width == 32)
2513 {
2514 base_addr = adjust_address (operands[0], SImode,
2515 start_bit / BITS_PER_UNIT);
2516 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2517 }
2518 else
2519 {
2520 rtx tmp = gen_reg_rtx (HImode);
2521 
2522 base_addr = adjust_address (operands[0], HImode,
2523 start_bit / BITS_PER_UNIT);
2524 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2525 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2526 }
2527 DONE;
2528 }
2529 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2530 {
2531 bool use_bfi = TRUE;
2532 
2533 if (GET_CODE (operands[3]) == CONST_INT)
2534 {
2535 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2536 
2537 if (val == 0)
2538 {
2539 emit_insn (gen_insv_zero (operands[0], operands[1],
2540 operands[2]));
2541 DONE;
2542 }
2543 
2544 /* See if the set can be done with a single orr instruction. */
2545 if (val == mask && const_ok_for_arm (val << start_bit))
2546 use_bfi = FALSE;
2547 }
2548 
2549 if (use_bfi)
2550 {
2551 if (GET_CODE (operands[3]) != REG)
2552 operands[3] = force_reg (SImode, operands[3]);
2553 
2554 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2555 operands[3]));
2556 DONE;
2557 }
2558 }
2559 else
2560 FAIL;
2561 }
2562 
2563 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2564 FAIL;
2565 
2566 target = copy_rtx (operands[0]);
2567 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2568 subreg as the final target. */
2569 if (GET_CODE (target) == SUBREG)
2570 {
2571 subtarget = gen_reg_rtx (SImode);
2572 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2573 < GET_MODE_SIZE (SImode))
2574 target = SUBREG_REG (target);
2575 }
2576 else
2577 subtarget = target;
2578 
2579 if (GET_CODE (operands[3]) == CONST_INT)
2580 {
2581 /* Since we are inserting a known constant, we may be able to
2582 reduce the number of bits that we have to clear so that
2583 the mask becomes simple. */
2584 /* ??? This code does not check to see if the new mask is actually
2585 simpler. It may not be. */
2586 rtx op1 = gen_reg_rtx (SImode);
2587 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2588 start of this pattern. */
2589 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2590 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2591 
2592 emit_insn (gen_andsi3 (op1, operands[0],
2593 gen_int_mode (~mask2, SImode)));
2594 emit_insn (gen_iorsi3 (subtarget, op1,
2595 gen_int_mode (op3_value << start_bit, SImode)));
2596 }
2597 else if (start_bit == 0
2598 && !(const_ok_for_arm (mask)
2599 || const_ok_for_arm (~mask)))
2600 {
2601 /* A Trick, since we are setting the bottom bits in the word,
2602 we can shift operand[3] up, operand[0] down, OR them together
2603 and rotate the result back again. This takes 3 insns, and
2604 the third might be mergeable into another op. */
2605 /* The shift up copes with the possibility that operand[3] is
2606 wider than the bitfield. */
2607 rtx op0 = gen_reg_rtx (SImode);
2608 rtx op1 = gen_reg_rtx (SImode);
2609 
2610 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2611 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2612 emit_insn (gen_iorsi3 (op1, op1, op0));
2613 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2614 }
2615 else if ((width + start_bit == 32)
2616 && !(const_ok_for_arm (mask)
2617 || const_ok_for_arm (~mask)))
2618 {
2619 /* Similar trick, but slightly less efficient. */
2620 
2621 rtx op0 = gen_reg_rtx (SImode);
2622 rtx op1 = gen_reg_rtx (SImode);
2623 
2624 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2625 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2626 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2627 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2628 }
2629 else
2630 {
2631 rtx op0 = gen_int_mode (mask, SImode);
2632 rtx op1 = gen_reg_rtx (SImode);
2633 rtx op2 = gen_reg_rtx (SImode);
2634 
2635 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2636 {
2637 rtx tmp = gen_reg_rtx (SImode);
2638 
2639 emit_insn (gen_movsi (tmp, op0));
2640 op0 = tmp;
2641 }
2642 
2643 /* Mask out any bits in operand[3] that are not needed. */
2644 emit_insn (gen_andsi3 (op1, operands[3], op0));
2645 
2646 if (GET_CODE (op0) == CONST_INT
2647 && (const_ok_for_arm (mask << start_bit)
2648 || const_ok_for_arm (~(mask << start_bit))))
2649 {
2650 op0 = gen_int_mode (~(mask << start_bit), SImode);
2651 emit_insn (gen_andsi3 (op2, operands[0], op0));
2652 }
2653 else
2654 {
2655 if (GET_CODE (op0) == CONST_INT)
2656 {
2657 rtx tmp = gen_reg_rtx (SImode);
2658 
2659 emit_insn (gen_movsi (tmp, op0));
2660 op0 = tmp;
2661 }
2662 
2663 if (start_bit != 0)
2664 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2665 
2666 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2667 }
2668 
2669 if (start_bit != 0)
2670 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2671 
2672 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2673 }
2674 
2675 if (subtarget != target)
2676 {
2677 /* If TARGET is still a SUBREG, then it must be wider than a word,
2678 so we must be careful only to set the subword we were asked to. */
2679 if (GET_CODE (target) == SUBREG)
2680 emit_move_insn (target, subtarget)
2681 else
2682 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2683 }
2684 
2685 DONE;
2686 }"
2687 )
2688
;; Thumb-2 BFC: clear a bit-field (width = op1, lsb = op2) to zero in
;; place.
2689 (define_insn "insv_zero"
2690 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2691 (match_operand:SI 1 "const_int_operand" "M")
2692 (match_operand:SI 2 "const_int_operand" "M"))
2693 (const_int 0))]
2694 "arm_arch_thumb2"
2695 "bfc%?\t%0, %2, %1"
2696 [(set_attr "length" "4")
2697 (set_attr "predicable" "yes")]
2698 )
2699
;; Thumb-2 BFI: insert the low bits of op3 into a bit-field of op0
;; (width = op1, lsb = op2) in place.
2700 (define_insn "insv_t2"
2701 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2702 (match_operand:SI 1 "const_int_operand" "M")
2703 (match_operand:SI 2 "const_int_operand" "M"))
2704 (match_operand:SI 3 "s_register_operand" "r"))]
2705 "arm_arch_thumb2"
2706 "bfi%?\t%0, %3, %2, %1"
2707 [(set_attr "length" "4")
2708 (set_attr "predicable" "yes")]
2709 )
2710
; A constant will never appear as operand 2 of the patterns below.
;; DImode BIC (D = ~A & B): emitted as "#" and split after reload into two
;; SImode and-not operations, one per word.  The split is suppressed when
;; operand 0 landed in a NEON or iWMMXt register, where these SImode
;; subword operations would be wrong.
(define_insn_and_split "*anddi_notdi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
                (match_operand:DI 2 "s_register_operand" "r,0")))]
  "TARGET_32BIT"
  "#"
  "TARGET_32BIT && reload_completed
   && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
  [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
   (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
  "
  {
    /* Rewrite the DImode operands as low/high SImode word pairs.  */
    operands[3] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[4] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
    operands[5] = gen_highpart (SImode, operands[2]);
    operands[2] = gen_lowpart (SImode, operands[2]);
  }"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
2735
;; DImode AND with NOT of a zero-extended SImode value.  The inverted
;; zero-extension has all-ones in the high word, so only the low word
;; needs a BIC; the high word is just copied from operand 1.  When
;; operand 0 and operand 1 are the same register, a single BIC suffices.
(define_insn_and_split "*anddi_notzesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (and:DI (not:DI (zero_extend:DI
                         (match_operand:SI 2 "s_register_operand" "r,r")))
                (match_operand:DI 1 "s_register_operand" "0,?r")))]
  "TARGET_32BIT"
  "@
   bic%?\\t%Q0, %Q1, %2
   #"
  ; (not (zero_extend ...)) allows us to just copy the high word from
  ; operand1 to operand0.
  "TARGET_32BIT
   && reload_completed
   && operands[0] != operands[1]"
  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
   (set (match_dup 3) (match_dup 4))]
  "
  {
    /* Split the DImode operands into SImode word pairs.  */
    operands[3] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[4] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
  }"
  [(set_attr "length" "4,8")
   (set_attr "predicable" "yes")]
)
2762
;; DImode AND with NOT of a sign-extended SImode value.  The high word of
;; the extension is the sign replicated, so the split uses
;; (ashiftrt ... 31) to regenerate it for the high-word BIC.
(define_insn_and_split "*anddi_notsesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (and:DI (not:DI (sign_extend:DI
                         (match_operand:SI 2 "s_register_operand" "r,r")))
                (match_operand:DI 1 "s_register_operand" "0,r")))]
  "TARGET_32BIT"
  "#"
  "TARGET_32BIT && reload_completed"
  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
   (set (match_dup 3) (and:SI (not:SI
                                (ashiftrt:SI (match_dup 2) (const_int 31)))
                               (match_dup 4)))]
  "
  {
    /* Split the DImode operands into SImode word pairs.  */
    operands[3] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[4] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
  }"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
2785
;; SImode and-not (D = B & ~A), a single BIC instruction.  Note the
;; operand numbering: the inverted operand is 2, matching BIC's syntax.
(define_insn "andsi_notsi_si"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
                (match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_32BIT"
  "bic%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")]
)
2794
;; Thumb-1 BIC: two-operand form, so the non-inverted input (operand 2)
;; must be tied to the destination; the flags are always set.
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
        (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
                (match_operand:SI 2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bic\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
2803
;; BIC with a shifted second operand (ARM mode only, which allows any
;; shift-by-register in the second operand).  %S4 prints the shift.
(define_insn "andsi_not_shiftsi_si"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (and:SI (not:SI (match_operator:SI 4 "shift_operator"
                         [(match_operand:SI 2 "s_register_operand" "r")
                          (match_operand:SI 3 "arm_rhs_operand" "rM")]))
                (match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_ARM"
  "bic%?\\t%0, %1, %2%S4"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "2")
   ;; Shift by constant is cheaper than shift by register.
   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
                                    (const_string "alu_shift")
                                    (const_string "alu_shift_reg")))]
)
2818
;; BICS: and-not that also sets the condition codes (compared with zero).
;; CC_NOOV because the overflow flag is not meaningful for logical ops.
(define_insn "*andsi_notsi_si_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV
         (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
                 (match_operand:SI 1 "s_register_operand" "r"))
         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
  "TARGET_32BIT"
  "bic%.\\t%0, %1, %2"
  [(set_attr "conds" "set")]
)
2831
;; As *andsi_notsi_si_compare0, but only the flags are wanted; the
;; arithmetic result goes to a scratch register.
(define_insn "*andsi_notsi_si_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV
         (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
                 (match_operand:SI 1 "s_register_operand" "r"))
         (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "bic%.\\t%0, %1, %2"
  [(set_attr "conds" "set")]
)
2843
;; DImode inclusive-or expander; matching is left to the insn patterns
;; below (and to the NEON patterns, given the neon_logic_op2 predicate).
(define_expand "iordi3"
  [(set (match_operand:DI 0 "s_register_operand" "")
        (ior:DI (match_operand:DI 1 "s_register_operand" "")
                (match_operand:DI 2 "neon_logic_op2" "")))]
  "TARGET_32BIT"
  ""
)
2851
;; Generic DImode ORR: emitted as "#" (length 8) and handled by the
;; generic DI logical-op splitting; excluded when iWMMXt or NEON provide
;; their own DImode logic instructions.
(define_insn "*iordi3_insn"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
                (match_operand:DI 2 "s_register_operand" "r,r")))]
  "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
  "#"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
2861
;; DImode OR with a zero-extended SImode value: the high word of the
;; extension is zero, so only the low word needs an ORR.  When the
;; destination is not tied to operand 1 a split (via "#") is needed.
(define_insn "*iordi_zesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (ior:DI (zero_extend:DI
                 (match_operand:SI 2 "s_register_operand" "r,r"))
                (match_operand:DI 1 "s_register_operand" "0,?r")))]
  "TARGET_32BIT"
  "@
   orr%?\\t%Q0, %Q1, %2
   #"
  [(set_attr "length" "4,8")
   (set_attr "predicable" "yes")]
)
2874
;; DImode OR with a sign-extended SImode value; always needs splitting
;; (the high word must OR with the replicated sign bit).
(define_insn "*iordi_sesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (ior:DI (sign_extend:DI
                 (match_operand:SI 2 "s_register_operand" "r,r"))
                (match_operand:DI 1 "s_register_operand" "0,r")))]
  "TARGET_32BIT"
  "#"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
2885
;; SImode inclusive-or expander.  Constant operands are split into a
;; sequence of valid ARM immediates by arm_split_constant on 32-bit
;; targets; Thumb-1 forces the constant into a register and commutes the
;; operands so the two-operand ORR can be used.
(define_expand "iorsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (ior:SI (match_operand:SI 1 "s_register_operand" "")
                (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_EITHER"
  "
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      if (TARGET_32BIT)
        {
          arm_split_constant (IOR, SImode, NULL_RTX,
                              INTVAL (operands[2]), operands[0], operands[1],
                              optimize && can_create_pseudo_p ());
          DONE;
        }
      else /* TARGET_THUMB1 */
        {
          rtx tmp = force_reg (SImode, operands[2]);
          if (rtx_equal_p (operands[0], operands[1]))
            operands[2] = tmp;
          else
            {
              operands[2] = operands[1];
              operands[1] = tmp;
            }
        }
    }
  "
)
2915
;; SImode ORR.  Alternatives: plain immediate/register ORR; Thumb-2 ORN
;; for constants whose complement is encodable ('K'); otherwise the
;; constant is split at split time via arm_split_constant.
(define_insn_and_split "*iorsi3_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
        (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
                (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
  "TARGET_32BIT"
  "@
   orr%?\\t%0, %1, %2
   orn%?\\t%0, %1, #%B2
   #"
  "TARGET_32BIT
   && GET_CODE (operands[2]) == CONST_INT
   && !(const_ok_for_arm (INTVAL (operands[2]))
        || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
  [(clobber (const_int 0))]
{
  arm_split_constant (IOR, SImode, curr_insn,
                      INTVAL (operands[2]), operands[0], operands[1], 0);
  DONE;
}
  [(set_attr "length" "4,4,16")
   (set_attr "arch" "32,t2,32")
   (set_attr "predicable" "yes")])
2938
;; Thumb-1 ORR: two-operand, destination tied to operand 1, sets flags.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
        (ior:SI (match_operand:SI 1 "register_operand" "%0")
                (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orr\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
2947
2948 (define_peephole2
2949 [(match_scratch:SI 3 "r")
2950 (set (match_operand:SI 0 "arm_general_register_operand" "")
2951 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2952 (match_operand:SI 2 "const_int_operand" "")))]
2953 "TARGET_ARM
2954 && !const_ok_for_arm (INTVAL (operands[2]))
2955 && const_ok_for_arm (~INTVAL (operands[2]))"
2956 [(set (match_dup 3) (match_dup 2))
2957 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2958 ""
2959 )
2960
;; ORRS: inclusive-or that also sets the flags (CC_NOOV: V meaningless
;; for logical operations).
(define_insn "*iorsi3_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
                                 (match_operand:SI 2 "arm_rhs_operand" "rI"))
                         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (ior:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "orr%.\\t%0, %1, %2"
  [(set_attr "conds" "set")]
)
2972
;; As *iorsi3_compare0 but only the flags are used; result to a scratch.
(define_insn "*iorsi3_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
                                 (match_operand:SI 2 "arm_rhs_operand" "rI"))
                         (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "orr%.\\t%0, %1, %2"
  [(set_attr "conds" "set")]
)
2983
;; DImode exclusive-or expander; matching is left to the patterns below.
(define_expand "xordi3"
  [(set (match_operand:DI 0 "s_register_operand" "")
        (xor:DI (match_operand:DI 1 "s_register_operand" "")
                (match_operand:DI 2 "s_register_operand" "")))]
  "TARGET_32BIT"
  ""
)
2991
;; Generic DImode EOR: "#" (length 8), split by the generic DI logical-op
;; machinery; excluded when iWMMXt or NEON handle DImode logic directly.
(define_insn "*xordi3_insn"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
                (match_operand:DI 2 "s_register_operand" "r,r")))]
  "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
  "#"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
3001
;; DImode XOR with a zero-extended SImode value: high word of the
;; extension is zero, so XOR only affects the low word.
(define_insn "*xordi_zesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (xor:DI (zero_extend:DI
                 (match_operand:SI 2 "s_register_operand" "r,r"))
                (match_operand:DI 1 "s_register_operand" "0,?r")))]
  "TARGET_32BIT"
  "@
   eor%?\\t%Q0, %Q1, %2
   #"
  [(set_attr "length" "4,8")
   (set_attr "predicable" "yes")]
)
3014
;; DImode XOR with a sign-extended SImode value; always split (the high
;; word must XOR with the replicated sign bit).
(define_insn "*xordi_sesidi_di"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
        (xor:DI (sign_extend:DI
                 (match_operand:SI 2 "s_register_operand" "r,r"))
                (match_operand:DI 1 "s_register_operand" "0,r")))]
  "TARGET_32BIT"
  "#"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
3025
;; SImode exclusive-or expander; same constant-handling strategy as
;; iorsi3: split constants on 32-bit targets, force to a register and
;; commute on Thumb-1.
(define_expand "xorsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (xor:SI (match_operand:SI 1 "s_register_operand" "")
                (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_EITHER"
  "if (GET_CODE (operands[2]) == CONST_INT)
    {
      if (TARGET_32BIT)
        {
          arm_split_constant (XOR, SImode, NULL_RTX,
                              INTVAL (operands[2]), operands[0], operands[1],
                              optimize && can_create_pseudo_p ());
          DONE;
        }
      else /* TARGET_THUMB1 */
        {
          rtx tmp = force_reg (SImode, operands[2]);
          if (rtx_equal_p (operands[0], operands[1]))
            operands[2] = tmp;
          else
            {
              operands[2] = operands[1];
              operands[1] = tmp;
            }
        }
    }"
)
3053
;; SImode EOR with register or immediate second operand.
(define_insn "*arm_xorsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (xor:SI (match_operand:SI 1 "s_register_operand" "r")
                (match_operand:SI 2 "arm_rhs_operand" "rI")))]
  "TARGET_32BIT"
  "eor%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")]
)
3062
;; Thumb-1 EOR: two-operand, destination tied to operand 1, sets flags.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI 0 "register_operand" "=l")
        (xor:SI (match_operand:SI 1 "register_operand" "%0")
                (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eor\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
3071
;; EORS: exclusive-or that also sets the flags.
(define_insn "*xorsi3_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
                                 (match_operand:SI 2 "arm_rhs_operand" "rI"))
                         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (xor:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "eor%.\\t%0, %1, %2"
  [(set_attr "conds" "set")]
)
3083
;; Flags-only XOR: no result register needed, so TEQ is used instead of
;; EORS with a scratch.
(define_insn "*xorsi3_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
        (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
                                 (match_operand:SI 1 "arm_rhs_operand" "rI"))
                         (const_int 0)))]
  "TARGET_32BIT"
  "teq%?\\t%0, %1"
  [(set_attr "conds" "set")]
)
3093
3094 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3095 ; (NOT D) we can sometimes merge the final NOT into one of the following
3096 ; insns.
3097
;; Split (IOR (AND (NOT A) (NOT B)) C) into D = (AND (IOR A B) (NOT C))
;; followed by (NOT D) -- see the comment above; the trailing NOT may
;; later merge into a following instruction.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
                        (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
                (match_operand:SI 3 "arm_rhs_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_32BIT"
  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
                              (not:SI (match_dup 3))))
   (set (match_dup 0) (not:SI (match_dup 4)))]
  ""
)
3110
;; (A | B) & ~C as an ORR followed by a BIC (two conditional-executable
;; instructions, hence ce_count 2).
(define_insn "*andsi_iorsi3_notsi"
  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
        (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
                        (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
                (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
  "TARGET_32BIT"
  "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
  [(set_attr "length" "8")
   (set_attr "ce_count" "2")
   (set_attr "predicable" "yes")]
)
3122
3123 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3124 ; insns are available?
;; Rewrite a logical op of a zero_extract with a matching logical op of
;; an lshiftrt (field width + shift amount = 32) as a left shift into a
;; scratch followed by a logical right shift -- two shifted ALU ops.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (match_operator:SI 1 "logical_binary_operator"
         [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
                           (match_operand:SI 3 "const_int_operand" "")
                           (match_operand:SI 4 "const_int_operand" ""))
          (match_operator:SI 9 "logical_binary_operator"
           [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
                         (match_operand:SI 6 "const_int_operand" ""))
            (match_operand:SI 7 "s_register_operand" "")])]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
        (match_op_dup 1
         [(ashift:SI (match_dup 2) (match_dup 4))
          (match_dup 5)]))
   (set (match_dup 0)
        (match_op_dup 1
         [(lshiftrt:SI (match_dup 8) (match_dup 6))
          (match_dup 7)]))]
  "
  /* Left-shift amount that places the extracted field at the top.  */
  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
")
3150
;; Mirror image of the previous split: same transformation with the
;; zero_extract as the second operand of the outer logical op.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (match_operator:SI 1 "logical_binary_operator"
         [(match_operator:SI 9 "logical_binary_operator"
           [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
                         (match_operand:SI 6 "const_int_operand" ""))
            (match_operand:SI 7 "s_register_operand" "")])
          (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
                           (match_operand:SI 3 "const_int_operand" "")
                           (match_operand:SI 4 "const_int_operand" ""))]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
        (match_op_dup 1
         [(ashift:SI (match_dup 2) (match_dup 4))
          (match_dup 5)]))
   (set (match_dup 0)
        (match_op_dup 1
         [(lshiftrt:SI (match_dup 8) (match_dup 6))
          (match_dup 7)]))]
  "
  /* Left-shift amount that places the extracted field at the top.  */
  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
")
3176
;; Signed variant: sign_extract paired with an ashiftrt (arithmetic
;; right shift) instead of zero_extract/lshiftrt.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (match_operator:SI 1 "logical_binary_operator"
         [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
                           (match_operand:SI 3 "const_int_operand" "")
                           (match_operand:SI 4 "const_int_operand" ""))
          (match_operator:SI 9 "logical_binary_operator"
           [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
                         (match_operand:SI 6 "const_int_operand" ""))
            (match_operand:SI 7 "s_register_operand" "")])]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
        (match_op_dup 1
         [(ashift:SI (match_dup 2) (match_dup 4))
          (match_dup 5)]))
   (set (match_dup 0)
        (match_op_dup 1
         [(ashiftrt:SI (match_dup 8) (match_dup 6))
          (match_dup 7)]))]
  "
  /* Left-shift amount that places the extracted field at the top.  */
  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
")
3202
;; Mirror image of the previous signed split: sign_extract as the second
;; operand of the outer logical op.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (match_operator:SI 1 "logical_binary_operator"
         [(match_operator:SI 9 "logical_binary_operator"
           [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
                         (match_operand:SI 6 "const_int_operand" ""))
            (match_operand:SI 7 "s_register_operand" "")])
          (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
                           (match_operand:SI 3 "const_int_operand" "")
                           (match_operand:SI 4 "const_int_operand" ""))]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
        (match_op_dup 1
         [(ashift:SI (match_dup 2) (match_dup 4))
          (match_dup 5)]))
   (set (match_dup 0)
        (match_op_dup 1
         [(ashiftrt:SI (match_dup 8) (match_dup 6))
          (match_dup 7)]))]
  "
  /* Left-shift amount that places the extracted field at the top.  */
  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
")
3228 \f
3229
3230 ;; Minimum and maximum insns
3231
;; Signed maximum expander.  max(x, 0) and max(x, -1) have branch-free
;; single-insn forms (see *smax_0 / *smax_m1), so for those operands the
;; pattern is re-emitted without the CC clobber.
(define_expand "smaxsi3"
  [(parallel [
    (set (match_operand:SI 0 "s_register_operand" "")
         (smax:SI (match_operand:SI 1 "s_register_operand" "")
                  (match_operand:SI 2 "arm_rhs_operand" "")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_32BIT"
  "
  if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
    {
      /* No need for a clobber of the condition code register here.  */
      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
                              gen_rtx_SMAX (SImode, operands[1],
                                            operands[2])));
      DONE;
    }
")
3249
;; max(x, 0) = x & ~(x >> 31): clear all bits when x is negative.
(define_insn "*smax_0"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (smax:SI (match_operand:SI 1 "s_register_operand" "r")
                 (const_int 0)))]
  "TARGET_32BIT"
  "bic%?\\t%0, %1, %1, asr #31"
  [(set_attr "predicable" "yes")]
)
3258
;; max(x, -1) = x | (x >> 31): set all bits when x is negative.
(define_insn "*smax_m1"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (smax:SI (match_operand:SI 1 "s_register_operand" "r")
                 (const_int -1)))]
  "TARGET_32BIT"
  "orr%?\\t%0, %1, %1, asr #31"
  [(set_attr "predicable" "yes")]
)
3267
;; General signed max via compare + conditional moves.  When operand 1
;; is tied to the destination only the "losing" move is needed.
(define_insn "*arm_smax_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
                 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%1, %2\;movlt\\t%0, %2
   cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")]
)
3280
;; Signed minimum expander.  min(x, 0) has a branch-free single-insn
;; form (*smin_0), so that case is re-emitted without the CC clobber.
(define_expand "sminsi3"
  [(parallel [
    (set (match_operand:SI 0 "s_register_operand" "")
         (smin:SI (match_operand:SI 1 "s_register_operand" "")
                  (match_operand:SI 2 "arm_rhs_operand" "")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_32BIT"
  "
  if (operands[2] == const0_rtx)
    {
      /* No need for a clobber of the condition code register here.  */
      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
                              gen_rtx_SMIN (SImode, operands[1],
                                            operands[2])));
      DONE;
    }
")
3298
;; min(x, 0) = x & (x >> 31): keep x only when it is negative.
(define_insn "*smin_0"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (smin:SI (match_operand:SI 1 "s_register_operand" "r")
                 (const_int 0)))]
  "TARGET_32BIT"
  "and%?\\t%0, %1, %1, asr #31"
  [(set_attr "predicable" "yes")]
)
3307
;; General signed min via compare + conditional moves.
(define_insn "*arm_smin_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
                 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%1, %2\;movge\\t%0, %2
   cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")]
)
3320
;; Unsigned maximum expander; no special-case constants.
(define_expand "umaxsi3"
  [(parallel [
    (set (match_operand:SI 0 "s_register_operand" "")
         (umax:SI (match_operand:SI 1 "s_register_operand" "")
                  (match_operand:SI 2 "arm_rhs_operand" "")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_32BIT"
  ""
)
3330
;; Unsigned max via compare + conditional moves (CS/CC are the unsigned
;; condition codes).  Fewer moves when an input is tied to the result.
(define_insn "*arm_umaxsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
        (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%1, %2\;movcc\\t%0, %2
   cmp\\t%1, %2\;movcs\\t%0, %1
   cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,8,12")]
)
3344
;; Unsigned minimum expander; no special-case constants.
(define_expand "uminsi3"
  [(parallel [
    (set (match_operand:SI 0 "s_register_operand" "")
         (umin:SI (match_operand:SI 1 "s_register_operand" "")
                  (match_operand:SI 2 "arm_rhs_operand" "")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_32BIT"
  ""
)
3354
;; Unsigned min via compare + conditional moves.
(define_insn "*arm_uminsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
        (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
                 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%1, %2\;movcs\\t%0, %2
   cmp\\t%1, %2\;movcc\\t%0, %1
   cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,8,12")]
)
3368
;; Store min/max of two registers directly to memory: compare, then two
;; mutually-exclusive conditional stores.  Thumb-2 needs an explicit ITE
;; block before the conditional stores, hence the extra length there.
(define_insn "*store_minmaxsi"
  [(set (match_operand:SI 0 "memory_operand" "=m")
        (match_operator:SI 3 "minmax_operator"
         [(match_operand:SI 1 "s_register_operand" "r")
          (match_operand:SI 2 "s_register_operand" "r")]))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "*
  operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
                                operands[1], operands[2]);
  output_asm_insn (\"cmp\\t%1, %2\", operands);
  if (TARGET_THUMB2)
    output_asm_insn (\"ite\t%d3\", operands);
  output_asm_insn (\"str%d3\\t%1, %0\", operands);
  output_asm_insn (\"str%D3\\t%2, %0\", operands);
  return \"\";
  "
  [(set_attr "conds" "clob")
   (set (attr "length")
        (if_then_else (eq_attr "is_thumb" "yes")
                      (const_int 14)
                      (const_int 12)))
   (set_attr "type" "store1")]
)
3393
3394 ; Reject the frame pointer in operand[1], since reloading this after
3395 ; it has been eliminated can cause carnage.
;; A shiftable ALU op whose first input is a min/max: compare, then one
;; or two conditional ALU ops.  The "else" op can be skipped when the
;; result register already holds operand 1, operand 3 is zero and the
;; outer op has a zero identity (PLUS/MINUS/IOR/XOR).  Operand 1 must
;; not be an eliminable register (see the comment above the pattern).
(define_insn "*minmax_arithsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
        (match_operator:SI 4 "shiftable_operator"
         [(match_operator:SI 5 "minmax_operator"
           [(match_operand:SI 2 "s_register_operand" "r,r")
            (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
          (match_operand:SI 1 "s_register_operand" "0,?r")]))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && !arm_eliminable_register (operands[1])"
  "*
  {
    enum rtx_code code = GET_CODE (operands[4]);
    bool need_else;

    if (which_alternative != 0 || operands[3] != const0_rtx
        || (code != PLUS && code != MINUS && code != IOR && code != XOR))
      need_else = true;
    else
      need_else = false;

    operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
                                  operands[2], operands[3]);
    output_asm_insn (\"cmp\\t%2, %3\", operands);
    if (TARGET_THUMB2)
      {
        if (need_else)
          output_asm_insn (\"ite\\t%d5\", operands);
        else
          output_asm_insn (\"it\\t%d5\", operands);
      }
    output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
    if (need_else)
      output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
    return \"\";
  }"
  [(set_attr "conds" "clob")
   (set (attr "length")
        (if_then_else (eq_attr "is_thumb" "yes")
                      (const_int 14)
                      (const_int 12)))]
)
3437
3438 \f
3439 ;; Shift and rotation insns
3440
;; DImode left-shift expander.  Only a constant shift by exactly 1 has a
;; dedicated core pattern; other constant shifts FAIL so the generic
;; multi-word shift code is used.  Register shift amounts also FAIL
;; unless iWMMXt or Maverick provide DImode shifts.
(define_expand "ashldi3"
  [(set (match_operand:DI 0 "s_register_operand" "")
        (ashift:DI (match_operand:DI 1 "s_register_operand" "")
                   (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_32BIT"
  "
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
        {
          emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
          DONE;
        }
        /* Ideally we shouldn't fail here if we could know that operands[1] 
           ends up already living in an iwmmxt register. Otherwise it's
           cheaper to have the alternate code being generated than moving
           values to iwmmxt regs and back.  */
        FAIL;
    }
  else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
    FAIL;
  "
)
3464
;; 64-bit shift left by one: shift the low word setting the carry, then
;; add-with-carry the high word to itself to pull the carry in.
(define_insn "arm_ashldi3_1bit"
  [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
        (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
                   (const_int 1)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
  [(set_attr "conds" "clob")
   (set_attr "length" "8")]
)
3475
;; SImode left-shift expander: a constant shift of more than 31 always
;; yields zero, so emit a move of zero directly.
(define_expand "ashlsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (ashift:SI (match_operand:SI 1 "s_register_operand" "")
                   (match_operand:SI 2 "arm_rhs_operand" "")))]
  "TARGET_EITHER"
  "
  if (GET_CODE (operands[2]) == CONST_INT
      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
    {
      emit_insn (gen_movsi (operands[0], const0_rtx));
      DONE;
    }
  "
)
3490
;; Thumb-1 LSL: immediate form ('N') or register form (dest tied).
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
                   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsl\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
3499
;; DImode arithmetic right-shift expander; same strategy as ashldi3
;; (dedicated 1-bit pattern, FAIL otherwise unless iWMMXt).
(define_expand "ashrdi3"
  [(set (match_operand:DI 0 "s_register_operand" "")
        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
                     (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_32BIT"
  "
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
        {
          emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
          DONE;
        }
        /* Ideally we shouldn't fail here if we could know that operands[1] 
           ends up already living in an iwmmxt register. Otherwise it's
           cheaper to have the alternate code being generated than moving
           values to iwmmxt regs and back.  */
        FAIL;
    }
  else if (!TARGET_REALLY_IWMMXT)
    FAIL;
  "
)
3523
;; 64-bit arithmetic shift right by one: shift the high word setting the
;; carry, then rotate the low word right through the carry (RRX).
(define_insn "arm_ashrdi3_1bit"
  [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
                     (const_int 1)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
  [(set_attr "conds" "clob")
   (set_attr "insn" "mov")
   (set_attr "length" "8")]
)
3535
;; SImode arithmetic right-shift expander: constant shifts of more than
;; 31 are clamped to 31 (which replicates the sign across the word).
(define_expand "ashrsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
                     (match_operand:SI 2 "arm_rhs_operand" "")))]
  "TARGET_EITHER"
  "
  if (GET_CODE (operands[2]) == CONST_INT
      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
    operands[2] = GEN_INT (31);
  "
)
3547
;; Thumb-1 ASR: immediate form ('N') or register form (dest tied).
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
                     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asr\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
3556
;; DImode logical right-shift expander; same strategy as ashldi3
;; (dedicated 1-bit pattern, FAIL otherwise unless iWMMXt).
(define_expand "lshrdi3"
  [(set (match_operand:DI 0 "s_register_operand" "")
        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
                     (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_32BIT"
  "
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
        {
          emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
          DONE;
        }
        /* Ideally we shouldn't fail here if we could know that operands[1] 
           ends up already living in an iwmmxt register. Otherwise it's
           cheaper to have the alternate code being generated than moving
           values to iwmmxt regs and back.  */
        FAIL;
    }
  else if (!TARGET_REALLY_IWMMXT)
    FAIL;
  "
)
3580
;; 64-bit logical shift right by one: shift the high word setting the
;; carry, then rotate the low word right through the carry (RRX).
(define_insn "arm_lshrdi3_1bit"
  [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
                     (const_int 1)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
  [(set_attr "conds" "clob")
   (set_attr "insn" "mov")
   (set_attr "length" "8")]
)
3592
;; SImode logical right-shift expander: a constant shift of more than 31
;; always yields zero, so emit a move of zero directly.
(define_expand "lshrsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
                     (match_operand:SI 2 "arm_rhs_operand" "")))]
  "TARGET_EITHER"
  "
  if (GET_CODE (operands[2]) == CONST_INT
      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
    {
      emit_insn (gen_movsi (operands[0], const0_rtx));
      DONE;
    }
  "
)
3607
;; Thumb-1 LSR: immediate form ('N') or register form (dest tied).
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
                     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsr\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")])
3616
;; Rotate-left expander: ARM only has rotate-right, so rotate left by N
;; becomes rotate right by (32 - N) % 32; for a register amount, 32 - N
;; is computed into a fresh register first.
(define_expand "rotlsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
                     (match_operand:SI 2 "reg_or_int_operand" "")))]
  "TARGET_32BIT"
  "
  if (GET_CODE (operands[2]) == CONST_INT)
    operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
  else
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
      operands[2] = reg;
    }
  "
)
3633
;; Rotate-right expander: constant amounts above 31 are reduced modulo
;; 32 on 32-bit targets; Thumb-1 has no immediate rotate, so constants
;; are forced into a register there.
(define_expand "rotrsi3"
  [(set (match_operand:SI 0 "s_register_operand" "")
        (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
                     (match_operand:SI 2 "arm_rhs_operand" "")))]
  "TARGET_EITHER"
  "
  if (TARGET_32BIT)
    {
      if (GET_CODE (operands[2]) == CONST_INT
          && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
        operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
    }
  else /* TARGET_THUMB1 */
    {
      if (GET_CODE (operands [2]) == CONST_INT)
        operands [2] = force_reg (SImode, operands[2]);
    }
  "
)
3653
;; Thumb-1 ROR: register amount only, destination tied to operand 1.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=l")
        (rotatert:SI (match_operand:SI 1 "register_operand" "0")
                     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ror\\t%0, %0, %2"
  [(set_attr "length" "2")]
)
3662
;; Generic SImode shift by register or immediate; the exact assembly
;; (including MOV with shifted operand) is chosen by arm_output_shift.
(define_insn "*arm_shiftsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (match_operator:SI 3 "shift_operator"
         [(match_operand:SI 1 "s_register_operand" "r")
          (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
  "TARGET_32BIT"
  "* return arm_output_shift(operands, 0);"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "1")
   ;; Shift by constant is cheaper than shift by register.
   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
                                    (const_string "alu_shift")
                                    (const_string "alu_shift_reg")))]
)
3676
;; Shift that also sets the condition codes from a compare of the
;; result against zero (flag-setting variant; second argument 1 to
;; arm_output_shift requests the 's' suffix).
(define_insn "*shiftsi3_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
			  [(match_operand:SI 1 "s_register_operand" "r")
			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
			 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
  "TARGET_32BIT"
  "* return arm_output_shift(operands, 1);"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")
   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
		      (const_string "alu_shift")
		      (const_string "alu_shift_reg")))]
)
3693
;; As *shiftsi3_compare0, but only the condition codes are wanted; the
;; shifted value itself goes to a scratch register.
(define_insn "*shiftsi3_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
			  [(match_operand:SI 1 "s_register_operand" "r")
			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
			 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "* return arm_output_shift(operands, 1);"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")]
)
3706
;; MVN of a shifted operand (%S3 prints the shift).  Register-specified
;; shifts are ARM-mode only, hence the arch "32,a" split of the two
;; alternatives.
(define_insn "*not_shiftsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(not:SI (match_operator:SI 3 "shift_operator"
		 [(match_operand:SI 1 "s_register_operand" "r,r")
		  (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
  "TARGET_32BIT"
  "mvn%?\\t%0, %1%S3"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "1")
   (set_attr "insn" "mvn")
   (set_attr "arch" "32,a")
   (set_attr "type" "alu_shift,alu_shift_reg")])
3719
;; Flag-setting MVN of a shifted operand (MVNS); result is kept.
(define_insn "*not_shiftsi_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV
	 (not:SI (match_operator:SI 3 "shift_operator"
		  [(match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
  "TARGET_32BIT"
  "mvn%.\\t%0, %1%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "insn" "mvn")
   (set_attr "arch" "32,a")
   (set_attr "type" "alu_shift,alu_shift_reg")])
3736
;; Flag-setting MVN of a shifted operand where only the flags are
;; needed; the result goes to a scratch register.
(define_insn "*not_shiftsi_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV
	 (not:SI (match_operator:SI 3 "shift_operator"
		  [(match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
	 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r,r"))]
  "TARGET_32BIT"
  "mvn%.\\t%0, %1%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "insn" "mvn")
   (set_attr "arch" "32,a")
   (set_attr "type" "alu_shift,alu_shift_reg")])
3752
3753 ;; We don't really have extzv, but defining this using shifts helps
3754 ;; to reduce register pressure later on.
3755
;; Unsigned bit-field extract.  Thumb-2 can use UBFX (via extzv_t2) or,
;; for byte-aligned 16/32-bit fields in memory, an unaligned load.
;; Thumb-1 synthesizes the extract as shift-left then logical
;; shift-right (via extzv_t1), or a single shift when the field ends at
;; bit 31 (lshift == 0).
(define_expand "extzv"
  [(set (match_operand 0 "s_register_operand" "")
	(zero_extract (match_operand 1 "nonimmediate_operand" "")
		      (match_operand 2 "const_int_operand" "")
		      (match_operand 3 "const_int_operand" "")))]
  "TARGET_THUMB1 || arm_arch_thumb2"
  "
  {
    /* Shift amounts for the two-shift fallback: left by lshift to drop
       high bits, then right by rshift to drop low bits and zero-fill.  */
    HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
    HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);

    if (arm_arch_thumb2)
      {
	HOST_WIDE_INT width = INTVAL (operands[2]);
	HOST_WIDE_INT bitpos = INTVAL (operands[3]);

	/* Byte-aligned 16/32-bit field in memory: use an unaligned
	   load instead of load + mask.  */
	if (unaligned_access && MEM_P (operands[1])
	    && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
	  {
	    rtx base_addr;

	    if (BYTES_BIG_ENDIAN)
	      bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
		       - bitpos;

	    if (width == 32)
	      {
		base_addr = adjust_address (operands[1], SImode,
					    bitpos / BITS_PER_UNIT);
		emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
	      }
	    else
	      {
		rtx dest = operands[0];
		rtx tmp = gen_reg_rtx (SImode);

		/* We may get a paradoxical subreg here.  Strip it off.  */
		if (GET_CODE (dest) == SUBREG
		    && GET_MODE (dest) == SImode
		    && GET_MODE (SUBREG_REG (dest)) == HImode)
		  dest = SUBREG_REG (dest);

		if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
		  FAIL;

		base_addr = adjust_address (operands[1], HImode,
					    bitpos / BITS_PER_UNIT);
		emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
		emit_move_insn (gen_lowpart (SImode, dest), tmp);
	      }
	    DONE;
	  }
	else if (s_register_operand (operands[1], GET_MODE (operands[1])))
	  {
	    /* Register source on Thumb-2: single UBFX.  */
	    emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
				     operands[3]));
	    DONE;
	  }
	else
	  FAIL;
      }

    /* Thumb-1 path: register sources only.  */
    if (!s_register_operand (operands[1], GET_MODE (operands[1])))
      FAIL;

    operands[3] = GEN_INT (rshift);

    if (lshift == 0)
      {
	/* Field is left-justified already: one logical shift right.  */
	emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
	DONE;
      }

    emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
			     operands[3], gen_reg_rtx (SImode)));
    DONE;
  }"
)
3834
3835 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3836
;; Helper for extzv on Thumb-1: shift left by operand 2 into the
;; scratch (operand 4), then logical shift right by operand 3 into the
;; destination, leaving the zero-extended field.
(define_expand "extzv_t1"
  [(set (match_operand:SI 4 "s_register_operand" "")
	(ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
		   (match_operand:SI 2 "const_int_operand" "")))
   (set (match_operand:SI 0 "s_register_operand" "")
	(lshiftrt:SI (match_dup 4)
		     (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  "")
3846
;; Signed bit-field extract (Thumb-2 only).  Byte-aligned 16/32-bit
;; memory fields use a (sign-extending) unaligned load; SImode register
;; sources use SBFX via extv_regsi; everything else FAILs so the
;; middle-end falls back to shifts.
(define_expand "extv"
  [(set (match_operand 0 "s_register_operand" "")
	(sign_extract (match_operand 1 "nonimmediate_operand" "")
		      (match_operand 2 "const_int_operand" "")
		      (match_operand 3 "const_int_operand" "")))]
  "arm_arch_thumb2"
{
  HOST_WIDE_INT width = INTVAL (operands[2]);
  HOST_WIDE_INT bitpos = INTVAL (operands[3]);

  if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
      && (bitpos % BITS_PER_UNIT)  == 0)
    {
      rtx base_addr;

      if (BYTES_BIG_ENDIAN)
	bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;

      if (width == 32)
        {
	  base_addr = adjust_address (operands[1], SImode,
				      bitpos / BITS_PER_UNIT);
	  emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
        }
      else
        {
	  rtx dest = operands[0];
	  rtx tmp = gen_reg_rtx (SImode);

	  /* We may get a paradoxical subreg here.  Strip it off.  */
	  if (GET_CODE (dest) == SUBREG
	      && GET_MODE (dest) == SImode
	      && GET_MODE (SUBREG_REG (dest)) == HImode)
	    dest = SUBREG_REG (dest);

	  if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
	    FAIL;

	  base_addr = adjust_address (operands[1], HImode,
				      bitpos / BITS_PER_UNIT);
	  /* Sign-extending load, unlike the extzv case.  */
	  emit_insn (gen_unaligned_loadhis (tmp, base_addr));
	  emit_move_insn (gen_lowpart (SImode, dest), tmp);
        }

      DONE;
    }
  else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
    FAIL;
  else if (GET_MODE (operands[0]) == SImode
	   && GET_MODE (operands[1]) == SImode)
    {
      emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
				 operands[3]));
      DONE;
    }

  FAIL;
})
3905
3906 ; Helper to expand register forms of extv with the proper modes.
3907
;; Helper to expand register-to-register sign_extract with explicit
;; SImode operands; matched by the *extv_reg (SBFX) pattern below.
(define_expand "extv_regsi"
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand 2 "const_int_operand" "")
			 (match_operand 3 "const_int_operand" "")))]
  ""
{
})
3916
3917 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3918
;; Unaligned 32-bit load, wrapped in an unspec so it is never combined
;; into addressing forms that require alignment.  First alternative is
;; the 16-bit Thumb-2 encoding (Uw address, low register).
(define_insn "unaligned_loadsi"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
	(unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
		   UNSPEC_UNALIGNED_LOAD))]
  "unaligned_access && TARGET_32BIT"
  "ldr%?\t%0, %1\t@ unaligned"
  [(set_attr "arch" "t2,any")
   (set_attr "length" "2,4")
   (set_attr "predicable" "yes")
   (set_attr "type" "load1")])
3929
;; Unaligned halfword load, sign-extended to SImode (LDRSH).
(define_insn "unaligned_loadhis"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
	(sign_extend:SI
	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
		     UNSPEC_UNALIGNED_LOAD)))]
  "unaligned_access && TARGET_32BIT"
  "ldr%(sh%)\t%0, %1\t@ unaligned"
  [(set_attr "arch" "t2,any")
   (set_attr "length" "2,4")
   (set_attr "predicable" "yes")
   (set_attr "type" "load_byte")])
3941
;; Unaligned halfword load, zero-extended to SImode (LDRH).
(define_insn "unaligned_loadhiu"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
	(zero_extend:SI
	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
		     UNSPEC_UNALIGNED_LOAD)))]
  "unaligned_access && TARGET_32BIT"
  "ldr%(h%)\t%0, %1\t@ unaligned"
  [(set_attr "arch" "t2,any")
   (set_attr "length" "2,4")
   (set_attr "predicable" "yes")
   (set_attr "type" "load_byte")])
3953
;; Unaligned 32-bit store (STR), unspec-wrapped like the loads.
(define_insn "unaligned_storesi"
  [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
	(unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
		   UNSPEC_UNALIGNED_STORE))]
  "unaligned_access && TARGET_32BIT"
  "str%?\t%1, %0\t@ unaligned"
  [(set_attr "arch" "t2,any")
   (set_attr "length" "2,4")
   (set_attr "predicable" "yes")
   (set_attr "type" "store1")])
3964
;; Unaligned halfword store (STRH).
(define_insn "unaligned_storehi"
  [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
	(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
		   UNSPEC_UNALIGNED_STORE))]
  "unaligned_access && TARGET_32BIT"
  "str%(h%)\t%1, %0\t@ unaligned"
  [(set_attr "arch" "t2,any")
   (set_attr "length" "2,4")
   (set_attr "predicable" "yes")
   (set_attr "type" "store1")])
3975
;; SBFX: signed bit-field extract of %2 bits starting at bit %3.
(define_insn "*extv_reg"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
			 (match_operand:SI 2 "const_int_operand" "M")
			 (match_operand:SI 3 "const_int_operand" "M")))]
  "arm_arch_thumb2"
  "sbfx%?\t%0, %1, %3, %2"
  [(set_attr "length" "4")
   (set_attr "predicable" "yes")]
)
3986
;; UBFX: unsigned bit-field extract of %2 bits starting at bit %3.
(define_insn "extzv_t2"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
			 (match_operand:SI 2 "const_int_operand" "M")
			 (match_operand:SI 3 "const_int_operand" "M")))]
  "arm_arch_thumb2"
  "ubfx%?\t%0, %1, %3, %2"
  [(set_attr "length" "4")
   (set_attr "predicable" "yes")]
)
3997
3998
3999 ;; Division instructions
;; Hardware signed divide (SDIV) for cores with integer divide.
(define_insn "divsi3"
  [(set (match_operand:SI	  0 "s_register_operand" "=r")
	(div:SI (match_operand:SI 1 "s_register_operand"  "r")
		(match_operand:SI 2 "s_register_operand"  "r")))]
  "TARGET_IDIV"
  "sdiv%?\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "insn" "sdiv")]
)
4009
;; Hardware unsigned divide (UDIV) for cores with integer divide.
(define_insn "udivsi3"
  [(set (match_operand:SI	   0 "s_register_operand" "=r")
	(udiv:SI (match_operand:SI 1 "s_register_operand"  "r")
		 (match_operand:SI 2 "s_register_operand"  "r")))]
  "TARGET_IDIV"
  "udiv%?\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "insn" "udiv")]
)
4019
4020 \f
4021 ;; Unary arithmetic insns
4022
;; 64-bit negate; clobbers the flags since the two-instruction
;; sequences below use carry propagation.
(define_expand "negdi2"
 [(parallel
   [(set (match_operand:DI 0 "s_register_operand" "")
	 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_EITHER"
  ""
)
4031
4032 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4033 ;; The first alternative allows the common case of a *full* overlap.
;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
;; The first alternative allows the common case of a *full* overlap.
;; RSBS/RSC: negate low word setting carry, then subtract-with-carry
;; into the high word.
(define_insn "*arm_negdi2"
  [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
	(neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
  [(set_attr "conds" "clob")
   (set_attr "length" "8")]
)
4043
;; Thumb-1 64-bit negate: 0 - value via NEG on the low word and SBC on
;; the high word.  Earlyclobber keeps %R0 distinct from the source.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
  [(set_attr "length" "6")]
)
4052
;; 32-bit negate.
(define_expand "negsi2"
  [(set (match_operand:SI 0 "s_register_operand" "")
	(neg:SI (match_operand:SI 1 "s_register_operand" "")))]
  "TARGET_EITHER"
  ""
)
4059
;; Negate via reverse-subtract from zero.
(define_insn "*arm_negsi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_32BIT"
  "rsb%?\\t%0, %1, #0"
  [(set_attr "predicable" "yes")]
)
4067
;; Thumb-1 negate instruction.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "neg\\t%0, %1"
  [(set_attr "length" "2")]
)
4075
;; Single-precision FP negate; handled by the FPA or VFP patterns.
(define_expand "negsf2"
  [(set (match_operand:SF 0 "s_register_operand" "")
	(neg:SF (match_operand:SF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
  ""
)
4082
;; Double-precision FP negate; requires double-precision VFP or FPA.
(define_expand "negdf2"
  [(set (match_operand:DF 0 "s_register_operand" "")
	(neg:DF (match_operand:DF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
  "")
4088
4089 ;; abssi2 doesn't really clobber the condition codes if a different register
4090 ;; is being set. To keep things simple, assume during rtl manipulations that
4091 ;; it does, but tell the final scan operator the truth. Similarly for
4092 ;; (neg (abs...))
4093
;; abssi2 doesn't really clobber the condition codes if a different
;; register is being set (see the comment above in the original file);
;; during RTL we conservatively clobber CC on 32-bit targets and a
;; plain scratch on Thumb-1, where the split below avoids the flags.
(define_expand "abssi2"
  [(parallel
    [(set (match_operand:SI         0 "s_register_operand" "")
	  (abs:SI (match_operand:SI 1 "s_register_operand" "")))
     (clobber (match_dup 2))])]
  "TARGET_EITHER"
  "
  if (TARGET_THUMB1)
    operands[2] = gen_rtx_SCRATCH (SImode);
  else
    operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
")
4106
;; ARM abs: either compare + conditional reverse-subtract (clobbers
;; flags), or the branch-free EOR/SUB-with-asr-#31 sequence when source
;; and destination differ.
(define_insn "*arm_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%0, #0\;rsblt\\t%0, %0, #0
   eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
  [(set_attr "conds" "clob,*")
   (set_attr "shift" "1")
   ;; predicable can't be set based on the variant, so left as no
   (set_attr "length" "8")]
)
4120
;; Thumb-1 abs, split after reload into the branch-free sequence:
;; t = x >> 31 (arithmetic); d = x + t; d = d ^ t.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")]
)
4134
;; ARM -abs(x): mirror of *arm_abssi2 with the condition/subtract
;; directions inverted (rsbgt / rsb instead of rsblt / sub).
(define_insn "*arm_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "@
   cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
   eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
  [(set_attr "conds" "clob,*")
   (set_attr "shift" "1")
   ;; predicable can't be set based on the variant, so left as no
   (set_attr "length" "8")]
)
4148
;; Thumb-1 -abs(x), split after reload: t = x >> 31; d = t - x;
;; d = d ^ t.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")]
)
4162
;; Single-precision FP absolute value.
(define_expand "abssf2"
  [(set (match_operand:SF 0 "s_register_operand" "")
	(abs:SF (match_operand:SF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "")
4168
;; Double-precision FP absolute value; not available on
;; single-precision-only VFP.
(define_expand "absdf2"
  [(set (match_operand:DF 0 "s_register_operand" "")
	(abs:DF (match_operand:DF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "")
4174
;; Single-precision square root (FPA or VFP).
(define_expand "sqrtsf2"
  [(set (match_operand:SF 0 "s_register_operand" "")
	(sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
  "")
4180
;; Double-precision square root (FPA or double-precision VFP).
(define_expand "sqrtdf2"
  [(set (match_operand:DF 0 "s_register_operand" "")
	(sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
  "")
4186
;; 64-bit bitwise NOT, split after reload into two SImode MVNs on the
;; low and high halves.  Earlyclobber destination so the halves can be
;; written independently of the source halves.
(define_insn_and_split "one_cmpldi2"
  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
	(not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
  "TARGET_32BIT"
  "#"
  "TARGET_32BIT && reload_completed"
  [(set (match_dup 0) (not:SI (match_dup 1)))
   (set (match_dup 2) (not:SI (match_dup 3)))]
  "
  {
    /* Rewrite the DI operands as their SI low/high parts.  */
    operands[2] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[3] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
  }"
  [(set_attr "length" "8")
   (set_attr "predicable" "yes")]
)
4205
;; 32-bit bitwise NOT.
(define_expand "one_cmplsi2"
  [(set (match_operand:SI         0 "s_register_operand" "")
	(not:SI (match_operand:SI 1 "s_register_operand" "")))]
  "TARGET_EITHER"
  ""
)
4212
;; MVN for ARM/Thumb-2.
(define_insn "*arm_one_cmplsi2"
  [(set (match_operand:SI         0 "s_register_operand" "=r")
	(not:SI (match_operand:SI 1 "s_register_operand"  "r")))]
  "TARGET_32BIT"
  "mvn%?\\t%0, %1"
  [(set_attr "predicable" "yes")
   (set_attr "insn" "mvn")]
)
4221
;; MVN for Thumb-1 (16-bit encoding, low registers).
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
  "TARGET_THUMB1"
  "mvn\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "insn" "mvn")]
)
4230
;; MVNS: bitwise NOT that also sets the flags from the result.
(define_insn "*notsi_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
			 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(not:SI (match_dup 1)))]
  "TARGET_32BIT"
  "mvn%.\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "insn" "mvn")]
)
4242
;; MVNS where only the flags are wanted; result goes to a scratch.
(define_insn "*notsi_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
			 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "mvn%.\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "insn" "mvn")]
)
4253 \f
4254 ;; Fixed <--> Floating conversion insns
4255
;; SImode -> HFmode: no direct conversion exists, so go via SFmode
;; (int -> float, then narrow to half precision).
(define_expand "floatsihf2"
  [(set (match_operand:HF           0 "general_operand" "")
	(float:HF (match_operand:SI 1 "general_operand" "")))]
  "TARGET_EITHER"
  "
  {
    rtx op1 = gen_reg_rtx (SFmode);
    expand_float (op1, operands[1], 0);
    op1 = convert_to_mode (HFmode, op1, 0);
    emit_move_insn (operands[0], op1);
    DONE;
  }"
)
4269
;; DImode -> HFmode: as floatsihf2, via SFmode.
(define_expand "floatdihf2"
  [(set (match_operand:HF           0 "general_operand" "")
	(float:HF (match_operand:DI 1 "general_operand" "")))]
  "TARGET_EITHER"
  "
  {
    rtx op1 = gen_reg_rtx (SFmode);
    expand_float (op1, operands[1], 0);
    op1 = convert_to_mode (HFmode, op1, 0);
    emit_move_insn (operands[0], op1);
    DONE;
  }"
)
4283
;; SImode -> SFmode.  Maverick (Cirrus) has its own pattern; other
;; hard-float targets fall through to the FPA/VFP patterns.
(define_expand "floatsisf2"
  [(set (match_operand:SF           0 "s_register_operand" "")
	(float:SF (match_operand:SI 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "
  if (TARGET_MAVERICK)
    {
      emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
      DONE;
    }
")
4295
;; SImode -> DFmode, with the same Maverick special case.
(define_expand "floatsidf2"
  [(set (match_operand:DF           0 "s_register_operand" "")
	(float:DF (match_operand:SI 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "
  if (TARGET_MAVERICK)
    {
      emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
      DONE;
    }
")
4307
;; HFmode -> SImode: widen to SFmode first, then truncate to integer.
(define_expand "fix_trunchfsi2"
  [(set (match_operand:SI         0 "general_operand" "")
	(fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
  "TARGET_EITHER"
  "
  {
    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
    expand_fix (operands[0], op1, 0);
    DONE;
  }"
)
4319
;; HFmode -> DImode: widen to SFmode first, then truncate to integer.
(define_expand "fix_trunchfdi2"
  [(set (match_operand:DI         0 "general_operand" "")
	(fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
  "TARGET_EITHER"
  "
  {
    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
    expand_fix (operands[0], op1, 0);
    DONE;
  }"
)
4331
;; SFmode -> SImode truncation.  On Maverick both operands must live in
;; Cirrus FP registers before the dedicated instruction is emitted.
;;
;; Fix: the second force_reg previously copied operands[0] (the SImode
;; destination) into the SFmode source slot; it must reload the source
;; itself, operands[1].
(define_expand "fix_truncsfsi2"
  [(set (match_operand:SI         0 "s_register_operand" "")
	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "
  if (TARGET_MAVERICK)
    {
      if (!cirrus_fp_register (operands[0], SImode))
	operands[0] = force_reg (SImode, operands[0]);
      if (!cirrus_fp_register (operands[1], SFmode))
	operands[1] = force_reg (SFmode, operands[1]);
      emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
      DONE;
    }
")
4347
;; DFmode -> SImode truncation, with the Maverick special case.
;;
;; Fix: force_reg previously copied operands[0] (the SImode
;; destination) into a DFmode register for the source slot — a mode
;; mismatch as well as the wrong operand; it must reload operands[1].
(define_expand "fix_truncdfsi2"
  [(set (match_operand:SI         0 "s_register_operand" "")
	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "
  if (TARGET_MAVERICK)
    {
      if (!cirrus_fp_register (operands[1], DFmode))
	operands[1] = force_reg (DFmode, operands[1]);
      emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
      DONE;
    }
")
4361
4362 ;; Truncation insns
4363
;; DFmode -> SFmode truncation.
(define_expand "truncdfsf2"
  [(set (match_operand:SF  0 "s_register_operand" "")
	(float_truncate:SF
	 (match_operand:DF 1 "s_register_operand" "")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  ""
)
4371
;; DFmode -> HFmode conversions have to go through SFmode.
;; DFmode -> HFmode: narrow to SFmode first, then to half precision.
(define_expand "truncdfhf2"
  [(set (match_operand:HF  0 "general_operand" "")
	(float_truncate:HF
	 (match_operand:DF 1 "general_operand" "")))]
  "TARGET_EITHER"
  "
  {
    rtx op1;
    op1 = convert_to_mode (SFmode, operands[1], 0);
    op1 = convert_to_mode (HFmode, op1, 0);
    emit_move_insn (operands[0], op1);
    DONE;
  }"
)
4387 \f
4388 ;; Zero and sign extension instructions.
4389
;; Zero-extend QI/HI/SI to DImode; emitted as "#" and broken up by the
;; generic DI-extension split below.
(define_insn "zero_extend<mode>di2"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
					    "<qhs_extenddi_cstr>")))]
  "TARGET_32BIT <qhs_zextenddi_cond>"
  "#"
  [(set_attr "length" "8")
   (set_attr "ce_count" "2")
   (set_attr "predicable" "yes")]
)
4400
;; Sign-extend QI/HI/SI to DImode; split by the generic DI-extension
;; split below (high word becomes an arithmetic shift of the low word).
(define_insn "extend<mode>di2"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
					    "<qhs_extenddi_cstr>")))]
  "TARGET_32BIT <qhs_sextenddi_cond>"
  "#"
  [(set_attr "length" "8")
   (set_attr "ce_count" "2")
   (set_attr "shift" "1")
   (set_attr "predicable" "yes")]
)
4412
4413 ;; Splits for all extensions to DImode
;; Splits for all extensions to DImode
;; Split any zero-extension to DImode into: extend (or move) into the
;; low SImode part, then clear the high part (the replacement pattern
;; becomes high-part := 0 after the preparation code retargets the
;; operands).
(define_split
  [(set (match_operand:DI 0 "s_register_operand" "")
	(zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
  "TARGET_32BIT"
  [(set (match_dup 0) (match_dup 1))]
{
  rtx lo_part = gen_lowpart (SImode, operands[0]);
  enum machine_mode src_mode = GET_MODE (operands[1]);

  /* Clobber the full DI reg first so the two SI writes are not seen
     as a partial def of a live value.  */
  if (REG_P (operands[0])
      && !reg_overlap_mentioned_p (operands[0], operands[1]))
    emit_clobber (operands[0]);
  if (!REG_P (lo_part) || src_mode != SImode
      || !rtx_equal_p (lo_part, operands[1]))
    {
      if (src_mode == SImode)
	emit_move_insn (lo_part, operands[1]);
      else
	emit_insn (gen_rtx_SET (VOIDmode, lo_part,
				gen_rtx_ZERO_EXTEND (SImode, operands[1])));
      operands[1] = lo_part;
    }
  /* Remaining pattern: zero the high word.  */
  operands[0] = gen_highpart (SImode, operands[0]);
  operands[1] = const0_rtx;
})
4439
;; Split any sign-extension to DImode into: extend (or move) into the
;; low SImode part, then high-part := low-part >> 31 (arithmetic),
;; replicating the sign bit.
(define_split
  [(set (match_operand:DI 0 "s_register_operand" "")
	(sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
  "TARGET_32BIT"
  [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
{
  rtx lo_part = gen_lowpart (SImode, operands[0]);
  enum machine_mode src_mode = GET_MODE (operands[1]);

  /* Clobber the full DI reg first; see the zero-extend split above.  */
  if (REG_P (operands[0])
      && !reg_overlap_mentioned_p (operands[0], operands[1]))
    emit_clobber (operands[0]);

  if (!REG_P (lo_part) || src_mode != SImode
      || !rtx_equal_p (lo_part, operands[1]))
    {
      if (src_mode == SImode)
	emit_move_insn (lo_part, operands[1]);
      else
	emit_insn (gen_rtx_SET (VOIDmode, lo_part,
				gen_rtx_SIGN_EXTEND (SImode, operands[1])));
      operands[1] = lo_part;
    }
  operands[0] = gen_highpart (SImode, operands[0]);
})
4465
;; HImode -> SImode zero-extend.  Pre-ARMv4 has no LDRH, so memory
;; sources go through movhi_bytes; pre-ARMv6 has no UXTH, so register
;; sources become shift-left-16 / shift-right-16.
(define_expand "zero_extendhisi2"
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
  "TARGET_EITHER"
{
  if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
    {
      emit_insn (gen_movhi_bytes (operands[0], operands[1]));
      DONE;
    }
  if (!arm_arch6 && !MEM_P (operands[1]))
    {
      rtx t = gen_lowpart (SImode, operands[1]);
      rtx tmp = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
      DONE;
    }
})
4485
;; Pre-v6 fallback split for HI zero-extend from a register:
;; shift left 16 then logical shift right 16.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
  "!TARGET_THUMB2 && !arm_arch6"
  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
{
  operands[2] = gen_lowpart (SImode, operands[1]);
})
4495
;; Thumb-1 HI zero-extend: UXTH on v6, otherwise "#" (split) for the
;; register alternative, LDRH for memory.  The PLUS special case works
;; around reload occasionally producing an SP-relative halfword
;; address, which LDRH cannot encode, by materializing the address.
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
        {
          rtx ops[2];
          ops[0] = operands[0];
          ops[1] = a;

          output_asm_insn ("mov\t%0, %1", ops);

          XEXP (mem, 0) = operands[0];
       }
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
				       (const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "alu_shift,load_byte")]
)
4538
;; ARM (v4..v5) HI zero-extend: register form is split into shifts
;; ("#"), memory form uses LDRH.
(define_insn "*arm_zero_extendhisi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_ARM && arm_arch4 && !arm_arch6"
  "@
   #
   ldr%(h%)\\t%0, %1"
  [(set_attr "type" "alu_shift,load_byte")
   (set_attr "predicable" "yes")]
)
4549
;; ARMv6+ HI zero-extend: UXTH from a register, LDRH from memory.
(define_insn "*arm_zero_extendhisi2_v6"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_ARM && arm_arch6"
  "@
   uxth%?\\t%0, %1
   ldr%(h%)\\t%0, %1"
  [(set_attr "type" "alu_shift,load_byte")
   (set_attr "predicable" "yes")]
)
4560
;; UXTAH: zero-extend halfword and add in one instruction.
(define_insn "*arm_zero_extendhisi2addsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
		 (match_operand:SI 2 "s_register_operand" "r")))]
  "TARGET_INT_SIMD"
  "uxtah%?\\t%0, %2, %1"
  [(set_attr "type" "alu_shift")
   (set_attr "predicable" "yes")]
)
4570
;; QImode -> SImode zero-extend.  Pre-v6 ARM register sources use
;; AND #255; pre-v6 Thumb register sources use shift-left-24 /
;; shift-right-24; memory and v6+ cases fall through to the insns.
(define_expand "zero_extendqisi2"
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
  "TARGET_EITHER"
{
  if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
    {
      emit_insn (gen_andsi3 (operands[0],
			     gen_lowpart (SImode, operands[1]),
					  GEN_INT (255)));
      DONE;
    }
  if (!arm_arch6 && !MEM_P (operands[1]))
    {
      rtx t = gen_lowpart (SImode, operands[1]);
      rtx tmp = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
      DONE;
    }
})
4592
;; Pre-v6 fallback split for QI zero-extend from a register: two shifts
;; on Thumb, a single AND #255 on ARM.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
  "!arm_arch6"
  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
{
  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
  if (TARGET_ARM)
    {
      emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
      DONE;
    }
})
4607
;; Thumb-1 pre-v6 QI zero-extend: register form split ("#"), memory
;; form LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift,load_byte")
   (set_attr "pool_range" "*,32")]
)
4619
;; Thumb-1 v6 QI zero-extend: UXTB or LDRB, both 16-bit encodings.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_shift,load_byte")]
)
4630
;; ARM pre-v6 QI zero-extend: register form split ("#"), memory form
;; LDRB.
(define_insn "*arm_zero_extendqisi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_ARM && !arm_arch6"
  "@
   #
   ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
  [(set_attr "length" "8,4")
   (set_attr "type" "alu_shift,load_byte")
   (set_attr "predicable" "yes")]
)
4642
;; ARMv6+ QI zero-extend: UXTB from a register, LDRB from memory.
(define_insn "*arm_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_ARM && arm_arch6"
  "@
   uxtb%(%)\\t%0, %1
   ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
  [(set_attr "type" "alu_shift,load_byte")
   (set_attr "predicable" "yes")]
)
4653
;; UXTAB: zero-extend byte and add in one instruction.
(define_insn "*arm_zero_extendqisi2addsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
		 (match_operand:SI 2 "s_register_operand" "r")))]
  "TARGET_INT_SIMD"
  "uxtab%?\\t%0, %2, %1"
  [(set_attr "predicable" "yes")
   (set_attr "insn" "xtab")
   (set_attr "type" "alu_shift")]
)
4664
;; Little-endian: zero-extending the QImode lowpart (byte offset 0)
;; of an SImode register is a move into the scratch followed by
;; AND with 255.  Source must not be memory.
4665 (define_split
4666 [(set (match_operand:SI 0 "s_register_operand" "")
4667 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4668 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4669 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4670 [(set (match_dup 2) (match_dup 1))
4671 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4672 ""
4673 )
4674
;; Big-endian twin of the split above: the low byte of an SImode
;; value lives at subreg byte offset 3, otherwise identical.
4675 (define_split
4676 [(set (match_operand:SI 0 "s_register_operand" "")
4677 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4678 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4679 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4680 [(set (match_dup 2) (match_dup 1))
4681 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4682 ""
4683 )
4684
4685
;; Rewrite (X op ((A << N) & M)) with op in {ior, xor} and X a
;; zero-extended lowpart: when mask M is exactly the extend-mode
;; mask intersected with itself shifted by N (checked in the
;; condition below), the AND is redundant, so emit the ior/xor on
;; the full values and re-apply the zero-extend afterwards.
4686 (define_split
4687 [(set (match_operand:SI 0 "s_register_operand" "")
4688 (ior_xor:SI (and:SI (ashift:SI
4689 (match_operand:SI 1 "s_register_operand" "")
4690 (match_operand:SI 2 "const_int_operand" ""))
4691 (match_operand:SI 3 "const_int_operand" ""))
4692 (zero_extend:SI
4693 (match_operator 5 "subreg_lowpart_operator"
4694 [(match_operand:SI 4 "s_register_operand" "")]))))]
4695 "TARGET_32BIT
4696 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4697 == (GET_MODE_MASK (GET_MODE (operands[5]))
4698 & (GET_MODE_MASK (GET_MODE (operands[5]))
4699 << (INTVAL (operands[2])))))"
4700 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4701 (match_dup 4)))
4702 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4703 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4704 )
4705
;; Compare a QImode register against zero by testing only the low
;; byte (TST with immediate 255); sets the condition codes.
4706 (define_insn "*compareqi_eq0"
4707 [(set (reg:CC_Z CC_REGNUM)
4708 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4709 (const_int 0)))]
4710 "TARGET_32BIT"
4711 "tst\\t%0, #255"
4712 [(set_attr "conds" "set")]
4713 )
4714
;; Sign-extend HImode to SImode.  Dispatches on target/architecture:
;;  - Thumb-1 goes through its dedicated pattern;
;;  - ARM pre-v4 loads from memory lack LDRSH, so use the two-byte
;;    extendhisi2_mem expansion;
;;  - pre-v6 register sources are done as shift-left-16 then
;;    arithmetic-shift-right-16;
;;  - otherwise fall through to the generated pattern.
4715 (define_expand "extendhisi2"
4716 [(set (match_operand:SI 0 "s_register_operand" "")
4717 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4718 "TARGET_EITHER"
4719 {
4720 if (TARGET_THUMB1)
4721 {
4722 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4723 DONE;
4724 }
4725 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4726 {
4727 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4728 DONE;
4729 }
4730
4731 if (!arm_arch6 && !MEM_P (operands[1]))
4732 {
4733 rtx t = gen_lowpart (SImode, operands[1]);
4734 rtx tmp = gen_reg_rtx (SImode);
4735 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4736 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4737 DONE;
4738 }
4739 })
4740
;; Pre-v6 split for register HI->SI sign-extend (with scratch
;; clobber): shift the SImode view left 16 then arithmetic-shift
;; right 16.
4741 (define_split
4742 [(parallel
4743 [(set (match_operand:SI 0 "register_operand" "")
4744 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4745 (clobber (match_scratch:SI 2 ""))])]
4746 "!arm_arch6"
4747 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4748 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4749 {
4750 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4751 })
4752
4753 ;; We used to have an early-clobber on the scratch register here.
4754 ;; However, there's a bug somewhere in reload which means that this
4755 ;; can be partially ignored during spill allocation if the memory
4756 ;; address also needs reloading; this causes us to die later on when
4757 ;; we try to verify the operands. Fortunately, we don't really need
4758 ;; the early-clobber: we can always use operand 0 if operand 2
4759 ;; overlaps the address.
;;
;; Thumb-1 HI->SI sign-extend.  Register alternative: SXTH on v6+,
;; otherwise "#" to be split into a shift pair.  Memory alternative:
;; use LDRSH directly when the address form allows it (reg, or
;; reg+reg, or a literal-pool reference, loaded with plain LDR);
;; otherwise materialize the offset in a scratch (or in operand 0
;; when the scratch overlaps the address) and do reg+reg LDRSH.
4760 (define_insn "thumb1_extendhisi2"
4761 [(set (match_operand:SI 0 "register_operand" "=l,l")
4762 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4763 (clobber (match_scratch:SI 2 "=X,l"))]
4764 "TARGET_THUMB1"
4765 "*
4766 {
4767 rtx ops[4];
4768 rtx mem;
4769
4770 if (which_alternative == 0 && !arm_arch6)
4771 return \"#\";
4772 if (which_alternative == 0)
4773 return \"sxth\\t%0, %1\";
4774
4775 mem = XEXP (operands[1], 0);
4776
4777 /* This code used to try to use 'V', and fix the address only if it was
4778 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4779 range of QImode offsets, and offsettable_address_p does a QImode
4780 address check. */
4781
4782 if (GET_CODE (mem) == CONST)
4783 mem = XEXP (mem, 0);
4784
4785 if (GET_CODE (mem) == LABEL_REF)
4786 return \"ldr\\t%0, %1\";
4787
4788 if (GET_CODE (mem) == PLUS)
4789 {
4790 rtx a = XEXP (mem, 0);
4791 rtx b = XEXP (mem, 1);
4792
4793 if (GET_CODE (a) == LABEL_REF
4794 && GET_CODE (b) == CONST_INT)
4795 return \"ldr\\t%0, %1\";
4796
4797 if (GET_CODE (b) == REG)
4798 return \"ldrsh\\t%0, %1\";
4799
4800 ops[1] = a;
4801 ops[2] = b;
4802 }
4803 else
4804 {
4805 ops[1] = mem;
4806 ops[2] = const0_rtx;
4807 }
4808
4809 gcc_assert (GET_CODE (ops[1]) == REG);
4810
4811 ops[0] = operands[0];
4812 if (reg_mentioned_p (operands[2], ops[1]))
4813 ops[3] = ops[0];
4814 else
4815 ops[3] = operands[2];
4816 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4817 return \"\";
4818 }"
4819 [(set_attr_alternative "length"
4820 [(if_then_else (eq_attr "is_arch6" "yes")
4821 (const_int 2) (const_int 4))
4822 (const_int 4)])
4823 (set_attr "type" "alu_shift,load_byte")
4824 (set_attr "pool_range" "*,1020")]
4825 )
4826
4827 ;; This pattern will only be used when ldsh is not available
;; (ARM pre-v4).  Synthesize a signed halfword load from two byte
;; loads: zero-extend each byte, shift the high byte left 24 and
;; arithmetic-shift right 16 to sign-extend it into position, then
;; OR in the low byte.  BYTES_BIG_ENDIAN decides which of the two
;; byte loads supplies the high byte (operands 4/5 selection below).
4828 (define_expand "extendhisi2_mem"
4829 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4830 (set (match_dup 3)
4831 (zero_extend:SI (match_dup 7)))
4832 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4833 (set (match_operand:SI 0 "" "")
4834 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4835 "TARGET_ARM"
4836 "
4837 {
4838 rtx mem1, mem2;
4839 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4840
4841 mem1 = change_address (operands[1], QImode, addr);
4842 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4843 operands[0] = gen_lowpart (SImode, operands[0]);
4844 operands[1] = mem1;
4845 operands[2] = gen_reg_rtx (SImode);
4846 operands[3] = gen_reg_rtx (SImode);
4847 operands[6] = gen_reg_rtx (SImode);
4848 operands[7] = mem2;
4849
4850 if (BYTES_BIG_ENDIAN)
4851 {
4852 operands[4] = operands[2];
4853 operands[5] = operands[3];
4854 }
4855 else
4856 {
4857 operands[4] = operands[3];
4858 operands[5] = operands[2];
4859 }
4860 }"
4861 )
4862
;; Pre-v6 split for register HI->SI sign-extend without a scratch
;; clobber: same shift-left-16 / arithmetic-shift-right-16 sequence
;; as the parallel/clobber variant above.
4863 (define_split
4864 [(set (match_operand:SI 0 "register_operand" "")
4865 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4866 "!arm_arch6"
4867 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4868 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4869 {
4870 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4871 })
4872
;; ARM v4..v5 HI->SI sign-extend: "#" (register source, split into
;; a shift pair) or LDRSH for memory.
4873 (define_insn "*arm_extendhisi2"
4874 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4875 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4876 "TARGET_ARM && arm_arch4 && !arm_arch6"
4877 "@
4878 #
4879 ldr%(sh%)\\t%0, %1"
4880 [(set_attr "length" "8,4")
4881 (set_attr "type" "alu_shift,load_byte")
4882 (set_attr "predicable" "yes")
4883 (set_attr "pool_range" "*,256")
4884 (set_attr "neg_pool_range" "*,244")]
4885 )
4886
4887 ;; ??? Check Thumb-2 pool range
;; v6+ (ARM or Thumb-2) HI->SI sign-extend: SXTH for registers,
;; LDRSH for memory.
4888 (define_insn "*arm_extendhisi2_v6"
4889 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4890 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4891 "TARGET_32BIT && arm_arch6"
4892 "@
4893 sxth%?\\t%0, %1
4894 ldr%(sh%)\\t%0, %1"
4895 [(set_attr "type" "alu_shift,load_byte")
4896 (set_attr "predicable" "yes")
4897 (set_attr "pool_range" "*,256")
4898 (set_attr "neg_pool_range" "*,244")]
4899 )
4900
;; Fused sign-extend-and-add: SXTAH computes op2 + sign_extend(op1).
;; NOTE(review): unlike the QImode sibling (*arm_extendqisi2addsi),
;; this pattern sets no "type"/"insn"/"predicable" attributes —
;; possibly an oversight; confirm against scheduling descriptions.
4901 (define_insn "*arm_extendhisi2addsi"
4902 [(set (match_operand:SI 0 "s_register_operand" "=r")
4903 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4904 (match_operand:SI 2 "s_register_operand" "r")))]
4905 "TARGET_INT_SIMD"
4906 "sxtah%?\\t%0, %2, %1"
4907 )
4908
;; Sign-extend QImode to HImode (ARM only).  With v4 and a memory
;; source, emit a direct sign_extend (LDRSB pattern below handles
;; it).  Otherwise force the source into a register and express the
;; extension as shift-left-24 / arithmetic-shift-right-24 performed
;; on SImode lowparts.
4909 (define_expand "extendqihi2"
4910 [(set (match_dup 2)
4911 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4912 (const_int 24)))
4913 (set (match_operand:HI 0 "s_register_operand" "")
4914 (ashiftrt:SI (match_dup 2)
4915 (const_int 24)))]
4916 "TARGET_ARM"
4917 "
4918 {
4919 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4920 {
4921 emit_insn (gen_rtx_SET (VOIDmode,
4922 operands[0],
4923 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4924 DONE;
4925 }
4926 if (!s_register_operand (operands[1], QImode))
4927 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4928 operands[0] = gen_lowpart (SImode, operands[0]);
4929 operands[1] = gen_lowpart (SImode, operands[1]);
4930 operands[2] = gen_reg_rtx (SImode);
4931 }"
4932 )
4933
;; QI->HI sign-extending load via LDRSB (v4+); the Uq constraint
;; restricts the address to forms LDRSB can encode.
4934 (define_insn "*arm_extendqihi_insn"
4935 [(set (match_operand:HI 0 "s_register_operand" "=r")
4936 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4937 "TARGET_ARM && arm_arch4"
4938 "ldr%(sb%)\\t%0, %1"
4939 [(set_attr "type" "load_byte")
4940 (set_attr "predicable" "yes")
4941 (set_attr "pool_range" "256")
4942 (set_attr "neg_pool_range" "244")]
4943 )
4944
;; Sign-extend QImode to SImode.  Pre-v4 has no LDRSB, so memory
;; sources are first copied to a register; pre-v6 register sources
;; are expanded to shift-left-24 / arithmetic-shift-right-24.
4945 (define_expand "extendqisi2"
4946 [(set (match_operand:SI 0 "s_register_operand" "")
4947 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4948 "TARGET_EITHER"
4949 {
4950 if (!arm_arch4 && MEM_P (operands[1]))
4951 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4952
4953 if (!arm_arch6 && !MEM_P (operands[1]))
4954 {
4955 rtx t = gen_lowpart (SImode, operands[1]);
4956 rtx tmp = gen_reg_rtx (SImode);
4957 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4958 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4959 DONE;
4960 }
4961 })
4962
;; Pre-v6 split for register QI->SI sign-extend: shift-left-24 then
;; arithmetic-shift-right-24 on the SImode view of the source.
4963 (define_split
4964 [(set (match_operand:SI 0 "register_operand" "")
4965 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4966 "!arm_arch6"
4967 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4968 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4969 {
4970 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4971 })
4972
;; ARM v4..v5 QI->SI sign-extend: "#" for register source (split
;; into the shift pair), LDRSB for Uq-addressable memory.
4973 (define_insn "*arm_extendqisi"
4974 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4975 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4976 "TARGET_ARM && arm_arch4 && !arm_arch6"
4977 "@
4978 #
4979 ldr%(sb%)\\t%0, %1"
4980 [(set_attr "length" "8,4")
4981 (set_attr "type" "alu_shift,load_byte")
4982 (set_attr "predicable" "yes")
4983 (set_attr "pool_range" "*,256")
4984 (set_attr "neg_pool_range" "*,244")]
4985 )
4986
;; ARM v6+ QI->SI sign-extend: SXTB for registers, LDRSB for memory.
4987 (define_insn "*arm_extendqisi_v6"
4988 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4989 (sign_extend:SI
4990 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4991 "TARGET_ARM && arm_arch6"
4992 "@
4993 sxtb%?\\t%0, %1
4994 ldr%(sb%)\\t%0, %1"
4995 [(set_attr "type" "alu_shift,load_byte")
4996 (set_attr "predicable" "yes")
4997 (set_attr "pool_range" "*,256")
4998 (set_attr "neg_pool_range" "*,244")]
4999 )
5000
;; Fused sign-extend-and-add: SXTAB computes op2 + sign_extend(op1)
;; on cores with the integer SIMD extensions.
5001 (define_insn "*arm_extendqisi2addsi"
5002 [(set (match_operand:SI 0 "s_register_operand" "=r")
5003 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5004 (match_operand:SI 2 "s_register_operand" "r")))]
5005 "TARGET_INT_SIMD"
5006 "sxtab%?\\t%0, %2, %1"
5007 [(set_attr "type" "alu_shift")
5008 (set_attr "insn" "xtab")
5009 (set_attr "predicable" "yes")]
5010 )
5011
;; Thumb-1, post-reload: rewrite a sign-extending QImode load whose
;; address is not reg+reg (the only form Thumb-1 LDRSB accepts)
;; into "mov dest, offset; ldrsb dest, [base, dest]".  FAILs when
;; no rewrite is needed (already reg+reg) or none is possible
;; (neither address term is a register).  If the destination
;; overlaps the address, load unextended into the destination's
;; low byte first, then extend in place.
5012 (define_split
5013 [(set (match_operand:SI 0 "register_operand" "")
5014 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5015 "TARGET_THUMB1 && reload_completed"
5016 [(set (match_dup 0) (match_dup 2))
5017 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5018 {
5019 rtx addr = XEXP (operands[1], 0);
5020
5021 if (GET_CODE (addr) == CONST)
5022 addr = XEXP (addr, 0);
5023
5024 if (GET_CODE (addr) == PLUS
5025 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5026 /* No split necessary. */
5027 FAIL;
5028
5029 if (GET_CODE (addr) == PLUS
5030 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5031 FAIL;
5032
5033 if (reg_overlap_mentioned_p (operands[0], addr))
5034 {
5035 rtx t = gen_lowpart (QImode, operands[0]);
5036 emit_move_insn (t, operands[1]);
5037 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5038 DONE;
5039 }
5040
5041 if (REG_P (addr))
5042 {
5043 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5044 operands[2] = const0_rtx;
5045 }
5046 else if (GET_CODE (addr) != PLUS)
5047 FAIL;
5048 else if (REG_P (XEXP (addr, 0)))
5049 {
5050 operands[2] = XEXP (addr, 1);
5051 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5052 }
5053 else
5054 {
5055 operands[2] = XEXP (addr, 0);
5056 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5057 }
5058
5059 operands[3] = change_address (operands[1], QImode, addr);
5060 })
5061
;; Thumb-1 peephole: fold "base += C; idx = 0; dest = sext(mem
;; [base + idx])" into "idx = C; dest = sext(mem[base + idx])",
;; saving the add.  Only valid when base and idx die (or are the
;; destination) after the load.
5062 (define_peephole2
5063 [(set (match_operand:SI 0 "register_operand" "")
5064 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5065 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5066 (set (match_operand:SI 3 "register_operand" "")
5067 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5068 "TARGET_THUMB1
5069 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5070 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5071 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5072 && (peep2_reg_dead_p (3, operands[0])
5073 || rtx_equal_p (operands[0], operands[3]))
5074 && (peep2_reg_dead_p (3, operands[2])
5075 || rtx_equal_p (operands[2], operands[3]))"
5076 [(set (match_dup 2) (match_dup 1))
5077 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5078 {
5079 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5080 operands[4] = change_address (operands[4], QImode, addr);
5081 })
5082
;; Thumb-1 QI->SI sign-extend.  Register source: SXTB on v6+, else
;; "#" (split into a shift pair).  Memory source: LDRSB only when
;; the address is reg+reg; any other address also emits "#" and is
;; rewritten by the post-reload split above.
5083 (define_insn "thumb1_extendqisi2"
5084 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5085 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5086 "TARGET_THUMB1"
5087 {
5088 rtx addr;
5089
5090 if (which_alternative == 0 && arm_arch6)
5091 return "sxtb\\t%0, %1";
5092 if (which_alternative == 0)
5093 return "#";
5094
5095 addr = XEXP (operands[1], 0);
5096 if (GET_CODE (addr) == PLUS
5097 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5098 return "ldrsb\\t%0, %1";
5099
5100 return "#";
5101 }
5102 [(set_attr_alternative "length"
5103 [(if_then_else (eq_attr "is_arch6" "yes")
5104 (const_int 2) (const_int 4))
5105 (const_int 2)
5106 (if_then_else (eq_attr "is_arch6" "yes")
5107 (const_int 4) (const_int 6))])
5108 (set_attr "type" "alu_shift,load_byte,load_byte")]
5109 )
5110
;; SFmode -> DFmode float extension; requires double-precision
;; hardware FP (excluded for single-precision-only VFP).
5111 (define_expand "extendsfdf2"
5112 [(set (match_operand:DF 0 "s_register_operand" "")
5113 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5114 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5115 ""
5116 )
5117
5118 /* HFmode -> DFmode conversions have to go through SFmode. */
;; Chain the two conversions (HF->SF, SF->DF) via convert_to_mode
;; and emit a plain DF move of the result.
5119 (define_expand "extendhfdf2"
5120 [(set (match_operand:DF 0 "general_operand" "")
5121 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5122 "TARGET_EITHER"
5123 "
5124 {
5125 rtx op1;
5126 op1 = convert_to_mode (SFmode, operands[1], 0);
5127 op1 = convert_to_mode (DFmode, op1, 0);
5128 emit_insn (gen_movdf (operands[0], op1));
5129 DONE;
5130 }"
5131 )
5132 \f
5133 ;; Move insns (including loads and stores)
5134
5135 ;; XXX Just some ideas about movti.
5136 ;; I don't think these are a good idea on the arm, there just aren't enough
5137 ;; registers
5138 ;;(define_expand "loadti"
5139 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5140 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5141 ;; "" "")
5142
5143 ;;(define_expand "storeti"
5144 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5145 ;; (match_operand:TI 1 "s_register_operand" ""))]
5146 ;; "" "")
5147
5148 ;;(define_expand "movti"
5149 ;; [(set (match_operand:TI 0 "general_operand" "")
5150 ;; (match_operand:TI 1 "general_operand" ""))]
5151 ;; ""
5152 ;; "
5153 ;;{
5154 ;; rtx insn;
5155 ;;
5156 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
5157 ;; operands[1] = copy_to_reg (operands[1]);
5158 ;; if (GET_CODE (operands[0]) == MEM)
5159 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5160 ;; else if (GET_CODE (operands[1]) == MEM)
5161 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5162 ;; else
5163 ;; FAIL;
5164 ;;
5165 ;; emit_insn (insn);
5166 ;; DONE;
5167 ;;}")
5168
5169 ;; Recognize garbage generated above.
5170
5171 ;;(define_insn ""
5172 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5173 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5174 ;; ""
5175 ;; "*
5176 ;; {
5177 ;; register mem = (which_alternative < 3);
5178 ;; register const char *template;
5179 ;;
5180 ;; operands[mem] = XEXP (operands[mem], 0);
5181 ;; switch (which_alternative)
5182 ;; {
5183 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5184 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5185 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5186 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5187 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5188 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5189 ;; }
5190 ;; output_asm_insn (template, operands);
5191 ;; return \"\";
5192 ;; }")
5193
;; DImode move expander.  Before reload (pseudos available), force
;; the source into a register when the destination is not one, so
;; the insn patterns only ever see at most one memory operand.
5194 (define_expand "movdi"
5195 [(set (match_operand:DI 0 "general_operand" "")
5196 (match_operand:DI 1 "general_operand" ""))]
5197 "TARGET_EITHER"
5198 "
5199 if (can_create_pseudo_p ())
5200 {
5201 if (GET_CODE (operands[0]) != REG)
5202 operands[1] = force_reg (DImode, operands[1]);
5203 }
5204 "
5205 )
5206
;; 32-bit core-register DImode move (no VFP/Maverick/iWMMXt).
;; Alternatives 0-2 (register/constant sources of increasing
;; splitting cost Da/Db/Dc) emit "#" and are split later; memory
;; loads/stores go through output_move_double.
5207 (define_insn "*arm_movdi"
5208 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5209 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5210 "TARGET_32BIT
5211 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
5212 && !TARGET_IWMMXT
5213 && ( register_operand (operands[0], DImode)
5214 || register_operand (operands[1], DImode))"
5215 "*
5216 switch (which_alternative)
5217 {
5218 case 0:
5219 case 1:
5220 case 2:
5221 return \"#\";
5222 default:
5223 return output_move_double (operands, true, NULL);
5224 }
5225 "
5226 [(set_attr "length" "8,12,16,8,8")
5227 (set_attr "type" "*,*,*,load2,store2")
5228 (set_attr "arm_pool_range" "*,*,*,1020,*")
5229 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
5230 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5231 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5232 )
5233
;; Split a 64-bit constant move into two 32-bit immediate
;; constructions when the inline cost is low enough (<= 3 insns
;; when optimizing for size or with load-delay scheduling, else 4).
;; Each half is materialized independently via arm_split_constant.
5234 (define_split
5235 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5236 (match_operand:ANY64 1 "const_double_operand" ""))]
5237 "TARGET_32BIT
5238 && reload_completed
5239 && (arm_const_double_inline_cost (operands[1])
5240 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5241 [(const_int 0)]
5242 "
5243 arm_split_constant (SET, SImode, curr_insn,
5244 INTVAL (gen_lowpart (SImode, operands[1])),
5245 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5246 arm_split_constant (SET, SImode, curr_insn,
5247 INTVAL (gen_highpart_mode (SImode,
5248 GET_MODE (operands[0]),
5249 operands[1])),
5250 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5251 DONE;
5252 "
5253 )
5254
5255 ; If optimizing for size, or if we have load delay slots, then
5256 ; we want to split the constant into two separate operations.
5257 ; In both cases this may split a trivial part into a single data op
5258 ; leaving a single complex constant to load. We can also get longer
5259 ; offsets in a LDR which means we get better chances of sharing the pool
5260 ; entries. Finally, we can normally do a better job of scheduling
5261 ; LDR instructions than we can with LDM.
5262 ; This pattern will only match if the one above did not.
5263 (define_split
5264 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5265 (match_operand:ANY64 1 "const_double_operand" ""))]
5266 "TARGET_ARM && reload_completed
5267 && arm_const_double_by_parts (operands[1])"
5268 [(set (match_dup 0) (match_dup 1))
5269 (set (match_dup 2) (match_dup 3))]
5270 "
5271 operands[2] = gen_highpart (SImode, operands[0]);
5272 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5273 operands[1]);
5274 operands[0] = gen_lowpart (SImode, operands[0]);
5275 operands[1] = gen_lowpart (SImode, operands[1]);
5276 "
5277 )
5278
;; Post-reload split of a 64-bit register-to-register move into two
;; 32-bit moves.  When the low-destination equals the high-source
;; (partial overlap), swap the order so the high half is moved
;; first and nothing is clobbered before it is read.
5279 (define_split
5280 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5281 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5282 "TARGET_EITHER && reload_completed"
5283 [(set (match_dup 0) (match_dup 1))
5284 (set (match_dup 2) (match_dup 3))]
5285 "
5286 operands[2] = gen_highpart (SImode, operands[0]);
5287 operands[3] = gen_highpart (SImode, operands[1]);
5288 operands[0] = gen_lowpart (SImode, operands[0]);
5289 operands[1] = gen_lowpart (SImode, operands[1]);
5290
5291 /* Handle a partial overlap. */
5292 if (rtx_equal_p (operands[0], operands[3]))
5293 {
5294 rtx tmp0 = operands[0];
5295 rtx tmp1 = operands[1];
5296
5297 operands[0] = operands[2];
5298 operands[1] = operands[3];
5299 operands[2] = tmp0;
5300 operands[3] = tmp1;
5301 }
5302 "
5303 )
5304
5305 ;; We can't actually do base+index doubleword loads if the index and
5306 ;; destination overlap. Split here so that we at least have chance to
5307 ;; schedule.
;; The base+index sum is computed first into the low half of the
;; destination pair, then a plain register-addressed DI load follows.
5308 (define_split
5309 [(set (match_operand:DI 0 "s_register_operand" "")
5310 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5311 (match_operand:SI 2 "s_register_operand" ""))))]
5312 "TARGET_LDRD
5313 && reg_overlap_mentioned_p (operands[0], operands[1])
5314 && reg_overlap_mentioned_p (operands[0], operands[2])"
5315 [(set (match_dup 4)
5316 (plus:SI (match_dup 1)
5317 (match_dup 2)))
5318 (set (match_dup 0)
5319 (mem:DI (match_dup 4)))]
5320 "
5321 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5322 "
5323 )
5324
5325 ;;; ??? This should have alternatives for constants.
5326 ;;; ??? This was originally identical to the movdf_insn pattern.
5327 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5328 ;;; thumb_reorg with a memory reference.
;;
;; Thumb-1 DImode move.  Alternatives: reg-reg (via ADD #0 pairs,
;; ordered to cope with overlap), small positive immediate, negated
;; immediate (J: synthesized with NEG/ASR sign fill), LDMIA/STMIA
;; through a register address, literal-pool/memory load, two-word
;; store, and hi-reg pair copy (order again chosen by overlap).
5329 (define_insn "*thumb1_movdi_insn"
5330 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5331 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5332 "TARGET_THUMB1
5333 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5334 && ( register_operand (operands[0], DImode)
5335 || register_operand (operands[1], DImode))"
5336 "*
5337 {
5338 switch (which_alternative)
5339 {
5340 default:
5341 case 0:
5342 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5343 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5344 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5345 case 1:
5346 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5347 case 2:
5348 operands[1] = GEN_INT (- INTVAL (operands[1]));
5349 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5350 case 3:
5351 return \"ldmia\\t%1, {%0, %H0}\";
5352 case 4:
5353 return \"stmia\\t%0, {%1, %H1}\";
5354 case 5:
5355 return thumb_load_double_from_address (operands);
5356 case 6:
5357 operands[2] = gen_rtx_MEM (SImode,
5358 plus_constant (XEXP (operands[0], 0), 4));
5359 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5360 return \"\";
5361 case 7:
5362 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5363 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5364 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5365 }
5366 }"
5367 [(set_attr "length" "4,4,6,2,2,6,4,4")
5368 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5369 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5370 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5371 )
5372
;; SImode move expander.  Handles, in order:
;;  - 32-bit targets: force store sources to registers; split awkward
;;    immediates via arm_split_constant; use MOVW/MOVT pairs for
;;    plain symbols when available;
;;  - Thumb-1: force non-register destinations' sources to registers;
;;  - section-anchor offsets that leave the block (symbol + add);
;;  - TLS symbols (legitimize_tls_address, with optional addend);
;;  - PIC legitimization for any remaining symbolic constants.
5373 (define_expand "movsi"
5374 [(set (match_operand:SI 0 "general_operand" "")
5375 (match_operand:SI 1 "general_operand" ""))]
5376 "TARGET_EITHER"
5377 "
5378 {
5379 rtx base, offset, tmp;
5380
5381 if (TARGET_32BIT)
5382 {
5383 /* Everything except mem = const or mem = mem can be done easily. */
5384 if (GET_CODE (operands[0]) == MEM)
5385 operands[1] = force_reg (SImode, operands[1]);
5386 if (arm_general_register_operand (operands[0], SImode)
5387 && GET_CODE (operands[1]) == CONST_INT
5388 && !(const_ok_for_arm (INTVAL (operands[1]))
5389 || const_ok_for_arm (~INTVAL (operands[1]))))
5390 {
5391 arm_split_constant (SET, SImode, NULL_RTX,
5392 INTVAL (operands[1]), operands[0], NULL_RTX,
5393 optimize && can_create_pseudo_p ());
5394 DONE;
5395 }
5396
5397 if (TARGET_USE_MOVT && !target_word_relocations
5398 && GET_CODE (operands[1]) == SYMBOL_REF
5399 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5400 {
5401 arm_emit_movpair (operands[0], operands[1]);
5402 DONE;
5403 }
5404 }
5405 else /* TARGET_THUMB1... */
5406 {
5407 if (can_create_pseudo_p ())
5408 {
5409 if (GET_CODE (operands[0]) != REG)
5410 operands[1] = force_reg (SImode, operands[1]);
5411 }
5412 }
5413
5414 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5415 {
5416 split_const (operands[1], &base, &offset);
5417 if (GET_CODE (base) == SYMBOL_REF
5418 && !offset_within_block_p (base, INTVAL (offset)))
5419 {
5420 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5421 emit_move_insn (tmp, base);
5422 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5423 DONE;
5424 }
5425 }
5426
5427 /* Recognize the case where operand[1] is a reference to thread-local
5428 data and load its address to a register. */
5429 if (arm_tls_referenced_p (operands[1]))
5430 {
5431 rtx tmp = operands[1];
5432 rtx addend = NULL;
5433
5434 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5435 {
5436 addend = XEXP (XEXP (tmp, 0), 1);
5437 tmp = XEXP (XEXP (tmp, 0), 0);
5438 }
5439
5440 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5441 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5442
5443 tmp = legitimize_tls_address (tmp,
5444 !can_create_pseudo_p () ? operands[0] : 0);
5445 if (addend)
5446 {
5447 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5448 tmp = force_operand (tmp, operands[0]);
5449 }
5450 operands[1] = tmp;
5451 }
5452 else if (flag_pic
5453 && (CONSTANT_P (operands[1])
5454 || symbol_mentioned_p (operands[1])
5455 || label_mentioned_p (operands[1])))
5456 operands[1] = legitimize_pic_address (operands[1], SImode,
5457 (!can_create_pseudo_p ()
5458 ? operands[0]
5459 : 0));
5460 }
5461 "
5462 )
5463
5464 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5465 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5466 ;; so this does not matter.
;; MOVT writes the upper 16 bits of the destination, keeping the low
;; half (tied operand 1); used as the second half of a MOVW/MOVT pair.
5467 (define_insn "*arm_movt"
5468 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5469 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5470 (match_operand:SI 2 "general_operand" "i")))]
5471 "arm_arch_thumb2"
5472 "movt%?\t%0, #:upper16:%c2"
5473 [(set_attr "predicable" "yes")
5474 (set_attr "length" "4")]
5475 )
5476
;; ARM-mode SImode move (core registers, no VFP/iWMMXt):
;; MOV reg/immediate, MVN of an inverted immediate (%B1), MOVW for
;; j-constraint 16-bit immediates, and LDR/STR for memory.
5477 (define_insn "*arm_movsi_insn"
5478 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5479 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5480 "TARGET_ARM && ! TARGET_IWMMXT
5481 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5482 && ( register_operand (operands[0], SImode)
5483 || register_operand (operands[1], SImode))"
5484 "@
5485 mov%?\\t%0, %1
5486 mov%?\\t%0, %1
5487 mvn%?\\t%0, #%B1
5488 movw%?\\t%0, %1
5489 ldr%?\\t%0, %1
5490 str%?\\t%1, %0"
5491 [(set_attr "type" "*,*,*,*,load1,store1")
5492 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5493 (set_attr "predicable" "yes")
5494 (set_attr "pool_range" "*,*,*,*,4096,*")
5495 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5496 )
5497
;; Split an SImode constant that neither MOV nor MVN can encode into
;; a multi-instruction sequence via arm_split_constant.
5498 (define_split
5499 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5500 (match_operand:SI 1 "const_int_operand" ""))]
5501 "TARGET_32BIT
5502 && (!(const_ok_for_arm (INTVAL (operands[1]))
5503 || const_ok_for_arm (~INTVAL (operands[1]))))"
5504 [(clobber (const_int 0))]
5505 "
5506 arm_split_constant (SET, SImode, NULL_RTX,
5507 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5508 DONE;
5509 "
5510 )
5511
;; Thumb-1 SImode move.  "#" alternatives (J: negated constant;
;; K: shifted byte constant) are handled by the two splits below.
;; Note the differing flag behavior per alternative in "conds".
5512 (define_insn "*thumb1_movsi_insn"
5513 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5514 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5515 "TARGET_THUMB1
5516 && ( register_operand (operands[0], SImode)
5517 || register_operand (operands[1], SImode))"
5518 "@
5519 mov %0, %1
5520 mov %0, %1
5521 #
5522 #
5523 ldmia\\t%1, {%0}
5524 stmia\\t%0, {%1}
5525 ldr\\t%0, %1
5526 str\\t%1, %0
5527 mov\\t%0, %1"
5528 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5529 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5530 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5531 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5532
;; Thumb-1: materialize a J-constraint (small negative) constant as
;; MOV of its negation followed by NEG.
5533 (define_split
5534 [(set (match_operand:SI 0 "register_operand" "")
5535 (match_operand:SI 1 "const_int_operand" ""))]
5536 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5537 [(set (match_dup 2) (match_dup 1))
5538 (set (match_dup 0) (neg:SI (match_dup 2)))]
5539 "
5540 {
5541 operands[1] = GEN_INT (- INTVAL (operands[1]));
5542 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5543 }"
5544 )
5545
;; Thumb-1: materialize a K-constraint constant (a byte value
;; shifted left) as MOV of the unshifted byte followed by LSL.
;; The loop finds the smallest shift that reconstructs the value;
;; FAIL when the shift would be zero (plain MOV already works).
5546 (define_split
5547 [(set (match_operand:SI 0 "register_operand" "")
5548 (match_operand:SI 1 "const_int_operand" ""))]
5549 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5550 [(set (match_dup 2) (match_dup 1))
5551 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5552 "
5553 {
5554 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5555 unsigned HOST_WIDE_INT mask = 0xff;
5556 int i;
5557
5558 for (i = 0; i < 25; i++)
5559 if ((val & (mask << i)) == val)
5560 break;
5561
5562 /* Don't split if the shift is zero. */
5563 if (i == 0)
5564 FAIL;
5565
5566 operands[1] = GEN_INT (val >> i);
5567 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5568 operands[3] = GEN_INT (i);
5569 }"
5570 )
5571
5572 ;; When generating pic, we need to load the symbol offset into a register.
5573 ;; So that the optimizer does not confuse this with a normal symbol load
5574 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5575 ;; since that is the only type of relocation we can use.
5576
5577 ;; Wrap calculation of the whole PIC address in a single pattern for the
5578 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5579 ;; a PIC address involves two loads from memory, so we want to CSE it
5580 ;; as often as possible.
5581 ;; This pattern will be split into one of the pic_load_addr_* patterns
5582 ;; and a move after GCSE optimizations.
5583 ;;
5584 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5585 (define_expand "calculate_pic_address"
5586 [(set (match_operand:SI 0 "register_operand" "")
5587 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5588 (unspec:SI [(match_operand:SI 2 "" "")]
5589 UNSPEC_PIC_SYM))))]
5590 "flag_pic"
5591 )
5592
5593 ;; Split calculate_pic_address into pic_load_addr_* and a move.
;; The GOT-offset load goes into a fresh pseudo (or operand 0 when
;; pseudos are unavailable), followed by the GOT-relative load.
5594 (define_split
5595 [(set (match_operand:SI 0 "register_operand" "")
5596 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5597 (unspec:SI [(match_operand:SI 2 "" "")]
5598 UNSPEC_PIC_SYM))))]
5599 "flag_pic"
5600 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5601 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5602 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5603 )
5604
5605 ;; The rather odd constraints on the following are to force reload to leave
5606 ;; the insn alone, and to force the minipool generation pass to then move
5607 ;; the GOT symbol to memory.
;; Loads a PIC symbol offset from the literal pool with LDR; the
;; negative pool range depends on whether we are in Thumb state.
5609 (define_insn "pic_load_addr_32bit"
5610 [(set (match_operand:SI 0 "s_register_operand" "=r")
5611 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5612 "TARGET_32BIT && flag_pic"
5613 "ldr%?\\t%0, %1"
5614 [(set_attr "type" "load1")
5615 (set_attr "pool_range" "4096")
5616 (set (attr "neg_pool_range")
5617 (if_then_else (eq_attr "is_thumb" "no")
5618 (const_int 4084)
5619 (const_int 0)))]
5620 )
5621
5622 (define_insn "pic_load_addr_thumb1"
5623 [(set (match_operand:SI 0 "s_register_operand" "=l")
5624 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5625 "TARGET_THUMB1 && flag_pic"
5626 "ldr\\t%0, %1"
5627 [(set_attr "type" "load1")
5628 (set (attr "pool_range") (const_int 1024))]
5629 )
5630
;; Emit the local label "LPICn" (n = INTVAL of operand 2) and add the
;; pc to op0.  In Thumb state the pc reads as the instruction address
;; plus 4, hence the (const_int 4) in the unspec.
5631 (define_insn "pic_add_dot_plus_four"
5632 [(set (match_operand:SI 0 "register_operand" "=r")
5633 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5634 (const_int 4)
5635 (match_operand 2 "" "")]
5636 UNSPEC_PIC_BASE))]
5637 "TARGET_THUMB"
5638 "*
5639 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5640 INTVAL (operands[2]));
5641 return \"add\\t%0, %|pc\";
5642 "
5643 [(set_attr "length" "2")]
5644 )
5645
;; ARM-state counterpart: pc reads as instruction address plus 8.
5646 (define_insn "pic_add_dot_plus_eight"
5647 [(set (match_operand:SI 0 "register_operand" "=r")
5648 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5649 (const_int 8)
5650 (match_operand 2 "" "")]
5651 UNSPEC_PIC_BASE))]
5652 "TARGET_ARM"
5653 "*
5654 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5655 INTVAL (operands[2]));
5656 return \"add%?\\t%0, %|pc, %1\";
5657 "
5658 [(set_attr "predicable" "yes")]
5659 )
5660
;; Combined form of pic_add_dot_plus_eight followed by a load: emit
;; the LPIC label and load directly from [pc, op1] (used for TLS).
5661 (define_insn "tls_load_dot_plus_eight"
5662 [(set (match_operand:SI 0 "register_operand" "=r")
5663 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5664 (const_int 8)
5665 (match_operand 2 "" "")]
5666 UNSPEC_PIC_BASE)))]
5667 "TARGET_ARM"
5668 "*
5669 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5670 INTVAL (operands[2]));
5671 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5672 "
5673 [(set_attr "predicable" "yes")]
5674 )
5675
5676 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5677 ;; followed by a load. These sequences can be crunched down to
5678 ;; tls_load_dot_plus_eight by a peephole.
5679
;; Fuse "op0 = pic base; op2 = *op0" into the single pc-relative load
;; above, provided op0 is dead after the second insn.
5680 (define_peephole2
5681 [(set (match_operand:SI 0 "register_operand" "")
5682 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5683 (const_int 8)
5684 (match_operand 1 "" "")]
5685 UNSPEC_PIC_BASE))
5686 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5687 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5688 [(set (match_dup 2)
5689 (mem:SI (unspec:SI [(match_dup 3)
5690 (const_int 8)
5691 (match_dup 1)]
5692 UNSPEC_PIC_BASE)))]
5693 ""
5694 )
5695
;; VxWorks RTP PIC: load op0 from op1 plus the UNSPEC_PIC_OFFSET of
;; symbol op2, as a single register-offset LDR.
5696 (define_insn "pic_offset_arm"
5697 [(set (match_operand:SI 0 "register_operand" "=r")
5698 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5699 (unspec:SI [(match_operand:SI 2 "" "X")]
5700 UNSPEC_PIC_OFFSET))))]
5701 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5702 "ldr%?\\t%0, [%1,%2]"
5703 [(set_attr "type" "load1")]
5704 )
5705
;; Re-materialize the PIC base register after a longjmp back into a
;; builtin-setjmp receiver label.
5706 (define_expand "builtin_setjmp_receiver"
5707 [(label_ref (match_operand 0 "" ""))]
5708 "flag_pic"
5709 "
5710 {
5711 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5712 register. */
5713 if (arm_pic_register != INVALID_REGNUM)
5714 arm_load_pic_register (1UL << 3);
5715 DONE;
5716 }")
5717
5718 ;; If copying one reg to another we can set the condition codes according to
5719 ;; its value. Such a move is common after a return from subroutine and the
5720 ;; result is being tested against zero.
5721
;; Copy op1 to op0 while setting the condition codes from the value
;; (compared against zero).  When source and destination are the same
;; register (first alternative) a plain CMP suffices; otherwise
;; SUBS op0, op1, #0 performs the copy and sets the flags.
5722 (define_insn "*movsi_compare0"
5723 [(set (reg:CC CC_REGNUM)
5724 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5725 (const_int 0)))
5726 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5727 (match_dup 1))]
5728 "TARGET_32BIT"
5729 "@
5730 cmp%?\\t%0, #0
5731 sub%.\\t%0, %1, #0"
5732 [(set_attr "conds" "set")]
5733 )
5734
5735 ;; Subroutine to store a half word from a register into memory.
5736 ;; Operand 0 is the source register (HImode)
5737 ;; Operand 1 is the destination address in a register (SImode)
5738
5739 ;; In both this routine and the next, we must be careful not to spill
5740 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5741 ;; can generate unrecognizable rtl.
5742
;; storehi (little-endian): store HImode op0 to memory op1 as two byte
;; stores -- low byte at the base address, then op2 = op0 >> 8 and its
;; low byte (op5) stored at address + 1 (op4).  Reg+reg and MINUS
;; addresses are first forced into a register so adjust_address cannot
;; produce unrecognizable RTL (see comment above).
5743 (define_expand "storehi"
5744 [;; store the low byte
5745 (set (match_operand 1 "" "") (match_dup 3))
5746 ;; extract the high byte
5747 (set (match_dup 2)
5748 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5749 ;; store the high byte
5750 (set (match_dup 4) (match_dup 5))]
5751 "TARGET_ARM"
5752 "
5753 {
5754 rtx op1 = operands[1];
5755 rtx addr = XEXP (op1, 0);
5756 enum rtx_code code = GET_CODE (addr);
5757
5758 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5759 || code == MINUS)
5760 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5761
5762 operands[4] = adjust_address (op1, QImode, 1);
5763 operands[1] = adjust_address (operands[1], QImode, 0);
5764 operands[3] = gen_lowpart (QImode, operands[0]);
5765 operands[0] = gen_lowpart (SImode, operands[0]);
5766 operands[2] = gen_reg_rtx (SImode);
5767 operands[5] = gen_lowpart (QImode, operands[2]);
5768 }"
5769 )
5770
;; Big-endian variant of storehi: the low byte of the source goes to
;; address + 1 and the high byte (op0 >> 8) to the base address.
5771 (define_expand "storehi_bigend"
5772 [(set (match_dup 4) (match_dup 3))
5773 (set (match_dup 2)
5774 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5775 (set (match_operand 1 "" "") (match_dup 5))]
5776 "TARGET_ARM"
5777 "
5778 {
5779 rtx op1 = operands[1];
5780 rtx addr = XEXP (op1, 0);
5781 enum rtx_code code = GET_CODE (addr);
5782
5783 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5784 || code == MINUS)
5785 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5786
5787 operands[4] = adjust_address (op1, QImode, 1);
5788 operands[1] = adjust_address (operands[1], QImode, 0);
5789 operands[3] = gen_lowpart (QImode, operands[0]);
5790 operands[0] = gen_lowpart (SImode, operands[0]);
5791 operands[2] = gen_reg_rtx (SImode);
5792 operands[5] = gen_lowpart (QImode, operands[2]);
5793 }"
5794 )
5795
5796 ;; Subroutine to store a half word integer constant into memory.
;; storeinthi: store the HImode constant op1 to memory op0 as two byte
;; stores, honouring BYTES_BIG_ENDIAN for which byte goes first, and
;; sharing a single register when both bytes have the same value.
5797 (define_expand "storeinthi"
5798 [(set (match_operand 0 "" "")
5799 (match_operand 1 "" ""))
5800 (set (match_dup 3) (match_dup 2))]
5801 "TARGET_ARM"
5802 "
5803 {
5804 HOST_WIDE_INT value = INTVAL (operands[1]);
5805 rtx addr = XEXP (operands[0], 0);
5806 rtx op0 = operands[0];
5807 enum rtx_code code = GET_CODE (addr);
5808
5809 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5810 || code == MINUS)
5811 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5812
5813 operands[1] = gen_reg_rtx (SImode);
5814 if (BYTES_BIG_ENDIAN)
5815 {
5816 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5817 if ((value & 255) == ((value >> 8) & 255))
5818 operands[2] = operands[1];
5819 else
5820 {
5821 operands[2] = gen_reg_rtx (SImode);
5822 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5823 }
5824 }
5825 else
5826 {
5827 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5828 if ((value & 255) == ((value >> 8) & 255))
5829 operands[2] = operands[1];
5830 else
5831 {
5832 operands[2] = gen_reg_rtx (SImode);
5833 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5834 }
5835 }
5836
5837 operands[3] = adjust_address (op0, QImode, 1);
5838 operands[0] = adjust_address (operands[0], QImode, 0);
5839 operands[2] = gen_lowpart (QImode, operands[2]);
5840 operands[1] = gen_lowpart (QImode, operands[1]);
5841 }"
5842 )
5843
;; ARMv4 and later have STRH, so a HImode store is one instruction;
;; just make sure the source is in a register first.
5844 (define_expand "storehi_single_op"
5845 [(set (match_operand:HI 0 "memory_operand" "")
5846 (match_operand:HI 1 "general_operand" ""))]
5847 "TARGET_32BIT && arm_arch4"
5848 "
5849 if (!s_register_operand (operands[1], HImode))
5850 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5851 "
5852 )
5853
;; movhi: expand HImode moves for all three targets.
;; ARM, before reload: stores go through storehi_single_op (ARMv4+),
;;   storeinthi (constant source) or storehi/storehi_bigend; constants
;;   are legitimized into an SImode pseudo (sign-extended or
;;   top-bits-set so const_ok_for_arm holds where possible); on
;;   pre-ARMv4, loads from sufficiently aligned addresses are widened
;;   to an SImode load (shifted down as needed), otherwise
;;   movhi_bytes is used.
;; ARM, during reload: only large-constant-to-register is handled,
;;   via an SImode subreg move.
;; Thumb-2: only mem destinations and constant sources need help.
;; Thumb-1: constants go via SImode, illegitimate HImode addresses are
;;   rewritten, loads are zero-extended when optimizing, and stores
;;   get a register source.
5854 (define_expand "movhi"
5855 [(set (match_operand:HI 0 "general_operand" "")
5856 (match_operand:HI 1 "general_operand" ""))]
5857 "TARGET_EITHER"
5858 "
5859 if (TARGET_ARM)
5860 {
5861 if (can_create_pseudo_p ())
5862 {
5863 if (GET_CODE (operands[0]) == MEM)
5864 {
5865 if (arm_arch4)
5866 {
5867 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5868 DONE;
5869 }
5870 if (GET_CODE (operands[1]) == CONST_INT)
5871 emit_insn (gen_storeinthi (operands[0], operands[1]));
5872 else
5873 {
5874 if (GET_CODE (operands[1]) == MEM)
5875 operands[1] = force_reg (HImode, operands[1]);
5876 if (BYTES_BIG_ENDIAN)
5877 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5878 else
5879 emit_insn (gen_storehi (operands[1], operands[0]));
5880 }
5881 DONE;
5882 }
5883 /* Sign extend a constant, and keep it in an SImode reg. */
5884 else if (GET_CODE (operands[1]) == CONST_INT)
5885 {
5886 rtx reg = gen_reg_rtx (SImode);
5887 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5888
5889 /* If the constant is already valid, leave it alone. */
5890 if (!const_ok_for_arm (val))
5891 {
5892 /* If setting all the top bits will make the constant
5893 loadable in a single instruction, then set them.
5894 Otherwise, sign extend the number. */
5895
5896 if (const_ok_for_arm (~(val | ~0xffff)))
5897 val |= ~0xffff;
5898 else if (val & 0x8000)
5899 val |= ~0xffff;
5900 }
5901
5902 emit_insn (gen_movsi (reg, GEN_INT (val)));
5903 operands[1] = gen_lowpart (HImode, reg);
5904 }
5905 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5906 && GET_CODE (operands[1]) == MEM)
5907 {
5908 rtx reg = gen_reg_rtx (SImode);
5909
5910 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5911 operands[1] = gen_lowpart (HImode, reg);
5912 }
5913 else if (!arm_arch4)
5914 {
5915 if (GET_CODE (operands[1]) == MEM)
5916 {
5917 rtx base;
5918 rtx offset = const0_rtx;
5919 rtx reg = gen_reg_rtx (SImode);
5920
5921 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5922 || (GET_CODE (base) == PLUS
5923 && (GET_CODE (offset = XEXP (base, 1))
5924 == CONST_INT)
5925 && ((INTVAL(offset) & 1) != 1)
5926 && GET_CODE (base = XEXP (base, 0)) == REG))
5927 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5928 {
5929 rtx new_rtx;
5930
5931 new_rtx = widen_memory_access (operands[1], SImode,
5932 ((INTVAL (offset) & ~3)
5933 - INTVAL (offset)));
5934 emit_insn (gen_movsi (reg, new_rtx));
5935 if (((INTVAL (offset) & 2) != 0)
5936 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5937 {
5938 rtx reg2 = gen_reg_rtx (SImode);
5939
5940 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5941 reg = reg2;
5942 }
5943 }
5944 else
5945 emit_insn (gen_movhi_bytes (reg, operands[1]));
5946
5947 operands[1] = gen_lowpart (HImode, reg);
5948 }
5949 }
5950 }
5951 /* Handle loading a large integer during reload. */
5952 else if (GET_CODE (operands[1]) == CONST_INT
5953 && !const_ok_for_arm (INTVAL (operands[1]))
5954 && !const_ok_for_arm (~INTVAL (operands[1])))
5955 {
5956 /* Writing a constant to memory needs a scratch, which should
5957 be handled with SECONDARY_RELOADs. */
5958 gcc_assert (GET_CODE (operands[0]) == REG);
5959
5960 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5961 emit_insn (gen_movsi (operands[0], operands[1]));
5962 DONE;
5963 }
5964 }
5965 else if (TARGET_THUMB2)
5966 {
5967 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5968 if (can_create_pseudo_p ())
5969 {
5970 if (GET_CODE (operands[0]) != REG)
5971 operands[1] = force_reg (HImode, operands[1]);
5972 /* Zero extend a constant, and keep it in an SImode reg. */
5973 else if (GET_CODE (operands[1]) == CONST_INT)
5974 {
5975 rtx reg = gen_reg_rtx (SImode);
5976 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5977
5978 emit_insn (gen_movsi (reg, GEN_INT (val)));
5979 operands[1] = gen_lowpart (HImode, reg);
5980 }
5981 }
5982 }
5983 else /* TARGET_THUMB1 */
5984 {
5985 if (can_create_pseudo_p ())
5986 {
5987 if (GET_CODE (operands[1]) == CONST_INT)
5988 {
5989 rtx reg = gen_reg_rtx (SImode);
5990
5991 emit_insn (gen_movsi (reg, operands[1]));
5992 operands[1] = gen_lowpart (HImode, reg);
5993 }
5994
5995 /* ??? We shouldn't really get invalid addresses here, but this can
5996 happen if we are passed a SP (never OK for HImode/QImode) or
5997 virtual register (also rejected as illegitimate for HImode/QImode)
5998 relative address. */
5999 /* ??? This should perhaps be fixed elsewhere, for instance, in
6000 fixup_stack_1, by checking for other kinds of invalid addresses,
6001 e.g. a bare reference to a virtual register. This may confuse the
6002 alpha though, which must handle this case differently. */
6003 if (GET_CODE (operands[0]) == MEM
6004 && !memory_address_p (GET_MODE (operands[0]),
6005 XEXP (operands[0], 0)))
6006 operands[0]
6007 = replace_equiv_address (operands[0],
6008 copy_to_reg (XEXP (operands[0], 0)));
6009
6010 if (GET_CODE (operands[1]) == MEM
6011 && !memory_address_p (GET_MODE (operands[1]),
6012 XEXP (operands[1], 0)))
6013 operands[1]
6014 = replace_equiv_address (operands[1],
6015 copy_to_reg (XEXP (operands[1], 0)));
6016
6017 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6018 {
6019 rtx reg = gen_reg_rtx (SImode);
6020
6021 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6022 operands[1] = gen_lowpart (HImode, reg);
6023 }
6024
6025 if (GET_CODE (operands[0]) == MEM)
6026 operands[1] = force_reg (HImode, operands[1]);
6027 }
6028 else if (GET_CODE (operands[1]) == CONST_INT
6029 && !satisfies_constraint_I (operands[1]))
6030 {
6031 /* Handle loading a large integer during reload. */
6032
6033 /* Writing a constant to memory needs a scratch, which should
6034 be handled with SECONDARY_RELOADs. */
6035 gcc_assert (GET_CODE (operands[0]) == REG);
6036
6037 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6038 emit_insn (gen_movsi (operands[0], operands[1]));
6039 DONE;
6040 }
6041 }
6042 "
6043 )
6044
;; Thumb-1 HImode move alternatives: register copy via ADD #0, LDRH,
;; STRH, hi/lo register MOVs, and MOV of a small immediate.  The LDRH
;; case rewrites SP-based addresses, since SP cannot be a base for
;; halfword loads in Thumb-1 (see inline comment).
6045 (define_insn "*thumb1_movhi_insn"
6046 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6047 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6048 "TARGET_THUMB1
6049 && ( register_operand (operands[0], HImode)
6050 || register_operand (operands[1], HImode))"
6051 "*
6052 switch (which_alternative)
6053 {
6054 case 0: return \"add %0, %1, #0\";
6055 case 2: return \"strh %1, %0\";
6056 case 3: return \"mov %0, %1\";
6057 case 4: return \"mov %0, %1\";
6058 case 5: return \"mov %0, %1\";
6059 default: gcc_unreachable ();
6060 case 1:
6061 /* The stack pointer can end up being taken as an index register.
6062 Catch this case here and deal with it. */
6063 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6064 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
6065 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6066 {
6067 rtx ops[2];
6068 ops[0] = operands[0];
6069 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6070
6071 output_asm_insn (\"mov %0, %1\", ops);
6072
6073 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6074
6075 }
6076 return \"ldrh %0, %1\";
6077 }"
6078 [(set_attr "length" "2,4,2,2,2,2")
6079 (set_attr "type" "*,load1,store1,*,*,*")
6080 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6081
6082
;; movhi_bytes: assemble a halfword from two zero-extended byte loads
;; ORed together; op4/op5 select which byte is shifted left by 8
;; according to BYTES_BIG_ENDIAN.  Used on ARM when LDRH is not
;; available (pre-ARMv4 path of movhi).
6083 (define_expand "movhi_bytes"
6084 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6085 (set (match_dup 3)
6086 (zero_extend:SI (match_dup 6)))
6087 (set (match_operand:SI 0 "" "")
6088 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6089 "TARGET_ARM"
6090 "
6091 {
6092 rtx mem1, mem2;
6093 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6094
6095 mem1 = change_address (operands[1], QImode, addr);
6096 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
6097 operands[0] = gen_lowpart (SImode, operands[0]);
6098 operands[1] = mem1;
6099 operands[2] = gen_reg_rtx (SImode);
6100 operands[3] = gen_reg_rtx (SImode);
6101 operands[6] = mem2;
6102
6103 if (BYTES_BIG_ENDIAN)
6104 {
6105 operands[4] = operands[2];
6106 operands[5] = operands[3];
6107 }
6108 else
6109 {
6110 operands[4] = operands[3];
6111 operands[5] = operands[2];
6112 }
6113 }"
6114 )
6115
;; movhi_bigend: big-endian halfword load built from a word access --
;; rotate the word so the halfword occupies the top 16 bits, then
;; arithmetic-shift it back down, and take the HImode lowpart.
6116 (define_expand "movhi_bigend"
6117 [(set (match_dup 2)
6118 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6119 (const_int 16)))
6120 (set (match_dup 3)
6121 (ashiftrt:SI (match_dup 2) (const_int 16)))
6122 (set (match_operand:HI 0 "s_register_operand" "")
6123 (match_dup 4))]
6124 "TARGET_ARM"
6125 "
6126 operands[2] = gen_reg_rtx (SImode);
6127 operands[3] = gen_reg_rtx (SImode);
6128 operands[4] = gen_lowpart (HImode, operands[3]);
6129 "
6130 )
6131
6132 ;; Pattern to recognize insn generated default case above
;; HImode reg/mem moves when LDRH/STRH are available (ARMv4+):
;; MOV, MVN of the inverted "K" constant, STRH, LDRH.
6133 (define_insn "*movhi_insn_arch4"
6134 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6135 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6136 "TARGET_ARM
6137 && arm_arch4
6138 && (register_operand (operands[0], HImode)
6139 || register_operand (operands[1], HImode))"
6140 "@
6141 mov%?\\t%0, %1\\t%@ movhi
6142 mvn%?\\t%0, #%B1\\t%@ movhi
6143 str%(h%)\\t%1, %0\\t%@ movhi
6144 ldr%(h%)\\t%0, %1\\t%@ movhi"
6145 [(set_attr "type" "*,*,store1,load1")
6146 (set_attr "predicable" "yes")
6147 (set_attr "insn" "mov,mvn,*,*")
6148 (set_attr "pool_range" "*,*,*,256")
6149 (set_attr "neg_pool_range" "*,*,*,244")]
6150 )
6151
;; Register/immediate HImode moves only (no memory): plain MOV, or MVN
;; of the bitwise-inverse for "K" constants.
6152 (define_insn "*movhi_bytes"
6153 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
6154 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
6155 "TARGET_ARM"
6156 "@
6157 mov%?\\t%0, %1\\t%@ movhi
6158 mvn%?\\t%0, #%B1\\t%@ movhi"
6159 [(set_attr "predicable" "yes")
6160 (set_attr "insn" "mov,mvn")]
6161 )
6162
;; thumb_movhi_clobber: HImode store with a DImode scratch available.
;; Only the directly-addressable, low-register case is implemented;
;; everything else aborts (see the XXX below).
6163 (define_expand "thumb_movhi_clobber"
6164 [(set (match_operand:HI 0 "memory_operand" "")
6165 (match_operand:HI 1 "register_operand" ""))
6166 (clobber (match_operand:DI 2 "register_operand" ""))]
6167 "TARGET_THUMB1"
6168 "
6169 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6170 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6171 {
6172 emit_insn (gen_movhi (operands[0], operands[1]));
6173 DONE;
6174 }
6175 /* XXX Fixme, need to handle other cases here as well. */
6176 gcc_unreachable ();
6177 "
6178 )
6179
6180 ;; We use a DImode scratch because we may occasionally need an additional
6181 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6182 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6183 (define_expand "reload_outhi"
6184 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6185 (match_operand:HI 1 "s_register_operand" "r")
6186 (match_operand:DI 2 "s_register_operand" "=&l")])]
6187 "TARGET_EITHER"
6188 "if (TARGET_ARM)
6189 arm_reload_out_hi (operands);
6190 else
6191 thumb_reload_out_hi (operands);
6192 DONE;
6193 "
6194 )
6195
;; Reload an HImode value in from memory.  NOTE(review): the Thumb
;; path calls thumb_reload_out_hi, the same helper as reload_outhi --
;; presumably it handles both directions; confirm against arm.c.
6196 (define_expand "reload_inhi"
6197 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6198 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6199 (match_operand:DI 2 "s_register_operand" "=&r")])]
6200 "TARGET_EITHER"
6201 "
6202 if (TARGET_ARM)
6203 arm_reload_in_hi (operands);
6204 else
6205 thumb_reload_out_hi (operands);
6206 DONE;
6207 ")
6208
;; movqi: expand QImode moves.  Before reload: constants go through an
;; SImode pseudo (masked to 8 bits on Thumb so a MOVS immediate is
;; more likely usable); Thumb addresses illegitimate for QImode are
;; rewritten through a register; loads are zero-extended into SImode
;; when optimizing; memory destinations get a register source.
;; During reload, only the Thumb large-constant-to-register case is
;; handled here (via an SImode subreg move).
6209 (define_expand "movqi"
6210 [(set (match_operand:QI 0 "general_operand" "")
6211 (match_operand:QI 1 "general_operand" ""))]
6212 "TARGET_EITHER"
6213 "
6214 /* Everything except mem = const or mem = mem can be done easily */
6215
6216 if (can_create_pseudo_p ())
6217 {
6218 if (GET_CODE (operands[1]) == CONST_INT)
6219 {
6220 rtx reg = gen_reg_rtx (SImode);
6221
6222 /* For thumb we want an unsigned immediate, then we are more likely
6223 to be able to use a movs insn. */
6224 if (TARGET_THUMB)
6225 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6226
6227 emit_insn (gen_movsi (reg, operands[1]));
6228 operands[1] = gen_lowpart (QImode, reg);
6229 }
6230
6231 if (TARGET_THUMB)
6232 {
6233 /* ??? We shouldn't really get invalid addresses here, but this can
6234 happen if we are passed a SP (never OK for HImode/QImode) or
6235 virtual register (also rejected as illegitimate for HImode/QImode)
6236 relative address. */
6237 /* ??? This should perhaps be fixed elsewhere, for instance, in
6238 fixup_stack_1, by checking for other kinds of invalid addresses,
6239 e.g. a bare reference to a virtual register. This may confuse the
6240 alpha though, which must handle this case differently. */
6241 if (GET_CODE (operands[0]) == MEM
6242 && !memory_address_p (GET_MODE (operands[0]),
6243 XEXP (operands[0], 0)))
6244 operands[0]
6245 = replace_equiv_address (operands[0],
6246 copy_to_reg (XEXP (operands[0], 0)));
6247 if (GET_CODE (operands[1]) == MEM
6248 && !memory_address_p (GET_MODE (operands[1]),
6249 XEXP (operands[1], 0)))
6250 operands[1]
6251 = replace_equiv_address (operands[1],
6252 copy_to_reg (XEXP (operands[1], 0)));
6253 }
6254
6255 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6256 {
6257 rtx reg = gen_reg_rtx (SImode);
6258
6259 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6260 operands[1] = gen_lowpart (QImode, reg);
6261 }
6262
6263 if (GET_CODE (operands[0]) == MEM)
6264 operands[1] = force_reg (QImode, operands[1]);
6265 }
6266 else if (TARGET_THUMB
6267 && GET_CODE (operands[1]) == CONST_INT
6268 && !satisfies_constraint_I (operands[1]))
6269 {
6270 /* Handle loading a large integer during reload. */
6271
6272 /* Writing a constant to memory needs a scratch, which should
6273 be handled with SECONDARY_RELOADs. */
6274 gcc_assert (GET_CODE (operands[0]) == REG);
6275
6276 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6277 emit_insn (gen_movsi (operands[0], operands[1]));
6278 DONE;
6279 }
6280 "
6281 )
6282
6283
;; QImode reg/mem moves for ARM and Thumb-2.  The "l"/"Uu"
;; alternatives are restricted to Thumb-2 (arch attr "t2") and use the
;; 2-byte encodings; the generic "m"/"r" alternatives are 4 bytes.
6284 (define_insn "*arm_movqi_insn"
6285 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6286 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6287 "TARGET_32BIT
6288 && ( register_operand (operands[0], QImode)
6289 || register_operand (operands[1], QImode))"
6290 "@
6291 mov%?\\t%0, %1
6292 mvn%?\\t%0, #%B1
6293 ldr%(b%)\\t%0, %1
6294 str%(b%)\\t%1, %0
6295 ldr%(b%)\\t%0, %1
6296 str%(b%)\\t%1, %0"
6297 [(set_attr "type" "*,*,load1,store1,load1,store1")
6298 (set_attr "insn" "mov,mvn,*,*,*,*")
6299 (set_attr "predicable" "yes")
6300 (set_attr "arch" "any,any,t2,t2,any,any")
6301 (set_attr "length" "4,4,2,2,4,4")]
6302 )
6303
;; Thumb-1 QImode alternatives, mirroring *thumb1_movhi_insn: ADD #0
;; register copy, LDRB, STRB, hi/lo MOVs, and MOV immediate.
6304 (define_insn "*thumb1_movqi_insn"
6305 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6306 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6307 "TARGET_THUMB1
6308 && ( register_operand (operands[0], QImode)
6309 || register_operand (operands[1], QImode))"
6310 "@
6311 add\\t%0, %1, #0
6312 ldrb\\t%0, %1
6313 strb\\t%1, %0
6314 mov\\t%0, %1
6315 mov\\t%0, %1
6316 mov\\t%0, %1"
6317 [(set_attr "length" "2")
6318 (set_attr "type" "*,load1,store1,*,*,*")
6319 (set_attr "insn" "*,*,*,mov,mov,mov")
6320 (set_attr "pool_range" "*,32,*,*,*,*")
6321 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6322
6323 ;; HFmode moves
;; movhf: HFmode (__fp16) moves.  Only legitimization done here is
;; forcing the source into a register for memory destinations (32-bit)
;; or non-register destinations (Thumb-1, before reload).
6324 (define_expand "movhf"
6325 [(set (match_operand:HF 0 "general_operand" "")
6326 (match_operand:HF 1 "general_operand" ""))]
6327 "TARGET_EITHER"
6328 "
6329 if (TARGET_32BIT)
6330 {
6331 if (GET_CODE (operands[0]) == MEM)
6332 operands[1] = force_reg (HFmode, operands[1]);
6333 }
6334 else /* TARGET_THUMB1 */
6335 {
6336 if (can_create_pseudo_p ())
6337 {
6338 if (GET_CODE (operands[0]) != REG)
6339 operands[1] = force_reg (HFmode, operands[1]);
6340 }
6341 }
6342 "
6343 )
6344
;; __fp16 moves through core registers when there is no hardware FP16
;; support.  Constants are materialized from their 16-bit target image:
;; a single MOVW where Thumb-2-era instructions exist, otherwise
;; MOV of the high byte followed by ORR of the low byte (length 8).
6345 (define_insn "*arm32_movhf"
6346 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6347 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6348 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6349 && ( s_register_operand (operands[0], HFmode)
6350 || s_register_operand (operands[1], HFmode))"
6351 "*
6352 switch (which_alternative)
6353 {
6354 case 0: /* ARM register from memory */
6355 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6356 case 1: /* memory from ARM register */
6357 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6358 case 2: /* ARM register from ARM register */
6359 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6360 case 3: /* ARM register from constant */
6361 {
6362 REAL_VALUE_TYPE r;
6363 long bits;
6364 rtx ops[4];
6365
6366 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6367 bits = real_to_target (NULL, &r, HFmode);
6368 ops[0] = operands[0];
6369 ops[1] = GEN_INT (bits);
6370 ops[2] = GEN_INT (bits & 0xff00);
6371 ops[3] = GEN_INT (bits & 0x00ff);
6372
6373 if (arm_arch_thumb2)
6374 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6375 else
6376 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6377 return \"\";
6378 }
6379 default:
6380 gcc_unreachable ();
6381 }
6382 "
6383 [(set_attr "conds" "unconditional")
6384 (set_attr "type" "load1,store1,*,*")
6385 (set_attr "insn" "*,*,mov,mov")
6386 (set_attr "length" "4,4,4,8")
6387 (set_attr "predicable" "yes")]
6388 )
6389
;; Thumb-1 __fp16 moves.  A label-relative source address means a
;; (word-sized) constant pool entry, so LDR is used; any other memory
;; source uses LDRH.
6390 (define_insn "*thumb1_movhf"
6391 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6392 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6393 "TARGET_THUMB1
6394 && ( s_register_operand (operands[0], HFmode)
6395 || s_register_operand (operands[1], HFmode))"
6396 "*
6397 switch (which_alternative)
6398 {
6399 case 1:
6400 {
6401 rtx addr;
6402 gcc_assert (GET_CODE(operands[1]) == MEM);
6403 addr = XEXP (operands[1], 0);
6404 if (GET_CODE (addr) == LABEL_REF
6405 || (GET_CODE (addr) == CONST
6406 && GET_CODE (XEXP (addr, 0)) == PLUS
6407 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6408 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6409 {
6410 /* Constant pool entry. */
6411 return \"ldr\\t%0, %1\";
6412 }
6413 return \"ldrh\\t%0, %1\";
6414 }
6415 case 2: return \"strh\\t%1, %0\";
6416 default: return \"mov\\t%0, %1\";
6417 }
6418 "
6419 [(set_attr "length" "2")
6420 (set_attr "type" "*,load1,store1,*,*")
6421 (set_attr "insn" "mov,*,*,mov,mov")
6422 (set_attr "pool_range" "*,1020,*,*,*")
6423 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6424
;; movsf: SFmode moves; like movhf, only force a register source for
;; memory destinations (32-bit) or non-register destinations (Thumb-1).
6425 (define_expand "movsf"
6426 [(set (match_operand:SF 0 "general_operand" "")
6427 (match_operand:SF 1 "general_operand" ""))]
6428 "TARGET_EITHER"
6429 "
6430 if (TARGET_32BIT)
6431 {
6432 if (GET_CODE (operands[0]) == MEM)
6433 operands[1] = force_reg (SFmode, operands[1]);
6434 }
6435 else /* TARGET_THUMB1 */
6436 {
6437 if (can_create_pseudo_p ())
6438 {
6439 if (GET_CODE (operands[0]) != REG)
6440 operands[1] = force_reg (SFmode, operands[1]);
6441 }
6442 }
6443 "
6444 )
6445
6446 ;; Transform a floating-point move of a constant into a core register into
6447 ;; an SImode operation.
6448 (define_split
6449 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6450 (match_operand:SF 1 "immediate_operand" ""))]
6451 "TARGET_EITHER
6452 && reload_completed
6453 && GET_CODE (operands[1]) == CONST_DOUBLE"
6454 [(set (match_dup 2) (match_dup 3))]
6455 "
6456 operands[2] = gen_lowpart (SImode, operands[0]);
6457 operands[3] = gen_lowpart (SImode, operands[1]);
6458 if (operands[2] == 0 || operands[3] == 0)
6459 FAIL;
6460 "
6461 )
6462
;; Soft-float SFmode moves in core registers: MOV reg-to-reg, LDR from
;; memory or the literal pool ("E" constant), STR to memory.
6463 (define_insn "*arm_movsf_soft_insn"
6464 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6465 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6466 "TARGET_32BIT
6467 && TARGET_SOFT_FLOAT
6468 && (GET_CODE (operands[0]) != MEM
6469 || register_operand (operands[1], SFmode))"
6470 "@
6471 mov%?\\t%0, %1
6472 ldr%?\\t%0, %1\\t%@ float
6473 str%?\\t%1, %0\\t%@ float"
6474 [(set_attr "predicable" "yes")
6475 (set_attr "type" "*,load1,store1")
6476 (set_attr "insn" "mov,*,*")
6477 (set_attr "pool_range" "*,4096,*")
6478 (set_attr "arm_neg_pool_range" "*,4084,*")
6479 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6480 )
6481
6482 ;;; ??? This should have alternatives for constants.
;; Thumb-1 SFmode alternatives: ADD #0 register copy, single-register
;; LDMIA/STMIA for register-indirect addresses (">"), LDR/STR, and
;; hi/lo register MOVs.
6483 (define_insn "*thumb1_movsf_insn"
6484 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6485 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6486 "TARGET_THUMB1
6487 && ( register_operand (operands[0], SFmode)
6488 || register_operand (operands[1], SFmode))"
6489 "@
6490 add\\t%0, %1, #0
6491 ldmia\\t%1, {%0}
6492 stmia\\t%0, {%1}
6493 ldr\\t%0, %1
6494 str\\t%1, %0
6495 mov\\t%0, %1
6496 mov\\t%0, %1"
6497 [(set_attr "length" "2")
6498 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6499 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6500 (set_attr "insn" "*,*,*,*,*,mov,mov")
6501 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6502 )
6503
;; movdf: DFmode moves; only force a register source for memory
;; destinations (32-bit) or non-register destinations (Thumb).
6504 (define_expand "movdf"
6505 [(set (match_operand:DF 0 "general_operand" "")
6506 (match_operand:DF 1 "general_operand" ""))]
6507 "TARGET_EITHER"
6508 "
6509 if (TARGET_32BIT)
6510 {
6511 if (GET_CODE (operands[0]) == MEM)
6512 operands[1] = force_reg (DFmode, operands[1]);
6513 }
6514 else /* TARGET_THUMB */
6515 {
6516 if (can_create_pseudo_p ())
6517 {
6518 if (GET_CODE (operands[0]) != REG)
6519 operands[1] = force_reg (DFmode, operands[1]);
6520 }
6521 }
6522 "
6523 )
6524
6525 ;; Reloading a df mode value stored in integer regs to memory can require a
6526 ;; scratch reg.
;; reload_outdf: compute a plain-register address into scratch op2
;; for each address form (REG, POST_INC/PRE_DEC handled as a straight
;; DImode move, PRE_INC/POST_DEC by adjusting the base, PLUS by an
;; explicit add), emit the store, then undo the POST_DEC adjustment.
6527 (define_expand "reload_outdf"
6528 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6529 (match_operand:DF 1 "s_register_operand" "r")
6530 (match_operand:SI 2 "s_register_operand" "=&r")]
6531 "TARGET_THUMB2"
6532 "
6533 {
6534 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6535
6536 if (code == REG)
6537 operands[2] = XEXP (operands[0], 0);
6538 else if (code == POST_INC || code == PRE_DEC)
6539 {
6540 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6541 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6542 emit_insn (gen_movdi (operands[0], operands[1]));
6543 DONE;
6544 }
6545 else if (code == PRE_INC)
6546 {
6547 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6548
6549 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6550 operands[2] = reg;
6551 }
6552 else if (code == POST_DEC)
6553 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6554 else
6555 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6556 XEXP (XEXP (operands[0], 0), 1)));
6557
6558 emit_insn (gen_rtx_SET (VOIDmode,
6559 replace_equiv_address (operands[0], operands[2]),
6560 operands[1]));
6561
6562 if (code == POST_DEC)
6563 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6564
6565 DONE;
6566 }"
6567 )
6568
;; Soft-float DFmode moves.  The three register/constant alternatives
;; are emitted as "#" and split later; the memory alternatives are
;; handled by output_move_double.
6569 (define_insn "*movdf_soft_insn"
6570 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6571 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6572 "TARGET_32BIT && TARGET_SOFT_FLOAT
6573 && ( register_operand (operands[0], DFmode)
6574 || register_operand (operands[1], DFmode))"
6575 "*
6576 switch (which_alternative)
6577 {
6578 case 0:
6579 case 1:
6580 case 2:
6581 return \"#\";
6582 default:
6583 return output_move_double (operands, true, NULL);
6584 }
6585 "
6586 [(set_attr "length" "8,12,16,8,8")
6587 (set_attr "type" "*,*,*,load2,store2")
6588 (set_attr "pool_range" "*,*,*,1020,*")
6589 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6590 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6591 )
6592
6593 ;;; ??? This should have alternatives for constants.
6594 ;;; ??? This was originally identical to the movdi_insn pattern.
6595 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6596 ;;; thumb_reorg with a memory reference.
;; Thumb-1 DFmode (register-pair) moves.  Register copies order the
;; two halves so an overlapping source is not clobbered; memory forms
;; use LDMIA/STMIA or explicit word loads/stores (%H0/%H1 name the
;; high word of a pair).
6597 (define_insn "*thumb_movdf_insn"
6598 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6599 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6600 "TARGET_THUMB1
6601 && ( register_operand (operands[0], DFmode)
6602 || register_operand (operands[1], DFmode))"
6603 "*
6604 switch (which_alternative)
6605 {
6606 default:
6607 case 0:
6608 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6609 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6610 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6611 case 1:
6612 return \"ldmia\\t%1, {%0, %H0}\";
6613 case 2:
6614 return \"stmia\\t%0, {%1, %H1}\";
6615 case 3:
6616 return thumb_load_double_from_address (operands);
6617 case 4:
6618 operands[2] = gen_rtx_MEM (SImode,
6619 plus_constant (XEXP (operands[0], 0), 4));
6620 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6621 return \"\";
6622 case 5:
6623 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6624 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6625 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6626 }
6627 "
6628 [(set_attr "length" "4,2,2,6,4,4")
6629 (set_attr "type" "*,load2,store2,load2,store2,*")
6630 (set_attr "insn" "*,*,*,*,*,mov")
6631 (set_attr "pool_range" "*,*,*,1020,*,*")]
6632 )
6633
;; XFmode (extended-precision FPA) move expander.  Mem-to-mem moves
;; are not supported, so force the source into a register when the
;; destination is memory.
6634 (define_expand "movxf"
6635 [(set (match_operand:XF 0 "general_operand" "")
6636 (match_operand:XF 1 "general_operand" ""))]
6637 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6638 "
6639 if (GET_CODE (operands[0]) == MEM)
6640 operands[1] = force_reg (XFmode, operands[1]);
6641 "
6642 )
6643
6644 \f
6645
6646 ;; load- and store-multiple insns
6647 ;; The arm can load/store any set of registers, provided that they are in
6648 ;; ascending order, but these expanders assume a contiguous set.
6649
;; Expand a load-multiple into an LDM parallel built by
;; arm_gen_load_multiple.  Only 2-14 consecutive core registers
;; starting at operand 0 are accepted; anything else FAILs so the
;; generic code falls back to individual loads.
6650 (define_expand "load_multiple"
6651 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6652 (match_operand:SI 1 "" ""))
6653 (use (match_operand:SI 2 "" ""))])]
6654 "TARGET_32BIT"
6655 {
6656 HOST_WIDE_INT offset = 0;
6657
6658 /* Support only fixed point registers. */
6659 if (GET_CODE (operands[2]) != CONST_INT
6660 || INTVAL (operands[2]) > 14
6661 || INTVAL (operands[2]) < 2
6662 || GET_CODE (operands[1]) != MEM
6663 || GET_CODE (operands[0]) != REG
6664 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6665 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6666 FAIL;
6667
6668 operands[3]
6669 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6670 INTVAL (operands[2]),
6671 force_reg (SImode, XEXP (operands[1], 0)),
6672 FALSE, operands[1], &offset);
6673 })
6674
;; Expand a store-multiple into an STM parallel built by
;; arm_gen_store_multiple.  Mirror image of "load_multiple": only
;; 2-14 consecutive core registers starting at operand 1, otherwise
;; FAIL.
6675 (define_expand "store_multiple"
6676 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6677 (match_operand:SI 1 "" ""))
6678 (use (match_operand:SI 2 "" ""))])]
6679 "TARGET_32BIT"
6680 {
6681 HOST_WIDE_INT offset = 0;
6682
6683 /* Support only fixed point registers. */
6684 if (GET_CODE (operands[2]) != CONST_INT
6685 || INTVAL (operands[2]) > 14
6686 || INTVAL (operands[2]) < 2
6687 || GET_CODE (operands[1]) != REG
6688 || GET_CODE (operands[0]) != MEM
6689 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6690 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6691 FAIL;
6692
6693 operands[3]
6694 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6695 INTVAL (operands[2]),
6696 force_reg (SImode, XEXP (operands[0], 0)),
6697 FALSE, operands[0], &offset);
6698 })
6699
6700
6701 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6702 ;; We could let this apply for blocks of less than this, but it clobbers so
6703 ;; many registers that there is then probably a better way.
6704
;; Block-copy expander.  Operand 2 is the byte count, operand 3 the
;; alignment.  The 32-bit path delegates entirely to
;; arm_gen_movmemqi; the Thumb-1 path only handles word-aligned
;; copies of at most 48 bytes.
6705 (define_expand "movmemqi"
6706 [(match_operand:BLK 0 "general_operand" "")
6707 (match_operand:BLK 1 "general_operand" "")
6708 (match_operand:SI 2 "const_int_operand" "")
6709 (match_operand:SI 3 "const_int_operand" "")]
6710 "TARGET_EITHER"
6711 "
6712 if (TARGET_32BIT)
6713 {
6714 if (arm_gen_movmemqi (operands))
6715 DONE;
6716 FAIL;
6717 }
6718 else /* TARGET_THUMB1 */
6719 {
6720 if ( INTVAL (operands[3]) != 4
6721 || INTVAL (operands[2]) > 48)
6722 FAIL;
6723
6724 thumb_expand_movmemqi (operands);
6725 DONE;
6726 }
6727 "
6728 )
6729
6730 ;; Thumb block-move insns
6731
;; Copy three words from [%3] to [%2], post-incrementing both
;; pointers by 12 (operands 0/1 receive the updated addresses).
;; Needs three low-register scratches to stage the data.
6732 (define_insn "movmem12b"
6733 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6734 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6735 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6736 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6737 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6738 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6739 (set (match_operand:SI 0 "register_operand" "=l")
6740 (plus:SI (match_dup 2) (const_int 12)))
6741 (set (match_operand:SI 1 "register_operand" "=l")
6742 (plus:SI (match_dup 3) (const_int 12)))
6743 (clobber (match_scratch:SI 4 "=&l"))
6744 (clobber (match_scratch:SI 5 "=&l"))
6745 (clobber (match_scratch:SI 6 "=&l"))]
6746 "TARGET_THUMB1"
6747 "* return thumb_output_move_mem_multiple (3, operands);"
6748 [(set_attr "length" "4")
6749 ; This isn't entirely accurate... It loads as well, but in terms of
6750 ; scheduling the following insn it is better to consider it as a store
6751 (set_attr "type" "store3")]
6752 )
6753
;; Two-word variant of movmem12b: copy two words from [%3] to [%2]
;; and post-increment both pointers by 8.
6754 (define_insn "movmem8b"
6755 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6756 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6757 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6758 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6759 (set (match_operand:SI 0 "register_operand" "=l")
6760 (plus:SI (match_dup 2) (const_int 8)))
6761 (set (match_operand:SI 1 "register_operand" "=l")
6762 (plus:SI (match_dup 3) (const_int 8)))
6763 (clobber (match_scratch:SI 4 "=&l"))
6764 (clobber (match_scratch:SI 5 "=&l"))]
6765 "TARGET_THUMB1"
6766 "* return thumb_output_move_mem_multiple (2, operands);"
6767 [(set_attr "length" "4")
6768 ; This isn't entirely accurate... It loads as well, but in terms of
6769 ; scheduling the following insn it is better to consider it as a store
6770 (set_attr "type" "store2")]
6771 )
6772
6773 \f
6774
6775 ;; Compare & branch insns
6776 ;; The range calculations are based as follows:
6777 ;; For forward branches, the address calculation returns the address of
6778 ;; the next instruction. This is 2 beyond the branch instruction.
6779 ;; For backward branches, the address calculation returns the address of
6780 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6781 ;; instruction for the shortest sequence, and 4 before the branch instruction
6782 ;; if we have to jump around an unconditional branch.
6783 ;; To the basic branch range the PC offset must be added (this is +4).
6784 ;; So for forward branches we have
6785 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6786 ;; And for backward branches we have
6787 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6788 ;;
6789 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6790 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6791
;; SImode compare-and-branch expander.  On 32-bit targets it goes
;; through cbranch_cc (operand 2 forced to a register unless it is a
;; valid add operand); on Thumb-1 it prefers the negated-immediate
;; pattern when possible, otherwise legitimizes operand 2 for a plain
;; cmp.
6792 (define_expand "cbranchsi4"
6793 [(set (pc) (if_then_else
6794 (match_operator 0 "arm_comparison_operator"
6795 [(match_operand:SI 1 "s_register_operand" "")
6796 (match_operand:SI 2 "nonmemory_operand" "")])
6797 (label_ref (match_operand 3 "" ""))
6798 (pc)))]
6799 "TARGET_THUMB1 || TARGET_32BIT"
6800 "
6801 if (!TARGET_THUMB1)
6802 {
6803 if (!arm_add_operand (operands[2], SImode))
6804 operands[2] = force_reg (SImode, operands[2]);
6805 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6806 operands[3]));
6807 DONE;
6808 }
6809 if (thumb1_cmpneg_operand (operands[2], SImode))
6810 {
6811 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6812 operands[3], operands[0]));
6813 DONE;
6814 }
6815 if (!thumb1_cmp_operand (operands[2], SImode))
6816 operands[2] = force_reg (SImode, operands[2]);
6817 ")
6818
6819 ;; A pattern to recognize a special situation and optimize for it.
6820 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6821 ;; due to the available addressing modes. Hence, convert a signed comparison
6822 ;; with zero into an unsigned comparison with 127 if possible.
;; I.e. (x:QI >= 0) becomes (zero_extend (x) <= 127) and
;; (x:QI < 0) becomes (zero_extend (x) > 127), then branch via
;; cbranchsi4 on the zero-extended value.
6823 (define_expand "cbranchqi4"
6824 [(set (pc) (if_then_else
6825 (match_operator 0 "lt_ge_comparison_operator"
6826 [(match_operand:QI 1 "memory_operand" "")
6827 (match_operand:QI 2 "const0_operand" "")])
6828 (label_ref (match_operand 3 "" ""))
6829 (pc)))]
6830 "TARGET_THUMB1"
6831 {
6832 rtx xops[4];
6833 xops[1] = gen_reg_rtx (SImode);
6834 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6835 xops[2] = GEN_INT (127);
6836 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6837 VOIDmode, xops[1], xops[2]);
6838 xops[3] = operands[3];
6839 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6840 DONE;
6841 })
6842
;; SFmode compare-and-branch: simply delegate to cbranch_cc, which
;; materializes the FP comparison in the CC register.
6843 (define_expand "cbranchsf4"
6844 [(set (pc) (if_then_else
6845 (match_operator 0 "arm_comparison_operator"
6846 [(match_operand:SF 1 "s_register_operand" "")
6847 (match_operand:SF 2 "arm_float_compare_operand" "")])
6848 (label_ref (match_operand 3 "" ""))
6849 (pc)))]
6850 "TARGET_32BIT && TARGET_HARD_FLOAT"
6851 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6852 operands[3])); DONE;"
6853 )
6854
;; DFmode compare-and-branch via cbranch_cc.  Not available on
;; single-precision-only VFP variants.
6855 (define_expand "cbranchdf4"
6856 [(set (pc) (if_then_else
6857 (match_operator 0 "arm_comparison_operator"
6858 [(match_operand:DF 1 "s_register_operand" "")
6859 (match_operand:DF 2 "arm_float_compare_operand" "")])
6860 (label_ref (match_operand 3 "" ""))
6861 (pc)))]
6862 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6863 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6864 operands[3])); DONE;"
6865 )
6866
;; DImode compare-and-branch.  GT/LE/GTU/LEU are not handled by
;; arm_gen_compare_reg, so they are rewritten as the mirrored
;; comparison (e.g. a > b  ->  b < a) with the operands swapped
;; before delegating to cbranch_cc.
6867 (define_expand "cbranchdi4"
6868 [(set (pc) (if_then_else
6869 (match_operator 0 "arm_comparison_operator"
6870 [(match_operand:DI 1 "cmpdi_operand" "")
6871 (match_operand:DI 2 "cmpdi_operand" "")])
6872 (label_ref (match_operand 3 "" ""))
6873 (pc)))]
6874 "TARGET_32BIT"
6875 "{
6876 rtx swap = NULL_RTX;
6877 enum rtx_code code = GET_CODE (operands[0]);
6878
6879 /* We should not have two constants. */
6880 gcc_assert (GET_MODE (operands[1]) == DImode
6881 || GET_MODE (operands[2]) == DImode);
6882
6883 /* Flip unimplemented DImode comparisons to a form that
6884 arm_gen_compare_reg can handle. */
6885 switch (code)
6886 {
6887 case GT:
6888 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6889 case LE:
6890 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6891 case GTU:
6892 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6893 case LEU:
6894 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6895 default:
6896 break;
6897 }
6898 if (swap)
6899 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6900 operands[3]));
6901 else
6902 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6903 operands[3]));
6904 DONE;
6905 }"
6906 )
6907
;; Thumb-1 compare-and-branch.  The per-function record of the last
;; flag-setting insn (cfun->machine->thumb1_cc_*) lets us skip the
;; cmp when the flags already reflect this comparison.  The branch is
;; emitted as a short conditional branch, a conditional skip over an
;; unconditional branch, or a skip over a BL "far jump", depending on
;; the computed length attribute.
6908 (define_insn "cbranchsi4_insn"
6909 [(set (pc) (if_then_else
6910 (match_operator 0 "arm_comparison_operator"
6911 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6912 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6913 (label_ref (match_operand 3 "" ""))
6914 (pc)))]
6915 "TARGET_THUMB1"
6916 {
6917 rtx t = cfun->machine->thumb1_cc_insn;
6918 if (t != NULL_RTX)
6919 {
;; The cached flags are only reusable if they were set from the
;; same operands and in a compatible CC mode.
6920 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6921 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6922 t = NULL_RTX;
6923 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6924 {
6925 if (!noov_comparison_operator (operands[0], VOIDmode))
6926 t = NULL_RTX;
6927 }
6928 else if (cfun->machine->thumb1_cc_mode != CCmode)
6929 t = NULL_RTX;
6930 }
6931 if (t == NULL_RTX)
6932 {
;; No reusable flags: emit the cmp and record it for later insns.
6933 output_asm_insn ("cmp\t%1, %2", operands);
6934 cfun->machine->thumb1_cc_insn = insn;
6935 cfun->machine->thumb1_cc_op0 = operands[1];
6936 cfun->machine->thumb1_cc_op1 = operands[2];
6937 cfun->machine->thumb1_cc_mode = CCmode;
6938 }
6939 else
6940 /* Ensure we emit the right type of condition code on the jump. */
6941 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6942 CC_REGNUM);
6943
6944 switch (get_attr_length (insn))
6945 {
6946 case 4: return \"b%d0\\t%l3\";
6947 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6948 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6949 }
6950 }
6951 [(set (attr "far_jump")
6952 (if_then_else
6953 (eq_attr "length" "8")
6954 (const_string "yes")
6955 (const_string "no")))
6956 (set (attr "length")
6957 (if_then_else
6958 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6959 (le (minus (match_dup 3) (pc)) (const_int 256)))
6960 (const_int 4)
6961 (if_then_else
6962 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6963 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6964 (const_int 6)
6965 (const_int 8))))]
6966 )
6967
;; Compare-and-branch against a negatable immediate: the comparison
;; is performed as "add scratch, %1, #-imm" (which sets the flags),
;; then the conditional branch is chosen by length as in
;; cbranchsi4_insn.
6968 (define_insn "cbranchsi4_scratch"
6969 [(set (pc) (if_then_else
6970 (match_operator 4 "arm_comparison_operator"
6971 [(match_operand:SI 1 "s_register_operand" "l,0")
6972 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6973 (label_ref (match_operand 3 "" ""))
6974 (pc)))
6975 (clobber (match_scratch:SI 0 "=l,l"))]
6976 "TARGET_THUMB1"
6977 "*
6978 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6979
6980 switch (get_attr_length (insn))
6981 {
6982 case 4: return \"b%d4\\t%l3\";
6983 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6984 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6985 }
6986 "
6987 [(set (attr "far_jump")
6988 (if_then_else
6989 (eq_attr "length" "8")
6990 (const_string "yes")
6991 (const_string "no")))
6992 (set (attr "length")
6993 (if_then_else
6994 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6995 (le (minus (match_dup 3) (pc)) (const_int 256)))
6996 (const_int 4)
6997 (if_then_else
6998 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6999 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7000 (const_int 6)
7001 (const_int 8))))]
7002 )
7003
7004 ;; Two peepholes to generate subtract of 0 instead of a move if the
7005 ;; condition codes will be useful.
;; Variant 1: the branch tests the move's SOURCE register; rewrite
;; "mov d,s; branch on s" as "sub d,s,#0; branch on d" so the sub
;; sets the flags for free.
7006 (define_peephole2
7007 [(set (match_operand:SI 0 "low_register_operand" "")
7008 (match_operand:SI 1 "low_register_operand" ""))
7009 (set (pc)
7010 (if_then_else (match_operator 2 "arm_comparison_operator"
7011 [(match_dup 1) (const_int 0)])
7012 (label_ref (match_operand 3 "" ""))
7013 (pc)))]
7014 "TARGET_THUMB1"
7015 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7016 (set (pc)
7017 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7018 (label_ref (match_dup 3))
7019 (pc)))]
7020 "")
7021
7022 ;; Sigh! This variant shouldn't be needed, but combine often fails to
7023 ;; merge cases like this because the op1 is a hard register in
7024 ;; arm_class_likely_spilled_p.
;; Variant 2: as above, but the branch tests the move's DESTINATION
;; register.
7025 (define_peephole2
7026 [(set (match_operand:SI 0 "low_register_operand" "")
7027 (match_operand:SI 1 "low_register_operand" ""))
7028 (set (pc)
7029 (if_then_else (match_operator 2 "arm_comparison_operator"
7030 [(match_dup 0) (const_int 0)])
7031 (label_ref (match_operand 3 "" ""))
7032 (pc)))]
7033 "TARGET_THUMB1"
7034 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
7035 (set (pc)
7036 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
7037 (label_ref (match_dup 3))
7038 (pc)))]
7039 "")
7040
;; Equality branch against a negated register: emitted as a single
;; CMN (compare-negative) followed by the length-selected branch
;; sequence.
7041 (define_insn "*negated_cbranchsi4"
7042 [(set (pc)
7043 (if_then_else
7044 (match_operator 0 "equality_operator"
7045 [(match_operand:SI 1 "s_register_operand" "l")
7046 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7047 (label_ref (match_operand 3 "" ""))
7048 (pc)))]
7049 "TARGET_THUMB1"
7050 "*
7051 output_asm_insn (\"cmn\\t%1, %2\", operands);
7052 switch (get_attr_length (insn))
7053 {
7054 case 4: return \"b%d0\\t%l3\";
7055 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7056 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7057 }
7058 "
7059 [(set (attr "far_jump")
7060 (if_then_else
7061 (eq_attr "length" "8")
7062 (const_string "yes")
7063 (const_string "no")))
7064 (set (attr "length")
7065 (if_then_else
7066 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7067 (le (minus (match_dup 3) (pc)) (const_int 256)))
7068 (const_int 4)
7069 (if_then_else
7070 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7071 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7072 (const_int 6)
7073 (const_int 8))))]
7074 )
7075
;; Branch on a single bit of a register: shift the tested bit
;; (position %2) into the sign bit of a scratch with LSL, which sets
;; the N flag, then branch conditionally.
7076 (define_insn "*tbit_cbranch"
7077 [(set (pc)
7078 (if_then_else
7079 (match_operator 0 "equality_operator"
7080 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7081 (const_int 1)
7082 (match_operand:SI 2 "const_int_operand" "i"))
7083 (const_int 0)])
7084 (label_ref (match_operand 3 "" ""))
7085 (pc)))
7086 (clobber (match_scratch:SI 4 "=l"))]
7087 "TARGET_THUMB1"
7088 "*
7089 {
7090 rtx op[3];
7091 op[0] = operands[4];
7092 op[1] = operands[1];
;; Shift count that lands bit %2 in bit 31.
7093 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7094
7095 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7096 switch (get_attr_length (insn))
7097 {
7098 case 4: return \"b%d0\\t%l3\";
7099 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7100 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7101 }
7102 }"
7103 [(set (attr "far_jump")
7104 (if_then_else
7105 (eq_attr "length" "8")
7106 (const_string "yes")
7107 (const_string "no")))
7108 (set (attr "length")
7109 (if_then_else
7110 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7111 (le (minus (match_dup 3) (pc)) (const_int 256)))
7112 (const_int 4)
7113 (if_then_else
7114 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7115 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7116 (const_int 6)
7117 (const_int 8))))]
7118 )
7119
;; Branch on the low %2 bits of a register: shift the unwanted high
;; bits out with LSL (count 32 - %2), leaving Z set iff the low bits
;; are all zero, then branch conditionally.
7120 (define_insn "*tlobits_cbranch"
7121 [(set (pc)
7122 (if_then_else
7123 (match_operator 0 "equality_operator"
7124 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7125 (match_operand:SI 2 "const_int_operand" "i")
7126 (const_int 0))
7127 (const_int 0)])
7128 (label_ref (match_operand 3 "" ""))
7129 (pc)))
7130 (clobber (match_scratch:SI 4 "=l"))]
7131 "TARGET_THUMB1"
7132 "*
7133 {
7134 rtx op[3];
7135 op[0] = operands[4];
7136 op[1] = operands[1];
7137 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7138
7139 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7140 switch (get_attr_length (insn))
7141 {
7142 case 4: return \"b%d0\\t%l3\";
7143 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7144 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7145 }
7146 }"
7147 [(set (attr "far_jump")
7148 (if_then_else
7149 (eq_attr "length" "8")
7150 (const_string "yes")
7151 (const_string "no")))
7152 (set (attr "length")
7153 (if_then_else
7154 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7155 (le (minus (match_dup 3) (pc)) (const_int 256)))
7156 (const_int 4)
7157 (if_then_else
7158 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7159 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7160 (const_int 6)
7161 (const_int 8))))]
7162 )
7163
;; Branch on (a AND b) ==/!= 0, emitted as TST followed by the
;; length-selected branch sequence.
7164 (define_insn "*tstsi3_cbranch"
7165 [(set (pc)
7166 (if_then_else
7167 (match_operator 3 "equality_operator"
7168 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7169 (match_operand:SI 1 "s_register_operand" "l"))
7170 (const_int 0)])
7171 (label_ref (match_operand 2 "" ""))
7172 (pc)))]
7173 "TARGET_THUMB1"
7174 "*
7175 {
7176 output_asm_insn (\"tst\\t%0, %1\", operands);
7177 switch (get_attr_length (insn))
7178 {
7179 case 4: return \"b%d3\\t%l2\";
7180 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7181 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7182 }
7183 }"
7184 [(set (attr "far_jump")
7185 (if_then_else
7186 (eq_attr "length" "8")
7187 (const_string "yes")
7188 (const_string "no")))
7189 (set (attr "length")
7190 (if_then_else
7191 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7192 (le (minus (match_dup 2) (pc)) (const_int 256)))
7193 (const_int 4)
7194 (if_then_else
7195 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7196 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7197 (const_int 6)
7198 (const_int 8))))]
7199 )
7200
;; Decrement-and-branch: store %2 - 1 into operand 0 and branch on
;; whether %2 was (non)zero.  Since the sub sets the flags from the
;; decremented value, the original test "%2 != 0" is recovered as the
;; unsigned compare "%2 >= 1" (GEU), and "%2 == 0" as LTU.
;; Alternatives 1-3 route the result through a low scratch because
;; reload cannot do output reloads on a jump insn (see comment in
;; the template).
7201 (define_insn "*cbranchne_decr1"
7202 [(set (pc)
7203 (if_then_else (match_operator 3 "equality_operator"
7204 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7205 (const_int 0)])
7206 (label_ref (match_operand 4 "" ""))
7207 (pc)))
7208 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7209 (plus:SI (match_dup 2) (const_int -1)))
7210 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7211 "TARGET_THUMB1"
7212 "*
7213 {
7214 rtx cond[2];
;; Rewrite the equality test as an unsigned compare against 1,
;; which is what the flags reflect after the sub.
7215 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7216 ? GEU : LTU),
7217 VOIDmode, operands[2], const1_rtx);
7218 cond[1] = operands[4];
7219
7220 if (which_alternative == 0)
7221 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7222 else if (which_alternative == 1)
7223 {
7224 /* We must provide an alternative for a hi reg because reload
7225 cannot handle output reloads on a jump instruction, but we
7226 can't subtract into that. Fortunately a mov from lo to hi
7227 does not clobber the condition codes. */
7228 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7229 output_asm_insn (\"mov\\t%0, %1\", operands);
7230 }
7231 else
7232 {
7233 /* Similarly, but the target is memory. */
7234 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7235 output_asm_insn (\"str\\t%1, %0\", operands);
7236 }
7237
;; Alternatives 1-3 carry an extra 2-byte insn, so normalize the
;; length before choosing the branch form.
7238 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7239 {
7240 case 4:
7241 output_asm_insn (\"b%d0\\t%l1\", cond);
7242 return \"\";
7243 case 6:
7244 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7245 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7246 default:
7247 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7248 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7249 }
7250 }
7251 "
7252 [(set (attr "far_jump")
7253 (if_then_else
7254 (ior (and (eq (symbol_ref ("which_alternative"))
7255 (const_int 0))
7256 (eq_attr "length" "8"))
7257 (eq_attr "length" "10"))
7258 (const_string "yes")
7259 (const_string "no")))
7260 (set_attr_alternative "length"
7261 [
7262 ;; Alternative 0
7263 (if_then_else
7264 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7265 (le (minus (match_dup 4) (pc)) (const_int 256)))
7266 (const_int 4)
7267 (if_then_else
7268 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7269 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7270 (const_int 6)
7271 (const_int 8)))
7272 ;; Alternative 1
7273 (if_then_else
7274 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7275 (le (minus (match_dup 4) (pc)) (const_int 256)))
7276 (const_int 6)
7277 (if_then_else
7278 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7279 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7280 (const_int 8)
7281 (const_int 10)))
7282 ;; Alternative 2
7283 (if_then_else
7284 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7285 (le (minus (match_dup 4) (pc)) (const_int 256)))
7286 (const_int 6)
7287 (if_then_else
7288 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7289 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7290 (const_int 8)
7291 (const_int 10)))
7292 ;; Alternative 3
7293 (if_then_else
7294 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7295 (le (minus (match_dup 4) (pc)) (const_int 256)))
7296 (const_int 6)
7297 (if_then_else
7298 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7299 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7300 (const_int 8)
7301 (const_int 10)))])]
7302 )
7303
;; Add-and-branch: compute %2 + %3 into operand 0 and branch on the
;; flags the add/sub sets.  Only EQ/NE/GE/LT survive the flag
;; semantics of the add.  A negative constant addend is emitted as a
;; sub of its negation.  Alternatives 2-5 stage the result through a
;; scratch (reload cannot output-reload a jump insn) and copy or
;; store it afterwards.
7304 (define_insn "*addsi3_cbranch"
7305 [(set (pc)
7306 (if_then_else
7307 (match_operator 4 "arm_comparison_operator"
7308 [(plus:SI
7309 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7310 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7311 (const_int 0)])
7312 (label_ref (match_operand 5 "" ""))
7313 (pc)))
7314 (set
7315 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7316 (plus:SI (match_dup 2) (match_dup 3)))
7317 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7318 "TARGET_THUMB1
7319 && (GET_CODE (operands[4]) == EQ
7320 || GET_CODE (operands[4]) == NE
7321 || GET_CODE (operands[4]) == GE
7322 || GET_CODE (operands[4]) == LT)"
7323 "*
7324 {
7325 rtx cond[3];
7326
;; For alternatives >= 2 the arithmetic goes into the scratch.
7327 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7328 cond[1] = operands[2];
7329 cond[2] = operands[3];
7330
7331 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7332 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7333 else
7334 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7335
;; Move the scratch to the real (hi-reg or memory) destination.
7336 if (which_alternative >= 2
7337 && which_alternative < 4)
7338 output_asm_insn (\"mov\\t%0, %1\", operands);
7339 else if (which_alternative >= 4)
7340 output_asm_insn (\"str\\t%1, %0\", operands);
7341
7342 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7343 {
7344 case 4:
7345 return \"b%d4\\t%l5\";
7346 case 6:
7347 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7348 default:
7349 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7350 }
7351 }
7352 "
7353 [(set (attr "far_jump")
7354 (if_then_else
7355 (ior (and (lt (symbol_ref ("which_alternative"))
7356 (const_int 2))
7357 (eq_attr "length" "8"))
7358 (eq_attr "length" "10"))
7359 (const_string "yes")
7360 (const_string "no")))
7361 (set (attr "length")
7362 (if_then_else
7363 (lt (symbol_ref ("which_alternative"))
7364 (const_int 2))
7365 (if_then_else
7366 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7367 (le (minus (match_dup 5) (pc)) (const_int 256)))
7368 (const_int 4)
7369 (if_then_else
7370 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7371 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7372 (const_int 6)
7373 (const_int 8)))
7374 (if_then_else
7375 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7376 (le (minus (match_dup 5) (pc)) (const_int 256)))
7377 (const_int 6)
7378 (if_then_else
7379 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7380 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7381 (const_int 8)
7382 (const_int 10)))))]
7383 )
7384
;; Branch on (a + b) compared with zero when the sum itself is dead.
;; Depending on the alternative this is a cmp against the negated
;; constant, a cmn against a register, or an add/sub into a scratch
;; just to set the flags.  Restricted to EQ/NE/GE/LT as in
;; *addsi3_cbranch.
7385 (define_insn "*addsi3_cbranch_scratch"
7386 [(set (pc)
7387 (if_then_else
7388 (match_operator 3 "arm_comparison_operator"
7389 [(plus:SI
7390 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7391 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7392 (const_int 0)])
7393 (label_ref (match_operand 4 "" ""))
7394 (pc)))
7395 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7396 "TARGET_THUMB1
7397 && (GET_CODE (operands[3]) == EQ
7398 || GET_CODE (operands[3]) == NE
7399 || GET_CODE (operands[3]) == GE
7400 || GET_CODE (operands[3]) == LT)"
7401 "*
7402 {
7403 switch (which_alternative)
7404 {
7405 case 0:
;; a + c cmp 0 is the same as a cmp -c.
7406 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7407 break;
7408 case 1:
7409 output_asm_insn (\"cmn\t%1, %2\", operands);
7410 break;
7411 case 2:
7412 if (INTVAL (operands[2]) < 0)
7413 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7414 else
7415 output_asm_insn (\"add\t%0, %1, %2\", operands);
7416 break;
7417 case 3:
7418 if (INTVAL (operands[2]) < 0)
7419 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7420 else
7421 output_asm_insn (\"add\t%0, %0, %2\", operands);
7422 break;
7423 }
7424
7425 switch (get_attr_length (insn))
7426 {
7427 case 4:
7428 return \"b%d3\\t%l4\";
7429 case 6:
7430 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7431 default:
7432 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7433 }
7434 }
7435 "
7436 [(set (attr "far_jump")
7437 (if_then_else
7438 (eq_attr "length" "8")
7439 (const_string "yes")
7440 (const_string "no")))
7441 (set (attr "length")
7442 (if_then_else
7443 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7444 (le (minus (match_dup 4) (pc)) (const_int 256)))
7445 (const_int 4)
7446 (if_then_else
7447 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7448 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7449 (const_int 6)
7450 (const_int 8))))]
7451 )
7452
7453
7454 ;; Comparison and test insns
7455
;; SImode compare.  Alternatives 0-1 are 16-bit Thumb-2 encodings;
;; alternative 3 turns a compare with a negatable immediate into CMN.
7456 (define_insn "*arm_cmpsi_insn"
7457 [(set (reg:CC CC_REGNUM)
7458 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7459 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7460 "TARGET_32BIT"
7461 "@
7462 cmp%?\\t%0, %1
7463 cmp%?\\t%0, %1
7464 cmp%?\\t%0, %1
7465 cmn%?\\t%0, #%n1"
7466 [(set_attr "conds" "set")
7467 (set_attr "arch" "t2,t2,any,any")
7468 (set_attr "length" "2,2,4,4")]
7469 )
7470
;; Compare a register with a shifted register, folding the shift
;; into the CMP's second operand.  Register-specified shift amounts
;; are ARM-only (arch "a").
7471 (define_insn "*cmpsi_shiftsi"
7472 [(set (reg:CC CC_REGNUM)
7473 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7474 (match_operator:SI 3 "shift_operator"
7475 [(match_operand:SI 1 "s_register_operand" "r,r")
7476 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7477 "TARGET_32BIT"
7478 "cmp%?\\t%0, %1%S3"
7479 [(set_attr "conds" "set")
7480 (set_attr "shift" "1")
7481 (set_attr "arch" "32,a")
7482 (set_attr "type" "alu_shift,alu_shift_reg")])
7483
;; As *cmpsi_shiftsi but with the operands of the RTL compare
;; swapped; CC_SWP mode records that the flags are for the reversed
;; comparison even though the same CMP instruction is emitted.
7484 (define_insn "*cmpsi_shiftsi_swp"
7485 [(set (reg:CC_SWP CC_REGNUM)
7486 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7487 [(match_operand:SI 1 "s_register_operand" "r,r")
7488 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7489 (match_operand:SI 0 "s_register_operand" "r,r")))]
7490 "TARGET_32BIT"
7491 "cmp%?\\t%0, %1%S3"
7492 [(set_attr "conds" "set")
7493 (set_attr "shift" "1")
7494 (set_attr "arch" "32,a")
7495 (set_attr "type" "alu_shift,alu_shift_reg")])
7496
;; Compare with a negated shifted register, emitted as CMN with the
;; shift folded in.  Only the Z flag is meaningful (CC_Z mode).
7497 (define_insn "*arm_cmpsi_negshiftsi_si"
7498 [(set (reg:CC_Z CC_REGNUM)
7499 (compare:CC_Z
7500 (neg:SI (match_operator:SI 1 "shift_operator"
7501 [(match_operand:SI 2 "s_register_operand" "r")
7502 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7503 (match_operand:SI 0 "s_register_operand" "r")))]
7504 "TARGET_ARM"
7505 "cmn%?\\t%0, %2%S1"
7506 [(set_attr "conds" "set")
7507 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7508 (const_string "alu_shift")
7509 (const_string "alu_shift_reg")))]
7510 )
7511
7512 ;; DImode comparisons. The generic code generates branches that
7513 ;; if-conversion can not reduce to a conditional compare, so we do
7514 ;; that directly.
7515
;; Signed/overflow-aware DImode compare: CMP on the low words, then
;; SBCS of the high words into a scratch.  CC_NCV records that only
;; N, C and V are valid afterwards.
7516 (define_insn "*arm_cmpdi_insn"
7517 [(set (reg:CC_NCV CC_REGNUM)
7518 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7519 (match_operand:DI 1 "arm_di_operand" "rDi")))
7520 (clobber (match_scratch:SI 2 "=r"))]
7521 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7522 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7523 [(set_attr "conds" "set")
7524 (set_attr "length" "8")]
7525 )
7526
;; Unsigned DImode compare: compare the high words first, then the
;; low words only if the high words were equal (CMPEQ).  CC_CZ:
;; only C and Z are valid.
7527 (define_insn "*arm_cmpdi_unsigned"
7528 [(set (reg:CC_CZ CC_REGNUM)
7529 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7530 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7531 "TARGET_ARM"
7532 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7533 [(set_attr "conds" "set")
7534 (set_attr "length" "8")]
7535 )
7536
;; DImode compare against zero: OR the two halves into a scratch
;; with the S bit set, leaving Z set iff the whole value is zero.
7537 (define_insn "*arm_cmpdi_zero"
7538 [(set (reg:CC_Z CC_REGNUM)
7539 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7540 (const_int 0)))
7541 (clobber (match_scratch:SI 1 "=r"))]
7542 "TARGET_32BIT"
7543 "orr%.\\t%1, %Q0, %R0"
7544 [(set_attr "conds" "set")]
7545 )
7546
;; Thumb-1 version of the DImode compare against zero; Thumb ORR
;; always sets the flags.
7547 (define_insn "*thumb_cmpdi_zero"
7548 [(set (reg:CC_Z CC_REGNUM)
7549 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7550 (const_int 0)))
7551 (clobber (match_scratch:SI 1 "=l"))]
7552 "TARGET_THUMB1"
7553 "orr\\t%1, %Q0, %R0"
7554 [(set_attr "conds" "set")
7555 (set_attr "length" "2")]
7556 )
7557
7558 ;; Cirrus SF compare instruction
;; Maverick Crunch single-precision compare; the result is
;; transferred to the ARM flags via the r15 destination.
7559 (define_insn "*cirrus_cmpsf"
7560 [(set (reg:CCFP CC_REGNUM)
7561 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7562 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7563 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7564 "cfcmps%?\\tr15, %V0, %V1"
7565 [(set_attr "type" "mav_farith")
7566 (set_attr "cirrus" "compare")]
7567 )
7568
7569 ;; Cirrus DF compare instruction
;; Maverick Crunch double-precision compare (see *cirrus_cmpsf).
7570 (define_insn "*cirrus_cmpdf"
7571 [(set (reg:CCFP CC_REGNUM)
7572 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7573 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7574 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7575 "cfcmpd%?\\tr15, %V0, %V1"
7576 [(set_attr "type" "mav_farith")
7577 (set_attr "cirrus" "compare")]
7578 )
7579
;; Maverick Crunch 64-bit integer compare.
7580 (define_insn "*cirrus_cmpdi"
7581 [(set (reg:CC CC_REGNUM)
7582 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7583 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7584 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7585 "cfcmp64%?\\tr15, %V0, %V1"
7586 [(set_attr "type" "mav_farith")
7587 (set_attr "cirrus" "compare")]
7588 )
7589
7590 ; This insn allows redundant compares to be removed by cse, nothing should
7591 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7592 ; is deleted later on. The match_dup will match the mode here, so that
7593 ; mode changes of the condition codes aren't lost by this even though we don't
7594 ; specify what they are.
7595
7596 (define_insn "*deleted_compare"
7597 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7598 "TARGET_32BIT"
7599 "\\t%@ deleted compare"
7600 [(set_attr "conds" "set")
7601 (set_attr "length" "0")]
7602 )
7603
7604 \f
7605 ;; Conditional branch insns
7606
;; Internal branch expander: emit the comparison via arm_gen_compare_reg,
;; then rewrite the pattern so the branch tests the resulting CC register
;; against zero (operand 1 becomes the CC reg, operand 2 becomes 0).
7607 (define_expand "cbranch_cc"
7608 [(set (pc)
7609 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7610 (match_operand 2 "" "")])
7611 (label_ref (match_operand 3 "" ""))
7612 (pc)))]
7613 "TARGET_32BIT"
7614 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7615 operands[1], operands[2]);
7616 operands[2] = const0_rtx;"
7617 )
7618
7619 ;;
7620 ;; Patterns to match conditional branch insns.
7621 ;;
7622
;; Conditional branch on the CC register.  When arm_ccfsm_state is 1 or 2
;; the conditional-execution state machine in final has absorbed this
;; branch by conditionalising the following insns, so emit nothing.  The
;; length attribute selects the 2-byte Thumb-2 encoding for short ranges.
7623 (define_insn "*arm_cond_branch"
7624 [(set (pc)
7625 (if_then_else (match_operator 1 "arm_comparison_operator"
7626 [(match_operand 2 "cc_register" "") (const_int 0)])
7627 (label_ref (match_operand 0 "" ""))
7628 (pc)))]
7629 "TARGET_32BIT"
7630 "*
7631 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7632 {
7633 arm_ccfsm_state += 2;
7634 return \"\";
7635 }
7636 return \"b%d1\\t%l0\";
7637 "
7638 [(set_attr "conds" "use")
7639 (set_attr "type" "branch")
7640 (set (attr "length")
7641 (if_then_else
7642 (and (match_test "TARGET_THUMB2")
7643 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7644 (le (minus (match_dup 0) (pc)) (const_int 256))))
7645 (const_int 2)
7646 (const_int 4)))]
7647 )
7648
;; As above, but the branch is taken when the condition is FALSE
;; (%D1 prints the inverse condition code).
7649 (define_insn "*arm_cond_branch_reversed"
7650 [(set (pc)
7651 (if_then_else (match_operator 1 "arm_comparison_operator"
7652 [(match_operand 2 "cc_register" "") (const_int 0)])
7653 (pc)
7654 (label_ref (match_operand 0 "" ""))))]
7655 "TARGET_32BIT"
7656 "*
7657 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7658 {
7659 arm_ccfsm_state += 2;
7660 return \"\";
7661 }
7662 return \"b%D1\\t%l0\";
7663 "
7664 [(set_attr "conds" "use")
7665 (set_attr "type" "branch")
7666 (set (attr "length")
7667 (if_then_else
7668 (and (match_test "TARGET_THUMB2")
7669 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7670 (le (minus (match_dup 0) (pc)) (const_int 256))))
7671 (const_int 2)
7672 (const_int 4)))]
7673 )
7674
7675 \f
7676
7677 ; scc insns
7678
;; Internal store-flag expander, analogous to cbranch_cc: emit the compare,
;; then match one of the *mov_*scc patterns below against the CC register.
7679 (define_expand "cstore_cc"
7680 [(set (match_operand:SI 0 "s_register_operand" "")
7681 (match_operator:SI 1 "" [(match_operand 2 "" "")
7682 (match_operand 3 "" "")]))]
7683 "TARGET_32BIT"
7684 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7685 operands[2], operands[3]);
7686 operands[3] = const0_rtx;"
7687 )
7688
;; Store-flag: 0 when the condition is false, 1 when it is true.
7689 (define_insn "*mov_scc"
7690 [(set (match_operand:SI 0 "s_register_operand" "=r")
7691 (match_operator:SI 1 "arm_comparison_operator"
7692 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7693 "TARGET_ARM"
7694 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7695 [(set_attr "conds" "use")
7696 (set_attr "insn" "mov")
7697 (set_attr "length" "8")]
7698 )
7699
;; Negated store-flag: 0 when false, -1 (MVN #0) when true.
7700 (define_insn "*mov_negscc"
7701 [(set (match_operand:SI 0 "s_register_operand" "=r")
7702 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7703 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7704 "TARGET_ARM"
7705 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7706 [(set_attr "conds" "use")
7707 (set_attr "insn" "mov")
7708 (set_attr "length" "8")]
7709 )
7710
;; Complemented store-flag: (not (scc)) is ~0 = -1 when the condition is
;; false and ~1 = -2 when it is true.  Both halves must therefore use MVN:
;; the previous template used "mov%D1 %0, #0" on the false path, which
;; wrongly produced 0 instead of -1.
7711 (define_insn "*mov_notscc"
7712 [(set (match_operand:SI 0 "s_register_operand" "=r")
7713 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7714 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7715 "TARGET_ARM"
7716 "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7717 [(set_attr "conds" "use")
7718 (set_attr "insn" "mov")
7719 (set_attr "length" "8")]
7720 )
7721
;; Store-flag expansion: operand 0 = (operand 2 <cond> operand 3) as 0/1.
;; 32-bit targets funnel into cstore_cc; Thumb-1, which has no conditional
;; set instruction, open-codes each comparison with sign-bit and carry
;; tricks using the helper patterns below.
7722 (define_expand "cstoresi4"
7723 [(set (match_operand:SI 0 "s_register_operand" "")
7724 (match_operator:SI 1 "arm_comparison_operator"
7725 [(match_operand:SI 2 "s_register_operand" "")
7726 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7727 "TARGET_32BIT || TARGET_THUMB1"
7728 "{
7729 rtx op3, scratch, scratch2;
7730
 /* ARM / Thumb-2: do the comparison on the flags via cstore_cc.  */
7731 if (!TARGET_THUMB1)
7732 {
7733 if (!arm_add_operand (operands[3], SImode))
7734 operands[3] = force_reg (SImode, operands[3]);
7735 emit_insn (gen_cstore_cc (operands[0], operands[1],
7736 operands[2], operands[3]));
7737 DONE;
7738 }
7739
 /* Thumb-1, comparing against zero: use sign-bit arithmetic.  */
7740 if (operands[3] == const0_rtx)
7741 {
7742 switch (GET_CODE (operands[1]))
7743 {
7744 case EQ:
7745 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7746 break;
7747
7748 case NE:
7749 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7750 break;
7751
 /* x <= 0  <=>  sign bit of (x | (x - 1)).  */
7752 case LE:
7753 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7754 NULL_RTX, 0, OPTAB_WIDEN);
7755 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7756 NULL_RTX, 0, OPTAB_WIDEN);
7757 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7758 operands[0], 1, OPTAB_WIDEN);
7759 break;
7760
 /* x >= 0  <=>  sign bit of ~x.  The shift result must land in
    operands[0]; previously NULL_RTX was passed here, which allocated a
    fresh pseudo and silently discarded the result (bug fixed).  */
7761 case GE:
7762 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7763 NULL_RTX, 1);
7764 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7765 operands[0], 1, OPTAB_WIDEN);
7766 break;
7767
 /* x > 0  <=>  sign bit of ((x >> 31) - x).  */
7768 case GT:
7769 scratch = expand_binop (SImode, ashr_optab, operands[2],
7770 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7771 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7772 NULL_RTX, 0, OPTAB_WIDEN);
7773 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7774 0, OPTAB_WIDEN);
7775 break;
7776
7777 /* LT is handled by generic code. No need for unsigned with 0. */
7778 default:
7779 FAIL;
7780 }
7781 DONE;
7782 }
7783
 /* Thumb-1, general right-hand side.  */
7784 switch (GET_CODE (operands[1]))
7785 {
7786 case EQ:
7787 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7788 NULL_RTX, 0, OPTAB_WIDEN);
7789 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7790 break;
7791
7792 case NE:
7793 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7794 NULL_RTX, 0, OPTAB_WIDEN);
7795 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7796 break;
7797
7798 case LE:
7799 op3 = force_reg (SImode, operands[3]);
7800
7801 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7802 NULL_RTX, 1, OPTAB_WIDEN);
7803 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7804 NULL_RTX, 0, OPTAB_WIDEN);
7805 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7806 op3, operands[2]));
7807 break;
7808
7809 case GE:
7810 op3 = operands[3];
7811 if (!thumb1_cmp_operand (op3, SImode))
7812 op3 = force_reg (SImode, op3);
7813 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7814 NULL_RTX, 0, OPTAB_WIDEN);
7815 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7816 NULL_RTX, 1, OPTAB_WIDEN);
7817 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7818 operands[2], op3));
7819 break;
7820
7821 case LEU:
7822 op3 = force_reg (SImode, operands[3]);
7823 scratch = force_reg (SImode, const0_rtx);
7824 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7825 op3, operands[2]));
7826 break;
7827
7828 case GEU:
7829 op3 = operands[3];
7830 if (!thumb1_cmp_operand (op3, SImode))
7831 op3 = force_reg (SImode, op3);
7832 scratch = force_reg (SImode, const0_rtx);
7833 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7834 operands[2], op3));
7835 break;
7836
7837 case LTU:
7838 op3 = operands[3];
7839 if (!thumb1_cmp_operand (op3, SImode))
7840 op3 = force_reg (SImode, op3);
7842 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7843 break;
7844
 /* GTU is LTU with the operands swapped.  */
7845 case GTU:
7846 op3 = force_reg (SImode, operands[3]);
7848 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7849 break;
7850
7851 /* No good sequences for GT, LT. */
7852 default:
7853 FAIL;
7854 }
7855 DONE;
7856 }")
7857
;; Store-flag for SFmode comparisons: delegate directly to cstore_cc.
7858 (define_expand "cstoresf4"
7859 [(set (match_operand:SI 0 "s_register_operand" "")
7860 (match_operator:SI 1 "arm_comparison_operator"
7861 [(match_operand:SF 2 "s_register_operand" "")
7862 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7863 "TARGET_32BIT && TARGET_HARD_FLOAT"
7864 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7865 operands[2], operands[3])); DONE;"
7866 )
7867
;; Store-flag for DFmode comparisons; requires double-precision hardware.
7868 (define_expand "cstoredf4"
7869 [(set (match_operand:SI 0 "s_register_operand" "")
7870 (match_operator:SI 1 "arm_comparison_operator"
7871 [(match_operand:DF 2 "s_register_operand" "")
7872 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7873 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7874 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7875 operands[2], operands[3])); DONE;"
7876 )
7877
;; Store-flag for DImode.  GT/LE/GTU/LEU are not handled directly by
;; arm_gen_compare_reg, so they are rewritten into the mirrored code with
;; the two operands swapped before delegating to cstore_cc.
7878 (define_expand "cstoredi4"
7879 [(set (match_operand:SI 0 "s_register_operand" "")
7880 (match_operator:SI 1 "arm_comparison_operator"
7881 [(match_operand:DI 2 "cmpdi_operand" "")
7882 (match_operand:DI 3 "cmpdi_operand" "")]))]
7883 "TARGET_32BIT"
7884 "{
7885 rtx swap = NULL_RTX;
7886 enum rtx_code code = GET_CODE (operands[1]);
7887
7888 /* We should not have two constants. */
7889 gcc_assert (GET_MODE (operands[2]) == DImode
7890 || GET_MODE (operands[3]) == DImode);
7891
7892 /* Flip unimplemented DImode comparisons to a form that
7893 arm_gen_compare_reg can handle. */
7894 switch (code)
7895 {
7896 case GT:
7897 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7898 case LE:
7899 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7900 case GTU:
7901 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7902 case LEU:
7903 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7904 default:
7905 break;
7906 }
7907 if (swap)
7908 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7909 operands[2]));
7910 else
7911 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7912 operands[3]));
7913 DONE;
7914 }"
7915 )
7916
;; Thumb-1 (x == 0) store-flag; allocates the scratch register clobbered
;; by the matching insn pattern below.
7917 (define_expand "cstoresi_eq0_thumb1"
7918 [(parallel
7919 [(set (match_operand:SI 0 "s_register_operand" "")
7920 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7921 (const_int 0)))
7922 (clobber (match_dup:SI 2))])]
7923 "TARGET_THUMB1"
7924 "operands[2] = gen_reg_rtx (SImode);"
7925 )
7926
;; Thumb-1 (x != 0) store-flag; likewise allocates the scratch.
7927 (define_expand "cstoresi_ne0_thumb1"
7928 [(parallel
7929 [(set (match_operand:SI 0 "s_register_operand" "")
7930 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7931 (const_int 0)))
7932 (clobber (match_dup:SI 2))])]
7933 "TARGET_THUMB1"
7934 "operands[2] = gen_reg_rtx (SImode);"
7935 )
7936
;; x == 0 via carry: NEG (RSBS #0) sets C exactly when x is zero, and
;; ADC then computes x + (-x) + C = C, giving 1 iff x was zero.  The
;; second alternative uses the scratch when input and output must overlap.
7937 (define_insn "*cstoresi_eq0_thumb1_insn"
7938 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7939 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7940 (const_int 0)))
7941 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7942 "TARGET_THUMB1"
7943 "@
7944 neg\\t%0, %1\;adc\\t%0, %0, %1
7945 neg\\t%2, %1\;adc\\t%0, %1, %2"
7946 [(set_attr "length" "4")]
7947 )
7948
;; x != 0 via carry: SUB #1 sets C (no borrow) exactly when x != 0
;; (unsigned), and SBC computes x - (x-1) - !C = C.
7949 (define_insn "*cstoresi_ne0_thumb1_insn"
7950 [(set (match_operand:SI 0 "s_register_operand" "=l")
7951 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7952 (const_int 0)))
7953 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7954 "TARGET_THUMB1"
7955 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7956 [(set_attr "length" "4")]
7957 )
7958
7959 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; -(x <u y): CMP clears C on borrow, and SBC %0,%0,%0 yields 0 - !C,
;; i.e. -1 when x <u y and 0 otherwise.
7960 (define_insn "cstoresi_nltu_thumb1"
7961 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7962 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7963 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7964 "TARGET_THUMB1"
7965 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7966 [(set_attr "length" "4")]
7967 )
7968
;; (x <u y) as 0/1: split into the nltu pattern above (giving 0/-1)
;; followed by a negation into the destination.
7969 (define_insn_and_split "cstoresi_ltu_thumb1"
7970 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7971 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7972 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7973 "TARGET_THUMB1"
7974 "#"
7975 "TARGET_THUMB1"
7976 [(set (match_dup 3)
7977 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7978 (set (match_dup 0) (neg:SI (match_dup 3)))]
7979 "operands[3] = gen_reg_rtx (SImode);"
7980 [(set_attr "length" "4")]
7981 )
7982
7983 ;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): CMP sets C when there is no borrow
;; (i.e. %3 >= %4 unsigned) and ADC folds that carry into the sum.
7984 (define_insn "thumb1_addsi3_addgeu"
7985 [(set (match_operand:SI 0 "s_register_operand" "=l")
7986 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7987 (match_operand:SI 2 "s_register_operand" "l"))
7988 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7989 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7990 "TARGET_THUMB1"
7991 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7992 [(set_attr "length" "4")]
7993 )
7994
7995 \f
7996 ;; Conditional move insns
7997
;; Conditional move of SImode values.  Emits the comparison and rewrites
;; operand 1 to test the CC register against zero; UNEQ and LTGT have no
;; usable condition code, so the expansion FAILs for them.
7998 (define_expand "movsicc"
7999 [(set (match_operand:SI 0 "s_register_operand" "")
8000 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8001 (match_operand:SI 2 "arm_not_operand" "")
8002 (match_operand:SI 3 "arm_not_operand" "")))]
8003 "TARGET_32BIT"
8004 "
8005 {
8006 enum rtx_code code = GET_CODE (operands[1]);
8007 rtx ccreg;
8008
8009 if (code == UNEQ || code == LTGT)
8010 FAIL;
8011
8012 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8013 XEXP (operands[1], 1));
8014 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8015 }"
8016 )
8017
;; Conditional move of SFmode values; same CC rewriting as movsicc, plus
;; operand 3 is forced into a register unless it is a valid FPA add operand.
8018 (define_expand "movsfcc"
8019 [(set (match_operand:SF 0 "s_register_operand" "")
8020 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8021 (match_operand:SF 2 "s_register_operand" "")
8022 (match_operand:SF 3 "nonmemory_operand" "")))]
8023 "TARGET_32BIT && TARGET_HARD_FLOAT"
8024 "
8025 {
8026 enum rtx_code code = GET_CODE (operands[1]);
8027 rtx ccreg;
8028
8029 if (code == UNEQ || code == LTGT)
8030 FAIL;
8031
8032 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8033 Otherwise, ensure it is a valid FP add operand */
8034 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8035 || (!arm_float_add_operand (operands[3], SFmode)))
8036 operands[3] = force_reg (SFmode, operands[3]);
8037
8038 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8039 XEXP (operands[1], 1));
8040 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8041 }"
8042 )
8043
;; Conditional move of DFmode values; needs FPA or double-precision VFP.
8044 (define_expand "movdfcc"
8045 [(set (match_operand:DF 0 "s_register_operand" "")
8046 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8047 (match_operand:DF 2 "s_register_operand" "")
8048 (match_operand:DF 3 "arm_float_add_operand" "")))]
8049 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8050 "
8051 {
8052 enum rtx_code code = GET_CODE (operands[1]);
8053 rtx ccreg;
8054
8055 if (code == UNEQ || code == LTGT)
8056 FAIL;
8057
8058 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8059 XEXP (operands[1], 1));
8060 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8061 }"
8062 )
8063
;; ARM conditional move.  The alternatives cover each combination of the
;; destination already holding one arm (length 4, a single conditional
;; MOV/MVN) versus needing both arms (length 8).  %B prints the bitwise
;; complement of a constant, pairing MVN with the 'K' constraint.
8064 (define_insn "*movsicc_insn"
8065 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8066 (if_then_else:SI
8067 (match_operator 3 "arm_comparison_operator"
8068 [(match_operand 4 "cc_register" "") (const_int 0)])
8069 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8070 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8071 "TARGET_ARM"
8072 "@
8073 mov%D3\\t%0, %2
8074 mvn%D3\\t%0, #%B2
8075 mov%d3\\t%0, %1
8076 mvn%d3\\t%0, #%B1
8077 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8078 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8079 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8080 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8081 [(set_attr "length" "4,4,4,4,8,8,8,8")
8082 (set_attr "conds" "use")
8083 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
8084 )
8085
;; Soft-float SFmode conditional move: the value lives in core registers,
;; so a single conditional MOV of the arm not already in place suffices.
8086 (define_insn "*movsfcc_soft_insn"
8087 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8088 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8089 [(match_operand 4 "cc_register" "") (const_int 0)])
8090 (match_operand:SF 1 "s_register_operand" "0,r")
8091 (match_operand:SF 2 "s_register_operand" "r,0")))]
8092 "TARGET_ARM && TARGET_SOFT_FLOAT"
8093 "@
8094 mov%D3\\t%0, %2
8095 mov%d3\\t%0, %1"
8096 [(set_attr "conds" "use")
8097 (set_attr "insn" "mov")]
8098 )
8099
8100 \f
8101 ;; Jump and linkage insns
8102
;; Unconditional jump; the concrete pattern is chosen per target below.
8103 (define_expand "jump"
8104 [(set (pc)
8105 (label_ref (match_operand 0 "" "")))]
8106 "TARGET_EITHER"
8107 ""
8108 )
8109
;; ARM / Thumb-2 unconditional branch.  Emits nothing if the ccfsm
;; conditional-execution machinery has absorbed this jump; the length
;; attribute selects the 2-byte Thumb-2 encoding when the target is close.
8110 (define_insn "*arm_jump"
8111 [(set (pc)
8112 (label_ref (match_operand 0 "" "")))]
8113 "TARGET_32BIT"
8114 "*
8115 {
8116 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8117 {
8118 arm_ccfsm_state += 2;
8119 return \"\";
8120 }
8121 return \"b%?\\t%l0\";
8122 }
8123 "
8124 [(set_attr "predicable" "yes")
8125 (set (attr "length")
8126 (if_then_else
8127 (and (match_test "TARGET_THUMB2")
8128 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8129 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8130 (const_int 2)
8131 (const_int 4)))]
8132 )
8133
;; Thumb-1 unconditional branch: a 2-byte B when in range, otherwise a
;; 4-byte BL used as a "far jump" (flagged via the far_jump attribute).
8134 (define_insn "*thumb_jump"
8135 [(set (pc)
8136 (label_ref (match_operand 0 "" "")))]
8137 "TARGET_THUMB1"
8138 "*
8139 if (get_attr_length (insn) == 2)
8140 return \"b\\t%l0\";
8141 return \"bl\\t%l0\\t%@ far jump\";
8142 "
8143 [(set (attr "far_jump")
8144 (if_then_else
8145 (eq_attr "length" "4")
8146 (const_string "yes")
8147 (const_string "no")))
8148 (set (attr "length")
8149 (if_then_else
8150 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8151 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8152 (const_int 2)
8153 (const_int 4)))]
8154 )
8155
;; Call expander: forces non-register callees (and symbols that need a
;; long call) into a register, then emits via arm_emit_call_insn so the
;; backend can record extra register usage (e.g. for PIC).
8156 (define_expand "call"
8157 [(parallel [(call (match_operand 0 "memory_operand" "")
8158 (match_operand 1 "general_operand" ""))
8159 (use (match_operand 2 "" ""))
8160 (clobber (reg:SI LR_REGNUM))])]
8161 "TARGET_EITHER"
8162 "
8163 {
8164 rtx callee, pat;
8165
8166 /* In an untyped call, we can get NULL for operand 2. */
8167 if (operands[2] == NULL_RTX)
8168 operands[2] = const0_rtx;
8169
8170 /* Decide if we should generate indirect calls by loading the
8171 32-bit address of the callee into a register before performing the
8172 branch and link. */
8173 callee = XEXP (operands[0], 0);
8174 if (GET_CODE (callee) == SYMBOL_REF
8175 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8176 : !REG_P (callee))
8177 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8178
8179 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8180 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8181 DONE;
8182 }"
8183 )
8184
;; Pattern-only expander matched by the insns below.
8185 (define_expand "call_internal"
8186 [(parallel [(call (match_operand 0 "memory_operand" "")
8187 (match_operand 1 "general_operand" ""))
8188 (use (match_operand 2 "" ""))
8189 (clobber (reg:SI LR_REGNUM))])])
8190
;; Indirect call on ARMv5+: a single BLX.
8191 (define_insn "*call_reg_armv5"
8192 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8193 (match_operand 1 "" ""))
8194 (use (match_operand 2 "" ""))
8195 (clobber (reg:SI LR_REGNUM))]
8196 "TARGET_ARM && arm_arch5"
8197 "blx%?\\t%0"
8198 [(set_attr "type" "call")]
8199 )
8200
;; Indirect call on pre-v5 ARM; output_call synthesises the mov lr / bx
;; (or equivalent) sequence, hence the worst-case length of 12.
8201 (define_insn "*call_reg_arm"
8202 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8203 (match_operand 1 "" ""))
8204 (use (match_operand 2 "" ""))
8205 (clobber (reg:SI LR_REGNUM))]
8206 "TARGET_ARM && !arm_arch5"
8207 "*
8208 return output_call (operands);
8209 "
8210 ;; length is worst case, normally it is only two
8211 [(set_attr "length" "12")
8212 (set_attr "type" "call")]
8213 )
8214
8215
8216 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8217 ;; considered a function call by the branch predictor of some cores (PR40887).
8218 ;; Falls back to blx rN (*call_reg_armv5).
8219
8220 (define_insn "*call_mem"
8221 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8222 (match_operand 1 "" ""))
8223 (use (match_operand 2 "" ""))
8224 (clobber (reg:SI LR_REGNUM))]
8225 "TARGET_ARM && !arm_arch5"
8226 "*
8227 return output_call_mem (operands);
8228 "
8229 [(set_attr "length" "12")
8230 (set_attr "type" "call")]
8231 )
8232
;; Thumb-1 indirect call on ARMv5+: single 16-bit BLX.
8233 (define_insn "*call_reg_thumb1_v5"
8234 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8235 (match_operand 1 "" ""))
8236 (use (match_operand 2 "" ""))
8237 (clobber (reg:SI LR_REGNUM))]
8238 "TARGET_THUMB1 && arm_arch5"
8239 "blx\\t%0"
8240 [(set_attr "length" "2")
8241 (set_attr "type" "call")]
8242 )
8243
;; Thumb-1 indirect call without BLX: go through the per-register
;; call-via / interworking helper thunks emitted by the backend.
8244 (define_insn "*call_reg_thumb1"
8245 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8246 (match_operand 1 "" ""))
8247 (use (match_operand 2 "" ""))
8248 (clobber (reg:SI LR_REGNUM))]
8249 "TARGET_THUMB1 && !arm_arch5"
8250 "*
8251 {
8252 if (!TARGET_CALLER_INTERWORKING)
8253 return thumb_call_via_reg (operands[0]);
8254 else if (operands[1] == const0_rtx)
8255 return \"bl\\t%__interwork_call_via_%0\";
8256 else if (frame_pointer_needed)
8257 return \"bl\\t%__interwork_r7_call_via_%0\";
8258 else
8259 return \"bl\\t%__interwork_r11_call_via_%0\";
8260 }"
8261 [(set_attr "type" "call")]
8262 )
8263
;; Value-returning variant of the "call" expander; structure mirrors it
;; with the result destination as operand 0 and everything shifted by one.
8264 (define_expand "call_value"
8265 [(parallel [(set (match_operand 0 "" "")
8266 (call (match_operand 1 "memory_operand" "")
8267 (match_operand 2 "general_operand" "")))
8268 (use (match_operand 3 "" ""))
8269 (clobber (reg:SI LR_REGNUM))])]
8270 "TARGET_EITHER"
8271 "
8272 {
8273 rtx pat, callee;
8274
8275 /* In an untyped call, we can get NULL for operand 3. */
8276 if (operands[3] == 0)
8277 operands[3] = const0_rtx;
8278
8279 /* Decide if we should generate indirect calls by loading the
8280 32-bit address of the callee into a register before performing the
8281 branch and link. */
8282 callee = XEXP (operands[1], 0);
8283 if (GET_CODE (callee) == SYMBOL_REF
8284 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8285 : !REG_P (callee))
8286 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8287
8288 pat = gen_call_value_internal (operands[0], operands[1],
8289 operands[2], operands[3]);
8290 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8291 DONE;
8292 }"
8293 )
8294
;; Pattern-only expander matched by the insns below.
8295 (define_expand "call_value_internal"
8296 [(parallel [(set (match_operand 0 "" "")
8297 (call (match_operand 1 "memory_operand" "")
8298 (match_operand 2 "general_operand" "")))
8299 (use (match_operand 3 "" ""))
8300 (clobber (reg:SI LR_REGNUM))])])
8301
;; Value-returning indirect call on ARMv5+: single BLX.
8302 (define_insn "*call_value_reg_armv5"
8303 [(set (match_operand 0 "" "")
8304 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8305 (match_operand 2 "" "")))
8306 (use (match_operand 3 "" ""))
8307 (clobber (reg:SI LR_REGNUM))]
8308 "TARGET_ARM && arm_arch5"
8309 "blx%?\\t%1"
8310 [(set_attr "type" "call")]
8311 )
8312
;; Value-returning indirect call on pre-v5 ARM; synthesised by output_call.
8313 (define_insn "*call_value_reg_arm"
8314 [(set (match_operand 0 "" "")
8315 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8316 (match_operand 2 "" "")))
8317 (use (match_operand 3 "" ""))
8318 (clobber (reg:SI LR_REGNUM))]
8319 "TARGET_ARM && !arm_arch5"
8320 "*
8321 return output_call (&operands[1]);
8322 "
8323 [(set_attr "length" "12")
8324 (set_attr "type" "call")]
8325 )
8326
8327 ;; Note: see *call_mem
8328
8329 (define_insn "*call_value_mem"
8330 [(set (match_operand 0 "" "")
8331 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8332 (match_operand 2 "" "")))
8333 (use (match_operand 3 "" ""))
8334 (clobber (reg:SI LR_REGNUM))]
8335 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8336 "*
8337 return output_call_mem (&operands[1]);
8338 "
8339 [(set_attr "length" "12")
8340 (set_attr "type" "call")]
8341 )
8342
;; Thumb-1 value-returning indirect call on ARMv5+: single 16-bit BLX.
8343 (define_insn "*call_value_reg_thumb1_v5"
8344 [(set (match_operand 0 "" "")
8345 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8346 (match_operand 2 "" "")))
8347 (use (match_operand 3 "" ""))
8348 (clobber (reg:SI LR_REGNUM))]
8349 "TARGET_THUMB1 && arm_arch5"
8350 "blx\\t%1"
8351 [(set_attr "length" "2")
8352 (set_attr "type" "call")]
8353 )
8354
;; Thumb-1 value-returning indirect call without BLX: via helper thunks,
;; as in *call_reg_thumb1.
8355 (define_insn "*call_value_reg_thumb1"
8356 [(set (match_operand 0 "" "")
8357 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8358 (match_operand 2 "" "")))
8359 (use (match_operand 3 "" ""))
8360 (clobber (reg:SI LR_REGNUM))]
8361 "TARGET_THUMB1 && !arm_arch5"
8362 "*
8363 {
8364 if (!TARGET_CALLER_INTERWORKING)
8365 return thumb_call_via_reg (operands[1]);
8366 else if (operands[2] == const0_rtx)
8367 return \"bl\\t%__interwork_call_via_%1\";
8368 else if (frame_pointer_needed)
8369 return \"bl\\t%__interwork_r7_call_via_%1\";
8370 else
8371 return \"bl\\t%__interwork_r11_call_via_%1\";
8372 }"
8373 [(set_attr "type" "call")]
8374 )
8375
8376 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8377 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8378
;; Direct call to a symbol on 32-bit targets; adds the (PLT) relocation
;; when one is needed.
8379 (define_insn "*call_symbol"
8380 [(call (mem:SI (match_operand:SI 0 "" ""))
8381 (match_operand 1 "" ""))
8382 (use (match_operand 2 "" ""))
8383 (clobber (reg:SI LR_REGNUM))]
8384 "TARGET_32BIT
8385 && (GET_CODE (operands[0]) == SYMBOL_REF)
8386 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8387 "*
8388 {
8389 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8390 }"
8391 [(set_attr "type" "call")]
8392 )
8393
;; Value-returning direct call to a symbol on 32-bit targets.
8394 (define_insn "*call_value_symbol"
8395 [(set (match_operand 0 "" "")
8396 (call (mem:SI (match_operand:SI 1 "" ""))
8397 (match_operand:SI 2 "" "")))
8398 (use (match_operand 3 "" ""))
8399 (clobber (reg:SI LR_REGNUM))]
8400 "TARGET_32BIT
8401 && (GET_CODE (operands[1]) == SYMBOL_REF)
8402 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8403 "*
8404 {
8405 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8406 }"
8407 [(set_attr "type" "call")]
8408 )
8409
;; Thumb-1 direct call to a symbol.
8410 (define_insn "*call_insn"
8411 [(call (mem:SI (match_operand:SI 0 "" ""))
8412 (match_operand:SI 1 "" ""))
8413 (use (match_operand 2 "" ""))
8414 (clobber (reg:SI LR_REGNUM))]
8415 "TARGET_THUMB1
8416 && GET_CODE (operands[0]) == SYMBOL_REF
8417 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8418 "bl\\t%a0"
8419 [(set_attr "length" "4")
8420 (set_attr "type" "call")]
8421 )
8422
;; Thumb-1 value-returning direct call to a symbol.
8423 (define_insn "*call_value_insn"
8424 [(set (match_operand 0 "" "")
8425 (call (mem:SI (match_operand 1 "" ""))
8426 (match_operand 2 "" "")))
8427 (use (match_operand 3 "" ""))
8428 (clobber (reg:SI LR_REGNUM))]
8429 "TARGET_THUMB1
8430 && GET_CODE (operands[1]) == SYMBOL_REF
8431 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8432 "bl\\t%a1"
8433 [(set_attr "length" "4")
8434 (set_attr "type" "call")]
8435 )
8436
8437 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Tail (sibling) call: the (return) marks that this call replaces the
;; caller's own return.  Operand 2 is normalised to 0 when NULL, as in
;; the plain "call" expander.
8438 (define_expand "sibcall"
8439 [(parallel [(call (match_operand 0 "memory_operand" "")
8440 (match_operand 1 "general_operand" ""))
8441 (return)
8442 (use (match_operand 2 "" ""))])]
8443 "TARGET_32BIT"
8444 "
8445 {
8446 if (operands[2] == NULL_RTX)
8447 operands[2] = const0_rtx;
8448 }"
8449 )
8450
;; Value-returning tail call.
8451 (define_expand "sibcall_value"
8452 [(parallel [(set (match_operand 0 "" "")
8453 (call (match_operand 1 "memory_operand" "")
8454 (match_operand 2 "general_operand" "")))
8455 (return)
8456 (use (match_operand 3 "" ""))])]
8457 "TARGET_32BIT"
8458 "
8459 {
8460 if (operands[3] == NULL_RTX)
8461 operands[3] = const0_rtx;
8462 }"
8463 )
8464
;; Tail call to a symbol: a plain branch (B, not BL) since no return
;; address is needed; only SYMBOL_REF callees are accepted.
8465 (define_insn "*sibcall_insn"
8466 [(call (mem:SI (match_operand:SI 0 "" "X"))
8467 (match_operand 1 "" ""))
8468 (return)
8469 (use (match_operand 2 "" ""))]
8470 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8471 "*
8472 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8473 "
8474 [(set_attr "type" "call")]
8475 )
8476
;; Value-returning tail call to a symbol.
8477 (define_insn "*sibcall_value_insn"
8478 [(set (match_operand 0 "" "")
8479 (call (mem:SI (match_operand:SI 1 "" "X"))
8480 (match_operand 2 "" "")))
8481 (return)
8482 (use (match_operand 3 "" ""))]
8483 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8484 "*
8485 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8486 "
8487 [(set_attr "type" "call")]
8488 )
8489
;; Simple epilogue-less return, only available when USE_RETURN_INSN says
;; the whole epilogue fits in a single return instruction.
8490 (define_expand "return"
8491 [(return)]
8492 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8493 "")
8494
8495 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM return.  arm_ccfsm_state == 2 means the ccfsm pass is
;; conditionalising the following insns and will emit the return itself.
8496 (define_insn "*arm_return"
8497 [(return)]
8498 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8499 "*
8500 {
8501 if (arm_ccfsm_state == 2)
8502 {
8503 arm_ccfsm_state += 2;
8504 return \"\";
8505 }
8506 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8507 }"
8508 [(set_attr "type" "load1")
8509 (set_attr "length" "12")
8510 (set_attr "predicable" "yes")]
8511 )
8512
;; Conditional return: return when the condition in operand 0 holds.
8513 (define_insn "*cond_return"
8514 [(set (pc)
8515 (if_then_else (match_operator 0 "arm_comparison_operator"
8516 [(match_operand 1 "cc_register" "") (const_int 0)])
8517 (return)
8518 (pc)))]
8519 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8520 "*
8521 {
8522 if (arm_ccfsm_state == 2)
8523 {
8524 arm_ccfsm_state += 2;
8525 return \"\";
8526 }
8527 return output_return_instruction (operands[0], TRUE, FALSE);
8528 }"
8529 [(set_attr "conds" "use")
8530 (set_attr "length" "12")
8531 (set_attr "type" "load1")]
8532 )
8533
;; As above, but return when the condition does NOT hold (the final TRUE
;; argument asks output_return_instruction for the reversed condition).
8534 (define_insn "*cond_return_inverted"
8535 [(set (pc)
8536 (if_then_else (match_operator 0 "arm_comparison_operator"
8537 [(match_operand 1 "cc_register" "") (const_int 0)])
8538 (pc)
8539 (return)))]
8540 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8541 "*
8542 {
8543 if (arm_ccfsm_state == 2)
8544 {
8545 arm_ccfsm_state += 2;
8546 return \"\";
8547 }
8548 return output_return_instruction (operands[0], TRUE, TRUE);
8549 }"
8550 [(set_attr "conds" "use")
8551 (set_attr "length" "12")
8552 (set_attr "type" "load1")]
8553 )
8554
8555 ;; Generate a sequence of instructions to determine if the processor is
8556 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8557 ;; mask.
8558
;; Result is -1 (all bits valid) in 32-bit mode, else 0x03fffffc, which
;; strips the flag/mode bits packed into r14 on 26-bit-mode processors.
8559 (define_expand "return_addr_mask"
8560 [(set (match_dup 1)
8561 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8562 (const_int 0)))
8563 (set (match_operand:SI 0 "s_register_operand" "")
8564 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8565 (const_int -1)
8566 (const_int 67108860)))] ; 0x03fffffc
8567 "TARGET_ARM"
8568 "
8569 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8570 ")
8571
;; The architecture probe behind UNSPEC_CHECK_ARCH: compare r0 with itself
;; and then the PC with itself; in 26-bit mode the flag bits folded into
;; reads of r15 make the second TEQ come out non-equal.
8572 (define_insn "*check_arch2"
8573 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8574 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8575 (const_int 0)))]
8576 "TARGET_ARM"
8577 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8578 [(set_attr "length" "8")
8579 (set_attr "conds" "set")]
8580 )
8581
8582 ;; Call subroutine returning any type.
8583
;; Expand __builtin_apply: call operand 0 and store every possible return
;; register (described by the parallel in operand 2) into the result block
;; operand 1.  An r0 return is widened to TImode so all four registers
;; r0-r3 are claimed and saved via a store-multiple.
8584 (define_expand "untyped_call"
8585 [(parallel [(call (match_operand 0 "" "")
8586 (const_int 0))
8587 (match_operand 1 "" "")
8588 (match_operand 2 "" "")])]
8589 "TARGET_EITHER"
8590 "
8591 {
8592 int i;
8593 rtx par = gen_rtx_PARALLEL (VOIDmode,
8594 rtvec_alloc (XVECLEN (operands[2], 0)));
8595 rtx addr = gen_reg_rtx (Pmode);
8596 rtx mem;
8597 int size = 0;
8598
8599 emit_move_insn (addr, XEXP (operands[1], 0));
8600 mem = change_address (operands[1], BLKmode, addr);
8601
8602 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8603 {
8604 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8605
8606 /* Default code only uses r0 as a return value, but we could
8607 be using anything up to 4 registers. */
8608 if (REGNO (src) == R0_REGNUM)
8609 src = gen_rtx_REG (TImode, R0_REGNUM);
8610
8611 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8612 GEN_INT (size));
8613 size += GET_MODE_SIZE (GET_MODE (src));
8614 }
8615
8616 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8617 const0_rtx));
8618
8619 size = 0;
8620
8621 for (i = 0; i < XVECLEN (par, 0); i++)
8622 {
8623 HOST_WIDE_INT offset = 0;
8624 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8625
8626 if (size != 0)
8627 emit_move_insn (addr, plus_constant (addr, size));
8628
8629 mem = change_address (mem, GET_MODE (reg), NULL);
8630 if (REGNO (reg) == R0_REGNUM)
8631 {
8632 /* On thumb we have to use a write-back instruction. */
8633 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8634 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8635 size = TARGET_ARM ? 16 : 0;
8636 }
8637 else
8638 {
8639 emit_move_insn (mem, reg);
8640 size = GET_MODE_SIZE (GET_MODE (reg));
8641 }
8642 }
8643
8644 /* The optimizer does not know that the call sets the function value
8645 registers we stored in the result block. We avoid problems by
8646 claiming that all hard registers are used and clobbered at this
8647 point. */
8648 emit_insn (gen_blockage ());
8649
8650 DONE;
8651 }"
8652 )
8653
;; Expand __builtin_return: reload the saved return value from result
;; block OPERAND 0 back into the registers listed in the PARALLEL
;; OPERAND 1, then return.  Mirror image of untyped_call above.
8654 (define_expand "untyped_return"
8655 [(match_operand:BLK 0 "memory_operand" "")
8656 (match_operand 1 "" "")]
8657 "TARGET_EITHER"
8658 "
8659 {
8660 int i;
8661 rtx addr = gen_reg_rtx (Pmode);
8662 rtx mem;
8663 int size = 0;
8664
8665 /* Walk the result block with a stepped pseudo, as in untyped_call.  */
8665 emit_move_insn (addr, XEXP (operands[0], 0));
8666 mem = change_address (operands[0], BLKmode, addr);
8667
8668 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8669 {
8670 HOST_WIDE_INT offset = 0;
8671 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8672
8673 if (size != 0)
8674 emit_move_insn (addr, plus_constant (addr, size));
8675
8676 mem = change_address (mem, GET_MODE (reg), NULL);
8677 if (REGNO (reg) == R0_REGNUM)
8678 {
8679 /* On thumb we have to use a write-back instruction.  */
8680 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8681 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8682 /* The Thumb write-back form already advanced ADDR.  */
8682 size = TARGET_ARM ? 16 : 0;
8683 }
8684 else
8685 {
8686 emit_move_insn (reg, mem);
8687 size = GET_MODE_SIZE (GET_MODE (reg));
8688 }
8689 }
8690
8691 /* Emit USE insns before the return.  */
8692 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8693 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8694
8695 /* Construct the return.  */
8696 expand_naked_return ();
8697
8698 DONE;
8699 }"
8700 )
8701
8702 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8703 ;; all of memory. This blocks insns from being moved across this point.
8704
;; Zero-length scheduling/optimization barrier: the unspec_volatile is
;; treated as using and clobbering all registers and memory, so nothing
;; may be moved across it.  Emits no code.
8705 (define_insn "blockage"
8706 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8707 "TARGET_EITHER"
8708 ""
8709 [(set_attr "length" "0")
8710 (set_attr "type" "block")]
8711 )
8712
;; Expand a switch dispatch.  Bias the index down by the lower bound if
;; necessary, then hand off to the per-target internal casesi pattern.
8713 (define_expand "casesi"
8714 [(match_operand:SI 0 "s_register_operand" "")	; index to jump on
8715 (match_operand:SI 1 "const_int_operand" "")	; lower bound
8716 (match_operand:SI 2 "const_int_operand" "")	; total range
8717 (match_operand:SI 3 "" "")			; table label
8718 (match_operand:SI 4 "" "")]			; Out of range label
8719 "TARGET_32BIT || optimize_size || flag_pic"
8720 "
8721 {
8722 enum insn_code code;
8723 /* Normalise the index so the table is always based at zero.  */
8723 if (operands[1] != const0_rtx)
8724 {
8725 rtx reg = gen_reg_rtx (SImode);
8726
8727 emit_insn (gen_addsi3 (reg, operands[0],
8728 gen_int_mode (-INTVAL (operands[1]),
8729 SImode)));
8730 operands[0] = reg;
8731 }
8732
8733 /* Pick the dispatch pattern matching the instruction set (and, for
Thumb-2, whether PIC-safe table entries are required).  */
8733 if (TARGET_ARM)
8734 code = CODE_FOR_arm_casesi_internal;
8735 else if (TARGET_THUMB1)
8736 code = CODE_FOR_thumb1_casesi_internal_pic;
8737 else if (flag_pic)
8738 code = CODE_FOR_thumb2_casesi_internal_pic;
8739 else
8740 code = CODE_FOR_thumb2_casesi_internal;
8741
8742 /* The range operand must satisfy the chosen pattern's predicate.  */
8742 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8743 operands[2] = force_reg (SImode, operands[2]);
8744
8745 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8746 operands[3], operands[4]));
8747 DONE;
8748 }"
8749 )
8750
8751 ;; The USE in this pattern is needed to tell flow analysis that this is
8752 ;; a CASESI insn. It has no other purpose.
;; ARM-mode table dispatch: compare the index with the range and either
;; jump through the table (LDRLS/ADDLS into the PC) or fall through to
;; the default label.
;; The USE in this pattern is needed to tell flow analysis that this is
;; a CASESI insn.  It has no other purpose.
8753 (define_insn "arm_casesi_internal"
8754 [(parallel [(set (pc)
8755 (if_then_else
8756 (leu (match_operand:SI 0 "s_register_operand" "r")
8757 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8758 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8759 (label_ref (match_operand 2 "" ""))))
8760 (label_ref (match_operand 3 "" ""))))
8761 (clobber (reg:CC CC_REGNUM))
8762 (use (label_ref (match_dup 2)))])]
8763 "TARGET_ARM"
8764 "*
8765 /* PIC tables hold pc-relative offsets, so add into the PC; otherwise
the table holds absolute addresses and is loaded into the PC.  */
8765 if (flag_pic)
8766 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8767 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8768 "
8769 [(set_attr "conds" "clob")
8770 (set_attr "length" "12")]          ; cmp + conditional dispatch + b
8771 )
8772
;; Thumb-1 table dispatch: branch to the default label when the index is
;; out of range, copy the index into r0 (the register the dispatch insn
;; expects), then jump via thumb1_casesi_dispatch.
8773 (define_expand "thumb1_casesi_internal_pic"
8774 [(match_operand:SI 0 "s_register_operand" "")
8775 (match_operand:SI 1 "thumb1_cmp_operand" "")
8776 (match_operand 2 "" "")
8777 (match_operand 3 "" "")]
8778 "TARGET_THUMB1"
8779 {
8780 rtx reg0;
8781 /* index > range  =>  take the out-of-range label (operand 3).  */
8781 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8782 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8783 operands[3]));
8784 reg0 = gen_rtx_REG (SImode, 0);
8785 emit_move_insn (reg0, operands[0]);
8786 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8787 DONE;
8788 }
8789 )
8790
;; The actual Thumb-1 dispatch jump.  The index is pinned in r0 (see the
;; expander above); assembly is produced by thumb1_output_casesi.
;; ip and lr are clobbered as scratch registers by the emitted sequence.
8791 (define_insn "thumb1_casesi_dispatch"
8792 [(parallel [(set (pc) (unspec [(reg:SI 0)
8793 (label_ref (match_operand 0 "" ""))
8794 ;; (label_ref (match_operand 1 "" ""))
8795 ]
8796 UNSPEC_THUMB1_CASESI))
8797 (clobber (reg:SI IP_REGNUM))
8798 (clobber (reg:SI LR_REGNUM))])]
8799 "TARGET_THUMB1"
8800 "* return thumb1_output_casesi(operands);"
8801 [(set_attr "length" "4")]
8802 )
8803
;; Jump to a computed address held in a register.
8804 (define_expand "indirect_jump"
8805 [(set (pc)
8806 (match_operand:SI 0 "s_register_operand" ""))]
8807 "TARGET_EITHER"
8808 "
8809 /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
8810 address and use bx.  */
8811 if (TARGET_THUMB2)
8812 {
8813 rtx tmp;
8814 tmp = gen_reg_rtx (SImode);
8815 /* Bit 0 set keeps the core in Thumb state across the BX.  */
8815 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8816 operands[0] = tmp;
8817 }
8818 "
8819 )
8820
8821 ;; NB Never uses BX.
;; ARM-mode indirect jump via MOV to the PC.
;; NB Never uses BX (so no interworking state change).
8822 (define_insn "*arm_indirect_jump"
8823 [(set (pc)
8824 (match_operand:SI 0 "s_register_operand" "r"))]
8825 "TARGET_ARM"
8826 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8827 [(set_attr "predicable" "yes")]
8828 )
8829
;; Indirect jump where the target address is loaded straight from memory
;; into the PC.
8830 (define_insn "*load_indirect_jump"
8831 [(set (pc)
8832 (match_operand:SI 0 "memory_operand" "m"))]
8833 "TARGET_ARM"
8834 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8835 [(set_attr "type" "load1")
8836 (set_attr "pool_range" "4096")     ; LDR literal-pool reach, forward
8837 (set_attr "neg_pool_range" "4084") ; and backward
8838 (set_attr "predicable" "yes")]
8839 )
8840
8841 ;; NB Never uses BX.
;; Thumb-1 indirect jump via MOV to the PC (2-byte encoding; prefers a
;; low register but accepts any).
;; NB Never uses BX.
8842 (define_insn "*thumb1_indirect_jump"
8843 [(set (pc)
8844 (match_operand:SI 0 "register_operand" "l*r"))]
8845 "TARGET_THUMB1"
8846 "mov\\tpc, %0"
8847 [(set_attr "conds" "clob")
8848 (set_attr "length" "2")]
8849 )
8850
8851 \f
8852 ;; Misc insns
8853
;; No-operation.  Unified assembler has a real NOP mnemonic; otherwise
;; use the traditional register-to-itself moves (r0 on ARM, the
;; conventional "mov r8, r8" on Thumb-1).
8854 (define_insn "nop"
8855 [(const_int 0)]
8856 "TARGET_EITHER"
8857 "*
8858 if (TARGET_UNIFIED_ASM)
8859 return \"nop\";
8860 if (TARGET_ARM)
8861 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8862 return \"mov\\tr8, r8\";
8863 "
8864 [(set (attr "length")
8865 (if_then_else (eq_attr "is_thumb" "yes")
8866 (const_int 2)
8867 (const_int 4)))]
8868 )
8869
8870 \f
8871 ;; Patterns to allow combination of arithmetic, cond code and shifts
8872
;; Fused "op Rd, Rn, Rm, <shift>" form: a shiftable ALU operation whose
;; second input is itself a shifted register.  Alternatives cover
;; immediate (M) vs register (r) shift amounts on ARM vs Thumb-2.
8873 (define_insn "*arith_shiftsi"
8874 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8875 (match_operator:SI 1 "shiftable_operator"
8876 [(match_operator:SI 3 "shift_operator"
8877 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8878 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8879 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8880 "TARGET_32BIT"
8881 "%i1%?\\t%0, %2, %4%S3"
8882 [(set_attr "predicable" "yes")
8883 (set_attr "shift" "4")             ; operand 4 is the shifted input
8884 (set_attr "arch" "a,t2,t2,a")
8885 ;; Thumb2 doesn't allow the stack pointer to be used for
8886 ;; operand1 for all operations other than add and sub. In this case
8887 ;; the minus operation is a candidate for an rsub and hence needs
8888 ;; to be disabled.
8889 ;; We have to make sure to disable the fourth alternative if
8890 ;; the shift_operator is MULT, since otherwise the insn will
8891 ;; also match a multiply_accumulate pattern and validate_change
8892 ;; will allow a replacement of the constant with a register
8893 ;; despite the checks done in shift_operator.
8894 (set_attr_alternative "insn_enabled"
8895 [(const_string "yes")
8896 (if_then_else
8897 (match_operand:SI 1 "add_operator" "")
8898 (const_string "yes") (const_string "no"))
8899 (const_string "yes")
8900 (if_then_else
8901 (match_operand:SI 3 "mult_operator" "")
8902 (const_string "no") (const_string "yes"))])
8903 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
8904
;; Split a two-level combination -- op1(op2(shift(r4,r5), r6), r7) --
;; into two insns using the scratch register (operand 8) for the inner
;; result, so each half can match *arith_shiftsi above.
8905 (define_split
8906 [(set (match_operand:SI 0 "s_register_operand" "")
8907 (match_operator:SI 1 "shiftable_operator"
8908 [(match_operator:SI 2 "shiftable_operator"
8909 [(match_operator:SI 3 "shift_operator"
8910 [(match_operand:SI 4 "s_register_operand" "")
8911 (match_operand:SI 5 "reg_or_int_operand" "")])
8912 (match_operand:SI 6 "s_register_operand" "")])
8913 (match_operand:SI 7 "arm_rhs_operand" "")]))
8914 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8915 "TARGET_32BIT"
8916 [(set (match_dup 8)
8917 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8918 (match_dup 6)]))
8919 (set (match_dup 0)
8920 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8921 "")
8922
;; Flag-setting variant of *arith_shiftsi: performs the operation and
;; compares the result against zero (CC_NOOV: overflow not valid).
8923 (define_insn "*arith_shiftsi_compare0"
8924 [(set (reg:CC_NOOV CC_REGNUM)
8925 (compare:CC_NOOV
8926 (match_operator:SI 1 "shiftable_operator"
8927 [(match_operator:SI 3 "shift_operator"
8928 [(match_operand:SI 4 "s_register_operand" "r,r")
8929 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8930 (match_operand:SI 2 "s_register_operand" "r,r")])
8931 (const_int 0)))
8932 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8933 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8934 (match_dup 2)]))]
8935 "TARGET_32BIT"
8936 "%i1%.\\t%0, %2, %4%S3"
8937 [(set_attr "conds" "set")
8938 (set_attr "shift" "4")
8939 (set_attr "arch" "32,a")           ; register-shift form is ARM-only
8940 (set_attr "type" "alu_shift,alu_shift_reg")])
8941
;; As *arith_shiftsi_compare0 but only the flags are wanted; the
;; arithmetic result goes to a scratch register.
8942 (define_insn "*arith_shiftsi_compare0_scratch"
8943 [(set (reg:CC_NOOV CC_REGNUM)
8944 (compare:CC_NOOV
8945 (match_operator:SI 1 "shiftable_operator"
8946 [(match_operator:SI 3 "shift_operator"
8947 [(match_operand:SI 4 "s_register_operand" "r,r")
8948 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8949 (match_operand:SI 2 "s_register_operand" "r,r")])
8950 (const_int 0)))
8951 (clobber (match_scratch:SI 0 "=r,r"))]
8952 "TARGET_32BIT"
8953 "%i1%.\\t%0, %2, %4%S3"
8954 [(set_attr "conds" "set")
8955 (set_attr "shift" "4")
8956 (set_attr "arch" "32,a")
8957 (set_attr "type" "alu_shift,alu_shift_reg")])
8958
;; SUB with a shifted-register subtrahend: Rd = Rn - (Rm <shift> amt).
;; Separate from *arith_shiftsi because MINUS is not commutative.
8959 (define_insn "*sub_shiftsi"
8960 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8961 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8962 (match_operator:SI 2 "shift_operator"
8963 [(match_operand:SI 3 "s_register_operand" "r,r")
8964 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8965 "TARGET_32BIT"
8966 "sub%?\\t%0, %1, %3%S2"
8967 [(set_attr "predicable" "yes")
8968 (set_attr "shift" "3")             ; operand 3 is the shifted input
8969 (set_attr "arch" "32,a")
8970 (set_attr "type" "alu_shift,alu_shift_reg")])
8971
;; Flag-setting SUBS with shifted-register subtrahend; result kept.
8972 (define_insn "*sub_shiftsi_compare0"
8973 [(set (reg:CC_NOOV CC_REGNUM)
8974 (compare:CC_NOOV
8975 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8976 (match_operator:SI 2 "shift_operator"
8977 [(match_operand:SI 3 "s_register_operand" "r,r")
8978 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8979 (const_int 0)))
8980 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8981 (minus:SI (match_dup 1)
8982 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8983 "TARGET_32BIT"
8984 "sub%.\\t%0, %1, %3%S2"
8985 [(set_attr "conds" "set")
8986 (set_attr "shift" "3")
8987 (set_attr "arch" "32,a")
8988 (set_attr "type" "alu_shift,alu_shift_reg")])
8989
;; As *sub_shiftsi_compare0 but only the flags are wanted; the result
;; goes to a scratch register.
8990 (define_insn "*sub_shiftsi_compare0_scratch"
8991 [(set (reg:CC_NOOV CC_REGNUM)
8992 (compare:CC_NOOV
8993 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8994 (match_operator:SI 2 "shift_operator"
8995 [(match_operand:SI 3 "s_register_operand" "r,r")
8996 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8997 (const_int 0)))
8998 (clobber (match_scratch:SI 0 "=r,r"))]
8999 "TARGET_32BIT"
9000 "sub%.\\t%0, %1, %3%S2"
9001 [(set_attr "conds" "set")
9002 (set_attr "shift" "3")
9003 (set_attr "arch" "32,a")
9004 (set_attr "type" "alu_shift,alu_shift_reg")])
9005 \f
9006
;; AND a register with the 0/1 value of a condition already in the CC
;; register: conditionally materialise 0, else bit 0 of operand 2.
9007 (define_insn "*and_scc"
9008 [(set (match_operand:SI 0 "s_register_operand" "=r")
9009 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9010 [(match_operand 3 "cc_register" "") (const_int 0)])
9011 (match_operand:SI 2 "s_register_operand" "r")))]
9012 "TARGET_ARM"
9013 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9014 [(set_attr "conds" "use")
9015 (set_attr "insn" "mov")
9016 (set_attr "length" "8")]
9017 )
9018
;; OR a register with the 0/1 value of a condition already in the CC
;; register.  Second alternative copies operand 1 first when it is not
;; already in the destination.
9019 (define_insn "*ior_scc"
9020 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9021 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9022 [(match_operand 3 "cc_register" "") (const_int 0)])
9023 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9024 "TARGET_ARM"
9025 "@
9026 orr%d2\\t%0, %1, #1
9027 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9028 [(set_attr "conds" "use")
9029 (set_attr "length" "4,8")]
9030 )
9031
9032 ; A series of splitters for the compare_scc pattern below. Note that
9033 ; order is important.
;; (x < 0) as 0/1 is just the sign bit: a single logical shift right
;; by 31, no compare or conditional execution needed.
9034 (define_split
9035 [(set (match_operand:SI 0 "s_register_operand" "")
9036 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9037 (const_int 0)))
9038 (clobber (reg:CC CC_REGNUM))]
9039 "TARGET_32BIT && reload_completed"
9040 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9041
;; (x >= 0) as 0/1 is the complement of the sign bit: MVN then shift
;; right by 31.
9042 (define_split
9043 [(set (match_operand:SI 0 "s_register_operand" "")
9044 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9045 (const_int 0)))
9046 (clobber (reg:CC CC_REGNUM))]
9047 "TARGET_32BIT && reload_completed"
9048 [(set (match_dup 0) (not:SI (match_dup 1)))
9049 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9050
;; (x == 0) as 0/1: RSBS dest, x, #1 gives 1 when x is 0; if the
;; subtraction borrowed (x was neither 0 nor 1 gives borrow; x == 1
;; gives 0 already) the conditional move forces the result to 0.
9051 (define_split
9052 [(set (match_operand:SI 0 "s_register_operand" "")
9053 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9054 (const_int 0)))
9055 (clobber (reg:CC CC_REGNUM))]
9056 "TARGET_32BIT && reload_completed"
9057 [(parallel
9058 [(set (reg:CC CC_REGNUM)
9059 (compare:CC (const_int 1) (match_dup 1)))
9060 (set (match_dup 0)
9061 (minus:SI (const_int 1) (match_dup 1)))])
9062 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9063 (set (match_dup 0) (const_int 0)))])
9064
;; (x != C) as 0/1 for an arbitrary constant: SUBS dest, x, C (written
;; as an add of -C) leaves zero exactly when equal, then a conditional
;; move sets 1 on inequality.
9065 (define_split
9066 [(set (match_operand:SI 0 "s_register_operand" "")
9067 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9068 (match_operand:SI 2 "const_int_operand" "")))
9069 (clobber (reg:CC CC_REGNUM))]
9070 "TARGET_32BIT && reload_completed"
9071 [(parallel
9072 [(set (reg:CC CC_REGNUM)
9073 (compare:CC (match_dup 1) (match_dup 2)))
9074 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9075 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9076 (set (match_dup 0) (const_int 1)))]
9077 {
9078 /* Operand 3 is -C so the plus implements the subtraction.  */
9078 operands[3] = GEN_INT (-INTVAL (operands[2]));
9079 })
9080
;; (x != y) as 0/1 for a general add-operand: SUBS dest, x, y then a
;; conditional move of 1 on inequality.  Ordered after the const_int
;; split above (that one is more specific).
9081 (define_split
9082 [(set (match_operand:SI 0 "s_register_operand" "")
9083 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9084 (match_operand:SI 2 "arm_add_operand" "")))
9085 (clobber (reg:CC CC_REGNUM))]
9086 "TARGET_32BIT && reload_completed"
9087 [(parallel
9088 [(set (reg:CC_NOOV CC_REGNUM)
9089 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9090 (const_int 0)))
9091 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9092 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9093 (set (match_dup 0) (const_int 1)))])
9094
;; General store-condition-code: dest = (x OP y) as 0/1.  Emitted as a
;; compare followed by two conditionally executed moves; the split
;; synthesises the condition (operand 5) and its inverse (operand 4)
;; on the freshly set CC register.
9095 (define_insn_and_split "*compare_scc"
9096 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9097 (match_operator:SI 1 "arm_comparison_operator"
9098 [(match_operand:SI 2 "s_register_operand" "r,r")
9099 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9100 (clobber (reg:CC CC_REGNUM))]
9101 "TARGET_32BIT"
9102 "#"
9103 "&& reload_completed"
9104 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9105 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9106 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9107 {
9108 rtx tmp1;
9109 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9110 operands[2], operands[3]);
9111 enum rtx_code rc = GET_CODE (operands[1]);
9112
9113 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9114
9115 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9116 /* FP comparisons must keep unordered-ness correct when inverted.  */
9116 if (mode == CCFPmode || mode == CCFPEmode)
9117 rc = reverse_condition_maybe_unordered (rc);
9118 else
9119 rc = reverse_condition (rc);
9120 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9121 })
9122
9123 ;; Attempt to improve the sequence generated by the compare_scc splitters
9124 ;; not to use conditional execution.
;; Attempt to improve the sequence generated by the compare_scc splitters
;; not to use conditional execution.  Rewrites
;;   cmp ; movne #0 ; moveq #1
;; into a three-instruction branchless sequence:
;;   SUBS tmp, x, y ; RSBS dest, tmp, #0 ; ADC dest, dest, tmp
;; NOTE(review): the final ADC-form parallel folds the carry from the
;; reversed subtraction into the 0/1 result -- confirm against the
;; matching *addsi3 carry-in patterns elsewhere in this file.
9125 (define_peephole2
9126 [(set (reg:CC CC_REGNUM)
9127 (compare:CC (match_operand:SI 1 "register_operand" "")
9128 (match_operand:SI 2 "arm_rhs_operand" "")))
9129 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9130 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9131 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9132 (set (match_dup 0) (const_int 1)))
9133 (match_scratch:SI 3 "r")]
9134 "TARGET_32BIT"
9135 [(parallel
9136 [(set (reg:CC CC_REGNUM)
9137 (compare:CC (match_dup 1) (match_dup 2)))
9138 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9139 (parallel
9140 [(set (reg:CC CC_REGNUM)
9141 (compare:CC (const_int 0) (match_dup 3)))
9142 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9143 (parallel
9144 [(set (match_dup 0)
9145 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9146 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9147 (clobber (reg:CC CC_REGNUM))])])
9148
;; Conditional select between two rhs operands based on a condition
;; (possibly negated via the equality_operator wrapper) already held in
;; a CC register.  Alternatives where one input aliases the destination
;; need only one conditional MOV; the third needs two.
9149 (define_insn "*cond_move"
9150 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9151 (if_then_else:SI (match_operator 3 "equality_operator"
9152 [(match_operator 4 "arm_comparison_operator"
9153 [(match_operand 5 "cc_register" "") (const_int 0)])
9154 (const_int 0)])
9155 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9156 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9157 "TARGET_ARM"
9158 "*
9159 /* NE wrapper means the stored condition selects operand 2; EQ means
it selects operand 1.  Skip the MOV whose source already equals the
destination (alternatives 0 and 1).  */
9159 if (GET_CODE (operands[3]) == NE)
9160 {
9161 if (which_alternative != 1)
9162 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9163 if (which_alternative != 0)
9164 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9165 return \"\";
9166 }
9167 if (which_alternative != 0)
9168 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9169 if (which_alternative != 1)
9170 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9171 return \"\";
9172 "
9173 [(set_attr "conds" "use")
9174 (set_attr "insn" "mov")
9175 (set_attr "length" "4,4,8")]
9176 )
9177
;; Apply a shiftable operator between a register and the 0/1 result of
;; a comparison, doing the comparison here (clobbers CC).  Special-cases
;; (x < 0), AND and MINUS to avoid the generic cmp + two-insn tail.
9178 (define_insn "*cond_arith"
9179 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9180 (match_operator:SI 5 "shiftable_operator"
9181 [(match_operator:SI 4 "arm_comparison_operator"
9182 [(match_operand:SI 2 "s_register_operand" "r,r")
9183 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9184 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9185 (clobber (reg:CC CC_REGNUM))]
9186 "TARGET_ARM"
9187 "*
9188 /* (x < 0) as an operand is just the sign bit: fold the shift into the
operation itself.  */
9188 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9189 return \"%i5\\t%0, %1, %2, lsr #31\";
9190
9191 output_asm_insn (\"cmp\\t%2, %3\", operands);
9192 if (GET_CODE (operands[5]) == AND)
9193 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9194 else if (GET_CODE (operands[5]) == MINUS)
9195 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9196 else if (which_alternative != 0)
9197 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9198 return \"%i5%d4\\t%0, %1, #1\";
9199 "
9200 [(set_attr "conds" "clob")
9201 (set_attr "length" "12")]
9202 )
9203
;; dest = operand1 - (comparison result 0/1): compare, optionally copy
;; operand 1 into place on the false condition, then SUB #1 on the true
;; condition.
9204 (define_insn "*cond_sub"
9205 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9206 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9207 (match_operator:SI 4 "arm_comparison_operator"
9208 [(match_operand:SI 2 "s_register_operand" "r,r")
9209 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9210 (clobber (reg:CC CC_REGNUM))]
9211 "TARGET_ARM"
9212 "*
9213 output_asm_insn (\"cmp\\t%2, %3\", operands);
9214 if (which_alternative != 0)
9215 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9216 return \"sub%d4\\t%0, %1, #1\";
9217 "
9218 [(set_attr "conds" "clob")
9219 (set_attr "length" "8,12")]
9220 )
9221
;; Combined conditional compare: (cond4 ? cond5 : 0) feeding a dominance
;; CC register.  Emitted as an unconditional compare followed by a
;; conditional compare (with an IT prefix on Thumb-2).  SWAP selects
;; which comparison dominates and hence which goes first; CMN forms are
;; used when the constant operand needs negating (#%n).
9222 (define_insn "*cmp_ite0"
9223 [(set (match_operand 6 "dominant_cc_register" "")
9224 (compare
9225 (if_then_else:SI
9226 (match_operator 4 "arm_comparison_operator"
9227 [(match_operand:SI 0 "s_register_operand"
9228 "l,l,l,r,r,r,r,r,r")
9229 (match_operand:SI 1 "arm_add_operand"
9230 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9231 (match_operator:SI 5 "arm_comparison_operator"
9232 [(match_operand:SI 2 "s_register_operand"
9233 "l,r,r,l,l,r,r,r,r")
9234 (match_operand:SI 3 "arm_add_operand"
9235 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9236 (const_int 0))
9237 (const_int 0)))]
9238 "TARGET_32BIT"
9239 "*
9240 {
9241 /* Second (conditional) compare, indexed by [cmp-vs-cmn][swap].  */
9241 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9242 {
9243 {\"cmp%d5\\t%0, %1\",
9244 \"cmp%d4\\t%2, %3\"},
9245 {\"cmn%d5\\t%0, #%n1\",
9246 \"cmp%d4\\t%2, %3\"},
9247 {\"cmp%d5\\t%0, %1\",
9248 \"cmn%d4\\t%2, #%n3\"},
9249 {\"cmn%d5\\t%0, #%n1\",
9250 \"cmn%d4\\t%2, #%n3\"}
9251 };
9252 /* First (unconditional) compare.  */
9252 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9253 {
9254 {\"cmp\\t%2, %3\",
9255 \"cmp\\t%0, %1\"},
9256 {\"cmp\\t%2, %3\",
9257 \"cmn\\t%0, #%n1\"},
9258 {\"cmn\\t%2, #%n3\",
9259 \"cmp\\t%0, %1\"},
9260 {\"cmn\\t%2, #%n3\",
9261 \"cmn\\t%0, #%n1\"}
9262 };
9263 /* Thumb-2 IT prefix guarding the second compare.  */
9263 static const char * const ite[2] =
9264 {
9265 \"it\\t%d5\",
9266 \"it\\t%d4\"
9267 };
9268 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9269 CMP_CMP, CMN_CMP, CMP_CMP,
9270 CMN_CMP, CMP_CMN, CMN_CMN};
9271 int swap =
9272 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9273
9274 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9275 if (TARGET_THUMB2) {
9276 output_asm_insn (ite[swap], operands);
9277 }
9278 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9279 return \"\";
9280 }"
9281 [(set_attr "conds" "set")
9282 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9283 (set_attr_alternative "length"
9284 [(const_int 6)
9285 (const_int 8)
9286 (const_int 8)
9287 (const_int 8)
9288 (const_int 8)
9289 (if_then_else (eq_attr "is_thumb" "no")
9290 (const_int 8)
9291 (const_int 10))
9292 (if_then_else (eq_attr "is_thumb" "no")
9293 (const_int 8)
9294 (const_int 10))
9295 (if_then_else (eq_attr "is_thumb" "no")
9296 (const_int 8)
9297 (const_int 10))
9298 (if_then_else (eq_attr "is_thumb" "no")
9299 (const_int 8)
9300 (const_int 10))])]
9301 )
9302
;; As *cmp_ite0 but with (const_int 1) in the else arm: (cond4 ? cond5
;; : 1).  Here the FIRST compare is conditional and the dominance test
;; uses the reverse of cond4, so the %D (inverse-condition) suffixes
;; appear in the second-compare table.
9303 (define_insn "*cmp_ite1"
9304 [(set (match_operand 6 "dominant_cc_register" "")
9305 (compare
9306 (if_then_else:SI
9307 (match_operator 4 "arm_comparison_operator"
9308 [(match_operand:SI 0 "s_register_operand"
9309 "l,l,l,r,r,r,r,r,r")
9310 (match_operand:SI 1 "arm_add_operand"
9311 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9312 (match_operator:SI 5 "arm_comparison_operator"
9313 [(match_operand:SI 2 "s_register_operand"
9314 "l,r,r,l,l,r,r,r,r")
9315 (match_operand:SI 3 "arm_add_operand"
9316 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9317 (const_int 1))
9318 (const_int 0)))]
9319 "TARGET_32BIT"
9320 "*
9321 {
9322 /* First (unconditional) compare.  */
9322 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9323 {
9324 {\"cmp\\t%0, %1\",
9325 \"cmp\\t%2, %3\"},
9326 {\"cmn\\t%0, #%n1\",
9327 \"cmp\\t%2, %3\"},
9328 {\"cmp\\t%0, %1\",
9329 \"cmn\\t%2, #%n3\"},
9330 {\"cmn\\t%0, #%n1\",
9331 \"cmn\\t%2, #%n3\"}
9332 };
9333 /* Second (conditional) compare; %D5 = inverse of cond5.  */
9333 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9334 {
9335 {\"cmp%d4\\t%2, %3\",
9336 \"cmp%D5\\t%0, %1\"},
9337 {\"cmp%d4\\t%2, %3\",
9338 \"cmn%D5\\t%0, #%n1\"},
9339 {\"cmn%d4\\t%2, #%n3\",
9340 \"cmp%D5\\t%0, %1\"},
9341 {\"cmn%d4\\t%2, #%n3\",
9342 \"cmn%D5\\t%0, #%n1\"}
9343 };
9344 static const char * const ite[2] =
9345 {
9346 \"it\\t%d4\",
9347 \"it\\t%D5\"
9348 };
9349 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9350 CMP_CMP, CMN_CMP, CMP_CMP,
9351 CMN_CMP, CMP_CMN, CMN_CMN};
9352 int swap =
9353 comparison_dominates_p (GET_CODE (operands[5]),
9354 reverse_condition (GET_CODE (operands[4])));
9355
9356 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9357 if (TARGET_THUMB2) {
9358 output_asm_insn (ite[swap], operands);
9359 }
9360 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9361 return \"\";
9362 }"
9363 [(set_attr "conds" "set")
9364 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9365 (set_attr_alternative "length"
9366 [(const_int 6)
9367 (const_int 8)
9368 (const_int 8)
9369 (const_int 8)
9370 (const_int 8)
9371 (if_then_else (eq_attr "is_thumb" "no")
9372 (const_int 8)
9373 (const_int 10))
9374 (if_then_else (eq_attr "is_thumb" "no")
9375 (const_int 8)
9376 (const_int 10))
9377 (if_then_else (eq_attr "is_thumb" "no")
9378 (const_int 8)
9379 (const_int 10))
9380 (if_then_else (eq_attr "is_thumb" "no")
9381 (const_int 8)
9382 (const_int 10))])]
9383 )
9384
;; AND of two comparisons into a dominance CC register: compare, then
;; conditionally compare under the first condition (IT prefix on
;; Thumb-2).  Same table layout as *cmp_ite0.
9385 (define_insn "*cmp_and"
9386 [(set (match_operand 6 "dominant_cc_register" "")
9387 (compare
9388 (and:SI
9389 (match_operator 4 "arm_comparison_operator"
9390 [(match_operand:SI 0 "s_register_operand"
9391 "l,l,l,r,r,r,r,r,r")
9392 (match_operand:SI 1 "arm_add_operand"
9393 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9394 (match_operator:SI 5 "arm_comparison_operator"
9395 [(match_operand:SI 2 "s_register_operand"
9396 "l,r,r,l,l,r,r,r,r")
9397 (match_operand:SI 3 "arm_add_operand"
9398 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9399 (const_int 0)))]
9400 "TARGET_32BIT"
9401 "*
9402 {
9403 /* Second (conditional) compare.  */
9403 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9404 {
9405 {\"cmp%d5\\t%0, %1\",
9406 \"cmp%d4\\t%2, %3\"},
9407 {\"cmn%d5\\t%0, #%n1\",
9408 \"cmp%d4\\t%2, %3\"},
9409 {\"cmp%d5\\t%0, %1\",
9410 \"cmn%d4\\t%2, #%n3\"},
9411 {\"cmn%d5\\t%0, #%n1\",
9412 \"cmn%d4\\t%2, #%n3\"}
9413 };
9414 /* First (unconditional) compare.  */
9414 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9415 {
9416 {\"cmp\\t%2, %3\",
9417 \"cmp\\t%0, %1\"},
9418 {\"cmp\\t%2, %3\",
9419 \"cmn\\t%0, #%n1\"},
9420 {\"cmn\\t%2, #%n3\",
9421 \"cmp\\t%0, %1\"},
9422 {\"cmn\\t%2, #%n3\",
9423 \"cmn\\t%0, #%n1\"}
9424 };
9425 static const char *const ite[2] =
9426 {
9427 \"it\\t%d5\",
9428 \"it\\t%d4\"
9429 };
9430 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9431 CMP_CMP, CMN_CMP, CMP_CMP,
9432 CMN_CMP, CMP_CMN, CMN_CMN};
9433 int swap =
9434 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9435
9436 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9437 if (TARGET_THUMB2) {
9438 output_asm_insn (ite[swap], operands);
9439 }
9440 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9441 return \"\";
9442 }"
9443 [(set_attr "conds" "set")
9444 (set_attr "predicable" "no")
9445 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9446 (set_attr_alternative "length"
9447 [(const_int 6)
9448 (const_int 8)
9449 (const_int 8)
9450 (const_int 8)
9451 (const_int 8)
9452 (if_then_else (eq_attr "is_thumb" "no")
9453 (const_int 8)
9454 (const_int 10))
9455 (if_then_else (eq_attr "is_thumb" "no")
9456 (const_int 8)
9457 (const_int 10))
9458 (if_then_else (eq_attr "is_thumb" "no")
9459 (const_int 8)
9460 (const_int 10))
9461 (if_then_else (eq_attr "is_thumb" "no")
9462 (const_int 8)
9463 (const_int 10))])]
9464 )
9465
;; OR of two comparisons into a dominance CC register: compare, then
;; conditionally compare under the INVERSE (%D) of the first condition
;; -- the second test only matters when the first failed.
9466 (define_insn "*cmp_ior"
9467 [(set (match_operand 6 "dominant_cc_register" "")
9468 (compare
9469 (ior:SI
9470 (match_operator 4 "arm_comparison_operator"
9471 [(match_operand:SI 0 "s_register_operand"
9472 "l,l,l,r,r,r,r,r,r")
9473 (match_operand:SI 1 "arm_add_operand"
9474 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9475 (match_operator:SI 5 "arm_comparison_operator"
9476 [(match_operand:SI 2 "s_register_operand"
9477 "l,r,r,l,l,r,r,r,r")
9478 (match_operand:SI 3 "arm_add_operand"
9479 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9480 (const_int 0)))]
9481 "TARGET_32BIT"
9482 "*
9483 {
9484 /* First (unconditional) compare.  */
9484 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9485 {
9486 {\"cmp\\t%0, %1\",
9487 \"cmp\\t%2, %3\"},
9488 {\"cmn\\t%0, #%n1\",
9489 \"cmp\\t%2, %3\"},
9490 {\"cmp\\t%0, %1\",
9491 \"cmn\\t%2, #%n3\"},
9492 {\"cmn\\t%0, #%n1\",
9493 \"cmn\\t%2, #%n3\"}
9494 };
9495 /* Second compare, executed only when the first condition failed.  */
9495 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9496 {
9497 {\"cmp%D4\\t%2, %3\",
9498 \"cmp%D5\\t%0, %1\"},
9499 {\"cmp%D4\\t%2, %3\",
9500 \"cmn%D5\\t%0, #%n1\"},
9501 {\"cmn%D4\\t%2, #%n3\",
9502 \"cmp%D5\\t%0, %1\"},
9503 {\"cmn%D4\\t%2, #%n3\",
9504 \"cmn%D5\\t%0, #%n1\"}
9505 };
9506 static const char *const ite[2] =
9507 {
9508 \"it\\t%D4\",
9509 \"it\\t%D5\"
9510 };
9511 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9512 CMP_CMP, CMN_CMP, CMP_CMP,
9513 CMN_CMP, CMP_CMN, CMN_CMN};
9514 int swap =
9515 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9516
9517 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9518 if (TARGET_THUMB2) {
9519 output_asm_insn (ite[swap], operands);
9520 }
9521 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9522 return \"\";
9523 }
9524 "
9525 [(set_attr "conds" "set")
9526 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9527 (set_attr_alternative "length"
9528 [(const_int 6)
9529 (const_int 8)
9530 (const_int 8)
9531 (const_int 8)
9532 (const_int 8)
9533 (if_then_else (eq_attr "is_thumb" "no")
9534 (const_int 8)
9535 (const_int 10))
9536 (if_then_else (eq_attr "is_thumb" "no")
9537 (const_int 8)
9538 (const_int 10))
9539 (if_then_else (eq_attr "is_thumb" "no")
9540 (const_int 8)
9541 (const_int 10))
9542 (if_then_else (eq_attr "is_thumb" "no")
9543 (const_int 8)
9544 (const_int 10))])]
9545 )
9546
;; dest = (cmp1 || cmp2) as 0/1, valid only when one comparison
;; dominates the other (arm_select_dominance_cc_mode != CCmode).  Split
;; after reload into a *cmp_ior-style conditional compare plus a
;; store-flag of the combined condition.
9547 (define_insn_and_split "*ior_scc_scc"
9548 [(set (match_operand:SI 0 "s_register_operand" "=r")
9549 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9550 [(match_operand:SI 1 "s_register_operand" "r")
9551 (match_operand:SI 2 "arm_add_operand" "rIL")])
9552 (match_operator:SI 6 "arm_comparison_operator"
9553 [(match_operand:SI 4 "s_register_operand" "r")
9554 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9555 (clobber (reg:CC CC_REGNUM))]
9556 "TARGET_32BIT
9557 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9558 != CCmode)"
9559 "#"
9560 "TARGET_32BIT && reload_completed"
9561 [(set (match_dup 7)
9562 (compare
9563 (ior:SI
9564 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9565 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9566 (const_int 0)))
9567 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9568 "operands[7]
9569 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9570 DOM_CC_X_OR_Y),
9571 CC_REGNUM);"
9572 [(set_attr "conds" "clob")
9573 (set_attr "length" "16")])
9574
9575 ; If the above pattern is followed by a CMP insn, then the compare is
9576 ; redundant, since we can rework the conditional instruction that follows.
;; If the above pattern is followed by a CMP insn, then the compare is
;; redundant, since we can rework the conditional instruction that follows.
;; This variant keeps both the dominance CC result and the 0/1 value.
9577 (define_insn_and_split "*ior_scc_scc_cmp"
9578 [(set (match_operand 0 "dominant_cc_register" "")
9579 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9580 [(match_operand:SI 1 "s_register_operand" "r")
9581 (match_operand:SI 2 "arm_add_operand" "rIL")])
9582 (match_operator:SI 6 "arm_comparison_operator"
9583 [(match_operand:SI 4 "s_register_operand" "r")
9584 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9585 (const_int 0)))
9586 (set (match_operand:SI 7 "s_register_operand" "=r")
9587 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9588 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9589 "TARGET_32BIT"
9590 "#"
9591 "TARGET_32BIT && reload_completed"
9592 [(set (match_dup 0)
9593 (compare
9594 (ior:SI
9595 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9596 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9597 (const_int 0)))
9598 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9599 ""
9600 [(set_attr "conds" "set")
9601 (set_attr "length" "16")])
9602
;; dest = (cmp1 && cmp2) as 0/1; AND counterpart of *ior_scc_scc.  The
;; dominance check is repeated in the split condition since operands
;; may have changed between recognition and splitting.
9603 (define_insn_and_split "*and_scc_scc"
9604 [(set (match_operand:SI 0 "s_register_operand" "=r")
9605 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9606 [(match_operand:SI 1 "s_register_operand" "r")
9607 (match_operand:SI 2 "arm_add_operand" "rIL")])
9608 (match_operator:SI 6 "arm_comparison_operator"
9609 [(match_operand:SI 4 "s_register_operand" "r")
9610 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9611 (clobber (reg:CC CC_REGNUM))]
9612 "TARGET_32BIT
9613 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9614 != CCmode)"
9615 "#"
9616 "TARGET_32BIT && reload_completed
9617 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9618 != CCmode)"
9619 [(set (match_dup 7)
9620 (compare
9621 (and:SI
9622 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9623 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9624 (const_int 0)))
9625 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9626 "operands[7]
9627 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9628 DOM_CC_X_AND_Y),
9629 CC_REGNUM);"
9630 [(set_attr "conds" "clob")
9631 (set_attr "length" "16")])
9632
9633 ; If the above pattern is followed by a CMP insn, then the compare is
9634 ; redundant, since we can rework the conditional instruction that follows.
;; AND-of-scc variant of *ior_scc_scc_cmp: the AND result is both stored in
;; operand 7 and compared against zero in a dominance CC register (operand 0),
;; so the split needs no extra CMP instruction.
9635 (define_insn_and_split "*and_scc_scc_cmp"
9636   [(set (match_operand 0 "dominant_cc_register" "")
9637 	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9638 			  [(match_operand:SI 1 "s_register_operand" "r")
9639 			   (match_operand:SI 2 "arm_add_operand" "rIL")])
9640 			 (match_operator:SI 6 "arm_comparison_operator"
9641 			  [(match_operand:SI 4 "s_register_operand" "r")
9642 			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
9643 		 (const_int 0)))
9644    (set (match_operand:SI 7 "s_register_operand" "=r")
9645 	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9646 		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9647   "TARGET_32BIT"
9648   "#"
9649   "TARGET_32BIT && reload_completed"
9650   [(set (match_dup 0)
9651 	(compare
9652 	 (and:SI
9653 	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9654 	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9655 	 (const_int 0)))
9656    (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9657   ""
9658   [(set_attr "conds" "set")
9659    (set_attr "length" "16")])
9660
9661 ;; If there is no dominance in the comparison, then we can still save an
9662 ;; instruction in the AND case, since we can know that the second compare
9663 ;; need only zero the value if false (if true, then the value is already
9664 ;; correct).
;; Non-dominance AND of two scc values: compute the first scc into operand 0,
;; then do the second compare and conditionally zero operand 0 when that
;; comparison is false.  Earlyclobber on operand 0 keeps it distinct from the
;; still-live inputs of the second compare.
9665 (define_insn_and_split "*and_scc_scc_nodom"
9666   [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9667 	(and:SI (match_operator:SI 3 "arm_comparison_operator"
9668 		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9669 		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9670 		(match_operator:SI 6 "arm_comparison_operator"
9671 		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9672 		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9673    (clobber (reg:CC CC_REGNUM))]
9674   "TARGET_32BIT
9675    && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9676        == CCmode)"
9677   "#"
9678   "TARGET_32BIT && reload_completed"
9679   [(parallel [(set (match_dup 0)
9680 		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9681 	      (clobber (reg:CC CC_REGNUM))])
9682    (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9683    (set (match_dup 0)
9684 	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9685 			 (match_dup 0)
9686 			 (const_int 0)))]
;; Operand 7 is the CC reg in the mode selected for the second comparison;
;; operand 8 is the COMPARE rtx feeding it.
9687   "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9688 					      operands[4], operands[5]),
9689 			      CC_REGNUM);
9690    operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9691 				  operands[5]);"
9692   [(set_attr "conds" "clob")
9693    (set_attr "length" "20")])
9694
;; Split ((x & 1) | scc) compared with zero (CC_NOOV): OR the scc result into
;; a scratch together with x, then test bit 0 of the scratch.  Valid because
;; the scc value is 0 or 1, so only bit 0 matters for the final test.
9695 (define_split
9696   [(set (reg:CC_NOOV CC_REGNUM)
9697 	(compare:CC_NOOV (ior:SI
9698 			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9699 				  (const_int 1))
9700 			  (match_operator:SI 1 "arm_comparison_operator"
9701 			   [(match_operand:SI 2 "s_register_operand" "")
9702 			    (match_operand:SI 3 "arm_add_operand" "")]))
9703 			 (const_int 0)))
9704    (clobber (match_operand:SI 4 "s_register_operand" ""))]
9705   "TARGET_ARM"
9706   [(set (match_dup 4)
9707 	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9708 		(match_dup 0)))
9709    (set (reg:CC_NOOV CC_REGNUM)
9710 	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9711 			 (const_int 0)))]
9712   "")
9713
;; Mirror image of the previous split with the IOR operands swapped
;; (scc | (x & 1)); produces the identical replacement sequence.
9714 (define_split
9715   [(set (reg:CC_NOOV CC_REGNUM)
9716 	(compare:CC_NOOV (ior:SI
9717 			  (match_operator:SI 1 "arm_comparison_operator"
9718 			   [(match_operand:SI 2 "s_register_operand" "")
9719 			    (match_operand:SI 3 "arm_add_operand" "")])
9720 			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9721 				  (const_int 1)))
9722 			 (const_int 0)))
9723    (clobber (match_operand:SI 4 "s_register_operand" ""))]
9724   "TARGET_ARM"
9725   [(set (match_dup 4)
9726 	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9727 		(match_dup 0)))
9728    (set (reg:CC_NOOV CC_REGNUM)
9729 	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9730 			 (const_int 0)))]
9731   "")
9732 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9733
;; Set operand 0 to -1 when the comparison holds, 0 otherwise (negated scc).
;; Special cases: LT against zero is a single ASR #31; NE uses SUBS/MVNNE.
;; The general case is CMP + two conditional MOV/MVN (worst case 12 bytes).
9734 (define_insn "*negscc"
9735   [(set (match_operand:SI 0 "s_register_operand" "=r")
9736 	(neg:SI (match_operator 3 "arm_comparison_operator"
9737 		 [(match_operand:SI 1 "s_register_operand" "r")
9738 		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9739    (clobber (reg:CC CC_REGNUM))]
9740   "TARGET_ARM"
9741   "*
9742   if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9743     return \"mov\\t%0, %1, asr #31\";
9744 
9745   if (GET_CODE (operands[3]) == NE)
9746     return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9747 
9748   output_asm_insn (\"cmp\\t%1, %2\", operands);
9749   output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9750   return \"mvn%d3\\t%0, #0\";
9751   "
9752   [(set_attr "conds" "clob")
9753    (set_attr "length" "12")]
9754 )
9755
;; Conditional move: %0 = (%3 <op5> %4) ? %1 : %2, clobbering the flags.
;; LT/GE comparisons against zero with one register arm are open-coded with
;; AND/BIC of an ASR #31 mask; otherwise CMP (or CMN for negated constants)
;; followed by up to two predicated MOVs.  Alternatives tie one arm to the
;; destination so that only the other arm needs a conditional MOV.
9756 (define_insn "movcond"
9757   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9758 	(if_then_else:SI
9759 	 (match_operator 5 "arm_comparison_operator"
9760 	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
9761 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9762 	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9763 	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9764    (clobber (reg:CC CC_REGNUM))]
9765   "TARGET_ARM"
9766   "*
9767   if (GET_CODE (operands[5]) == LT
9768       && (operands[4] == const0_rtx))
9769     {
9770       if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9771 	{
9772 	  if (operands[2] == const0_rtx)
9773 	    return \"and\\t%0, %1, %3, asr #31\";
9774 	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9775 	}
9776       else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9777 	{
9778 	  if (operands[1] == const0_rtx)
9779 	    return \"bic\\t%0, %2, %3, asr #31\";
9780 	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9781 	}
9782       /* The only case that falls through to here is when both ops 1 & 2
9783 	 are constants.  */
9784     }
9785 
9786   if (GET_CODE (operands[5]) == GE
9787       && (operands[4] == const0_rtx))
9788     {
9789       if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9790 	{
9791 	  if (operands[2] == const0_rtx)
9792 	    return \"bic\\t%0, %1, %3, asr #31\";
9793 	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9794 	}
9795       else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9796 	{
9797 	  if (operands[1] == const0_rtx)
9798 	    return \"and\\t%0, %2, %3, asr #31\";
9799 	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9800 	}
9801       /* The only case that falls through to here is when both ops 1 & 2
9802 	 are constants.  */
9803     }
9804   if (GET_CODE (operands[4]) == CONST_INT
9805       && !const_ok_for_arm (INTVAL (operands[4])))
9806     output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9807   else
9808     output_asm_insn (\"cmp\\t%3, %4\", operands);
9809   if (which_alternative != 0)
9810     output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9811   if (which_alternative != 1)
9812     output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9813   return \"\";
9814   "
9815   [(set_attr "conds" "clob")
9816    (set_attr "length" "8,8,12")]
9817 )
9818
9819 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9820
;; %0 = (%4 <op6> %5) ? (%2 + %3) : %1, comparison still explicit.  Emits
;; only "#" -- presumably rewritten later into cmp + predicated add/mov by
;; the backend's conditional-execution handling (not visible in this chunk).
9821 (define_insn "*ifcompare_plus_move"
9822   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9823 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9824 			  [(match_operand:SI 4 "s_register_operand" "r,r")
9825 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9826 			 (plus:SI
9827 			  (match_operand:SI 2 "s_register_operand" "r,r")
9828 			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9829 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9830    (clobber (reg:CC CC_REGNUM))]
9831   "TARGET_ARM"
9832   "#"
9833   [(set_attr "conds" "clob")
9834    (set_attr "length" "8,12")]
9835 )
9836
;; Predicated form of the above once the flags are already set (operand 5 is
;; a CC register): ADD/SUB on the true condition, optional MOV on the false
;; one.  The L alternatives fold a negatable constant into SUB.
9837 (define_insn "*if_plus_move"
9838   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9839 	(if_then_else:SI
9840 	 (match_operator 4 "arm_comparison_operator"
9841 	  [(match_operand 5 "cc_register" "") (const_int 0)])
9842 	 (plus:SI
9843 	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9844 	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9845 	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9846   "TARGET_ARM"
9847   "@
9848    add%d4\\t%0, %2, %3
9849    sub%d4\\t%0, %2, #%n3
9850    add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9851    sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9852   [(set_attr "conds" "use")
9853    (set_attr "length" "4,4,8,8")
9854    (set_attr "type" "*,*,*,*")]
9855 )
9856
;; Same as *ifcompare_plus_move but with the arms swapped:
;; %0 = (%4 <op6> %5) ? %1 : (%2 + %3).  Output is deferred ("#").
9857 (define_insn "*ifcompare_move_plus"
9858   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9859 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9860 			  [(match_operand:SI 4 "s_register_operand" "r,r")
9861 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9862 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9863 			 (plus:SI
9864 			  (match_operand:SI 2 "s_register_operand" "r,r")
9865 			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9866    (clobber (reg:CC CC_REGNUM))]
9867   "TARGET_ARM"
9868   "#"
9869   [(set_attr "conds" "clob")
9870    (set_attr "length" "8,12")]
9871 )
9872
;; Predicated swapped-arm form: the ADD/SUB executes on the *false*
;; condition (%D4) and the MOV of operand 1 on the true one (%d4).
9873 (define_insn "*if_move_plus"
9874   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9875 	(if_then_else:SI
9876 	 (match_operator 4 "arm_comparison_operator"
9877 	  [(match_operand 5 "cc_register" "") (const_int 0)])
9878 	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9879 	 (plus:SI
9880 	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9881 	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9882   "TARGET_ARM"
9883   "@
9884    add%D4\\t%0, %2, %3
9885    sub%D4\\t%0, %2, #%n3
9886    add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9887    sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9888   [(set_attr "conds" "use")
9889    (set_attr "length" "4,4,8,8")
9890    (set_attr "type" "*,*,*,*")]
9891 )
9892
;; Select between two shiftable-operator results with the comparison still
;; explicit; output deferred ("#"), worst case cmp + two predicated ops.
9893 (define_insn "*ifcompare_arith_arith"
9894   [(set (match_operand:SI 0 "s_register_operand" "=r")
9895 	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
9896 			  [(match_operand:SI 5 "s_register_operand" "r")
9897 			   (match_operand:SI 6 "arm_add_operand" "rIL")])
9898 			 (match_operator:SI 8 "shiftable_operator"
9899 			  [(match_operand:SI 1 "s_register_operand" "r")
9900 			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
9901 			 (match_operator:SI 7 "shiftable_operator"
9902 			  [(match_operand:SI 3 "s_register_operand" "r")
9903 			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9904    (clobber (reg:CC CC_REGNUM))]
9905   "TARGET_ARM"
9906   "#"
9907   [(set_attr "conds" "clob")
9908    (set_attr "length" "12")]
9909 )
9910
;; Flags already set: emit both data-processing ops back to back, one
;; predicated on the condition (%d5) and one on its inverse (%D5).
;; %I expands the shiftable operator's mnemonic.
9911 (define_insn "*if_arith_arith"
9912   [(set (match_operand:SI 0 "s_register_operand" "=r")
9913 	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
9914 			  [(match_operand 8 "cc_register" "") (const_int 0)])
9915 			 (match_operator:SI 6 "shiftable_operator"
9916 			  [(match_operand:SI 1 "s_register_operand" "r")
9917 			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
9918 			 (match_operator:SI 7 "shiftable_operator"
9919 			  [(match_operand:SI 3 "s_register_operand" "r")
9920 			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9921   "TARGET_ARM"
9922   "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9923   [(set_attr "conds" "use")
9924    (set_attr "length" "8")]
9925 )
9926
;; %0 = (%2 <op6> %3) ? (%4 <op7> %5) : %1 with an explicit compare.
;; For LT/GE against zero with an identity-at-zero operator and suitable
;; register tying, a two-insn AND/BIC mask sequence is used; otherwise
;; CMP (or CMN) + predicated op + optional predicated MOV.
9927 (define_insn "*ifcompare_arith_move"
9928   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9929 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9930 			  [(match_operand:SI 2 "s_register_operand" "r,r")
9931 			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9932 			 (match_operator:SI 7 "shiftable_operator"
9933 			  [(match_operand:SI 4 "s_register_operand" "r,r")
9934 			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9935 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9936    (clobber (reg:CC CC_REGNUM))]
9937   "TARGET_ARM"
9938   "*
9939   /* If we have an operation where (op x 0) is the identity operation and
9940      the conditional operator is LT or GE and we are comparing against zero and
9941      everything is in registers then we can do this in two instructions.  */
9942   if (operands[3] == const0_rtx
9943       && GET_CODE (operands[7]) != AND
9944       && GET_CODE (operands[5]) == REG
9945       && GET_CODE (operands[1]) == REG 
9946       && REGNO (operands[1]) == REGNO (operands[4])
9947       && REGNO (operands[4]) != REGNO (operands[0]))
9948     {
9949       if (GET_CODE (operands[6]) == LT)
9950 	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9951       else if (GET_CODE (operands[6]) == GE)
9952 	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9953     }
9954   if (GET_CODE (operands[3]) == CONST_INT
9955       && !const_ok_for_arm (INTVAL (operands[3])))
9956     output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9957   else
9958     output_asm_insn (\"cmp\\t%2, %3\", operands);
9959   output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9960   if (which_alternative != 0)
9961     return \"mov%D6\\t%0, %1\";
9962   return \"\";
9963   "
9964   [(set_attr "conds" "clob")
9965    (set_attr "length" "8,12")]
9966 )
9967
;; Flags already set: predicated data-processing op on the true condition,
;; plus a predicated MOV of the fallback value when it isn't tied to %0.
9968 (define_insn "*if_arith_move"
9969   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9970 	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
9971 			  [(match_operand 6 "cc_register" "") (const_int 0)])
9972 			 (match_operator:SI 5 "shiftable_operator"
9973 			  [(match_operand:SI 2 "s_register_operand" "r,r")
9974 			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9975 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9976   "TARGET_ARM"
9977   "@
9978    %I5%d4\\t%0, %2, %3
9979    %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9980   [(set_attr "conds" "use")
9981    (set_attr "length" "4,8")
9982    (set_attr "type" "*,*")]
9983 )
9984
;; Swapped-arm version of *ifcompare_arith_move:
;; %0 = (%4 <op6> %5) ? %1 : (%2 <op7> %3).  LT/GE-against-zero identity
;; cases collapse to AND/BIC with an ASR #31 mask (note the inverted
;; GE/LT mapping because the arith result is in the false arm).
9985 (define_insn "*ifcompare_move_arith"
9986   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9987 	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9988 			  [(match_operand:SI 4 "s_register_operand" "r,r")
9989 			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9990 			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9991 			 (match_operator:SI 7 "shiftable_operator"
9992 			  [(match_operand:SI 2 "s_register_operand" "r,r")
9993 			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9994    (clobber (reg:CC CC_REGNUM))]
9995   "TARGET_ARM"
9996   "*
9997   /* If we have an operation where (op x 0) is the identity operation and
9998      the conditional operator is LT or GE and we are comparing against zero and
9999      everything is in registers then we can do this in two instructions */
10000   if (operands[5] == const0_rtx
10001       && GET_CODE (operands[7]) != AND
10002       && GET_CODE (operands[3]) == REG
10003       && GET_CODE (operands[1]) == REG 
10004       && REGNO (operands[1]) == REGNO (operands[2])
10005       && REGNO (operands[2]) != REGNO (operands[0]))
10006     {
10007       if (GET_CODE (operands[6]) == GE)
10008 	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10009       else if (GET_CODE (operands[6]) == LT)
10010 	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10011     }
10012 
10013   if (GET_CODE (operands[5]) == CONST_INT
10014       && !const_ok_for_arm (INTVAL (operands[5])))
10015     output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10016   else
10017     output_asm_insn (\"cmp\\t%4, %5\", operands);
10018 
10019   if (which_alternative != 0)
10020     output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10021   return \"%I7%D6\\t%0, %2, %3\";
10022   "
10023   [(set_attr "conds" "clob")
10024    (set_attr "length" "8,12")]
10025 )
10026
;; Flags already set, arith result in the false arm: data-processing op
;; predicated on the inverse condition, optional MOV on the true one.
10027 (define_insn "*if_move_arith"
10028   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10029 	(if_then_else:SI
10030 	 (match_operator 4 "arm_comparison_operator"
10031 	  [(match_operand 6 "cc_register" "") (const_int 0)])
10032 	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10033 	 (match_operator:SI 5 "shiftable_operator"
10034 	  [(match_operand:SI 2 "s_register_operand" "r,r")
10035 	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10036   "TARGET_ARM"
10037   "@
10038    %I5%D4\\t%0, %2, %3
10039    %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10040   [(set_attr "conds" "use")
10041    (set_attr "length" "4,8")
10042    (set_attr "type" "*,*")]
10043 )
10044
;; %0 = (%3 <op5> %4) ? %1 : ~%2 with an explicit compare; deferred ("#").
10045 (define_insn "*ifcompare_move_not"
10046   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10047 	(if_then_else:SI
10048 	 (match_operator 5 "arm_comparison_operator"
10049 	  [(match_operand:SI 3 "s_register_operand" "r,r")
10050 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10051 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10052 	 (not:SI
10053 	  (match_operand:SI 2 "s_register_operand" "r,r"))))
10054    (clobber (reg:CC CC_REGNUM))]
10055   "TARGET_ARM"
10056   "#"
10057   [(set_attr "conds" "clob")
10058    (set_attr "length" "8,12")]
10059 )
10060
;; Flags already set: MVN of %2 on the inverse condition; the fallback is
;; tied to %0, MOVed, or built with MVN of the bitwise-inverted constant
;; (%B1, the K-constraint case).
10061 (define_insn "*if_move_not"
10062   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10063 	(if_then_else:SI
10064 	 (match_operator 4 "arm_comparison_operator"
10065 	  [(match_operand 3 "cc_register" "") (const_int 0)])
10066 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10067 	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10068   "TARGET_ARM"
10069   "@
10070    mvn%D4\\t%0, %2
10071    mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10072    mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10073   [(set_attr "conds" "use")
10074    (set_attr "insn" "mvn")
10075    (set_attr "length" "4,8,8")]
10076 )
10077
;; %0 = (%3 <op5> %4) ? ~%2 : %1 with an explicit compare; deferred ("#").
10078 (define_insn "*ifcompare_not_move"
10079   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10080 	(if_then_else:SI 
10081 	 (match_operator 5 "arm_comparison_operator"
10082 	  [(match_operand:SI 3 "s_register_operand" "r,r")
10083 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10084 	 (not:SI
10085 	  (match_operand:SI 2 "s_register_operand" "r,r"))
10086 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10087    (clobber (reg:CC CC_REGNUM))]
10088   "TARGET_ARM"
10089   "#"
10090   [(set_attr "conds" "clob")
10091    (set_attr "length" "8,12")]
10092 )
10093
;; Flags already set, NOT in the true arm: MVN on the true condition with the
;; same three fallback-handling alternatives as *if_move_not.
10094 (define_insn "*if_not_move"
10095   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10096 	(if_then_else:SI
10097 	 (match_operator 4 "arm_comparison_operator"
10098 	  [(match_operand 3 "cc_register" "") (const_int 0)])
10099 	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10100 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10101   "TARGET_ARM"
10102   "@
10103    mvn%d4\\t%0, %2
10104    mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10105    mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10106   [(set_attr "conds" "use")
10107    (set_attr "insn" "mvn")
10108    (set_attr "length" "4,8,8")]
10109 )
10110
;; %0 = (%4 <op6> %5) ? (%2 shifted by %3) : %1, explicit compare; "#".
10111 (define_insn "*ifcompare_shift_move"
10112   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10113 	(if_then_else:SI
10114 	 (match_operator 6 "arm_comparison_operator"
10115 	  [(match_operand:SI 4 "s_register_operand" "r,r")
10116 	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10117 	 (match_operator:SI 7 "shift_operator"
10118 	  [(match_operand:SI 2 "s_register_operand" "r,r")
10119 	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10120 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10121    (clobber (reg:CC CC_REGNUM))]
10122   "TARGET_ARM"
10123   "#"
10124   [(set_attr "conds" "clob")
10125    (set_attr "length" "8,12")]
10126 )
10127
;; Flags already set: predicated MOV-with-shift (%S4 prints the shift), with
;; MOV/MVN alternatives for the fallback.  Type is alu_shift for an immediate
;; shift amount, alu_shift_reg for a register one.
10128 (define_insn "*if_shift_move"
10129   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10130 	(if_then_else:SI
10131 	 (match_operator 5 "arm_comparison_operator"
10132 	  [(match_operand 6 "cc_register" "") (const_int 0)])
10133 	 (match_operator:SI 4 "shift_operator"
10134 	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10135 	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10136 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10137   "TARGET_ARM"
10138   "@
10139    mov%d5\\t%0, %2%S4
10140    mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10141    mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10142   [(set_attr "conds" "use")
10143    (set_attr "shift" "2")
10144    (set_attr "length" "4,8,8")
10145    (set_attr "insn" "mov")
10146    (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10147 		      (const_string "alu_shift")
10148 		      (const_string "alu_shift_reg")))]
10149 )
10150
;; %0 = (%4 <op6> %5) ? %1 : (%2 shifted by %3), explicit compare; "#".
10151 (define_insn "*ifcompare_move_shift"
10152   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10153 	(if_then_else:SI
10154 	 (match_operator 6 "arm_comparison_operator"
10155 	  [(match_operand:SI 4 "s_register_operand" "r,r")
10156 	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10157 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10158 	 (match_operator:SI 7 "shift_operator"
10159 	  [(match_operand:SI 2 "s_register_operand" "r,r")
10160 	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10161    (clobber (reg:CC CC_REGNUM))]
10162   "TARGET_ARM"
10163   "#"
10164   [(set_attr "conds" "clob")
10165    (set_attr "length" "8,12")]
10166 )
10167
;; Flags already set, shift in the false arm: MOV-with-shift on the inverse
;; condition; fallback tied, MOVed, or MVN of the inverted constant.
10168 (define_insn "*if_move_shift"
10169   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10170 	(if_then_else:SI
10171 	 (match_operator 5 "arm_comparison_operator"
10172 	  [(match_operand 6 "cc_register" "") (const_int 0)])
10173 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10174 	 (match_operator:SI 4 "shift_operator"
10175 	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10176 	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10177   "TARGET_ARM"
10178   "@
10179    mov%D5\\t%0, %2%S4
10180    mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10181    mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10182   [(set_attr "conds" "use")
10183    (set_attr "shift" "2")
10184    (set_attr "length" "4,8,8")
10185    (set_attr "insn" "mov")
10186    (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10187 		      (const_string "alu_shift")
10188 		      (const_string "alu_shift_reg")))]
10189 )
10190
;; Select between two shifted values with an explicit compare; deferred ("#").
10191 (define_insn "*ifcompare_shift_shift"
10192   [(set (match_operand:SI 0 "s_register_operand" "=r")
10193 	(if_then_else:SI
10194 	 (match_operator 7 "arm_comparison_operator"
10195 	  [(match_operand:SI 5 "s_register_operand" "r")
10196 	   (match_operand:SI 6 "arm_add_operand" "rIL")])
10197 	 (match_operator:SI 8 "shift_operator"
10198 	  [(match_operand:SI 1 "s_register_operand" "r")
10199 	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10200 	 (match_operator:SI 9 "shift_operator"
10201 	  [(match_operand:SI 3 "s_register_operand" "r")
10202 	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10203    (clobber (reg:CC CC_REGNUM))]
10204   "TARGET_ARM"
10205   "#"
10206   [(set_attr "conds" "clob")
10207    (set_attr "length" "12")]
10208 )
10209
;; Flags already set: two predicated MOV-with-shift insns, one per arm.
;; alu_shift type only when both shift amounts are immediates.
10210 (define_insn "*if_shift_shift"
10211   [(set (match_operand:SI 0 "s_register_operand" "=r")
10212 	(if_then_else:SI
10213 	 (match_operator 5 "arm_comparison_operator"
10214 	  [(match_operand 8 "cc_register" "") (const_int 0)])
10215 	 (match_operator:SI 6 "shift_operator"
10216 	  [(match_operand:SI 1 "s_register_operand" "r")
10217 	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10218 	 (match_operator:SI 7 "shift_operator"
10219 	  [(match_operand:SI 3 "s_register_operand" "r")
10220 	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10221   "TARGET_ARM"
10222   "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10223   [(set_attr "conds" "use")
10224    (set_attr "shift" "1")
10225    (set_attr "length" "8")
10226    (set_attr "insn" "mov")
10227    (set (attr "type") (if_then_else
10228 		        (and (match_operand 2 "const_int_operand" "")
10229 			     (match_operand 4 "const_int_operand" ""))
10230 		      (const_string "alu_shift")
10231 		      (const_string "alu_shift_reg")))]
10232 )
10233
;; %0 = (%4 <op6> %5) ? ~%1 : (%2 <op7> %3), explicit compare; "#".
10234 (define_insn "*ifcompare_not_arith"
10235   [(set (match_operand:SI 0 "s_register_operand" "=r")
10236 	(if_then_else:SI
10237 	 (match_operator 6 "arm_comparison_operator"
10238 	  [(match_operand:SI 4 "s_register_operand" "r")
10239 	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10240 	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10241 	 (match_operator:SI 7 "shiftable_operator"
10242 	  [(match_operand:SI 2 "s_register_operand" "r")
10243 	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10244    (clobber (reg:CC CC_REGNUM))]
10245   "TARGET_ARM"
10246   "#"
10247   [(set_attr "conds" "clob")
10248    (set_attr "length" "12")]
10249 )
10250
;; Flags already set: MVN on the true condition, data-processing op on the
;; inverse.  Both write %0, so exactly one takes effect.
10251 (define_insn "*if_not_arith"
10252   [(set (match_operand:SI 0 "s_register_operand" "=r")
10253 	(if_then_else:SI
10254 	 (match_operator 5 "arm_comparison_operator"
10255 	  [(match_operand 4 "cc_register" "") (const_int 0)])
10256 	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10257 	 (match_operator:SI 6 "shiftable_operator"
10258 	  [(match_operand:SI 2 "s_register_operand" "r")
10259 	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10260   "TARGET_ARM"
10261   "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10262   [(set_attr "conds" "use")
10263    (set_attr "insn" "mvn")
10264    (set_attr "length" "8")]
10265 )
10266
;; %0 = (%4 <op6> %5) ? (%2 <op7> %3) : ~%1, explicit compare; "#".
10267 (define_insn "*ifcompare_arith_not"
10268   [(set (match_operand:SI 0 "s_register_operand" "=r")
10269 	(if_then_else:SI
10270 	 (match_operator 6 "arm_comparison_operator"
10271 	  [(match_operand:SI 4 "s_register_operand" "r")
10272 	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10273 	 (match_operator:SI 7 "shiftable_operator"
10274 	  [(match_operand:SI 2 "s_register_operand" "r")
10275 	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10276 	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10277    (clobber (reg:CC CC_REGNUM))]
10278   "TARGET_ARM"
10279   "#"
10280   [(set_attr "conds" "clob")
10281    (set_attr "length" "12")]
10282 )
10283
;; Flags already set, NOT in the false arm: MVN on the inverse condition,
;; data-processing op on the true one.
10284 (define_insn "*if_arith_not"
10285   [(set (match_operand:SI 0 "s_register_operand" "=r")
10286 	(if_then_else:SI
10287 	 (match_operator 5 "arm_comparison_operator"
10288 	  [(match_operand 4 "cc_register" "") (const_int 0)])
10289 	 (match_operator:SI 6 "shiftable_operator"
10290 	  [(match_operand:SI 2 "s_register_operand" "r")
10291 	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10292 	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10293   "TARGET_ARM"
10294   "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10295   [(set_attr "conds" "use")
10296    (set_attr "insn" "mvn")
10297    (set_attr "length" "8")]
10298 )
10299
;; %0 = (%3 <op5> %4) ? -%2 : %1, explicit compare; deferred ("#").
10300 (define_insn "*ifcompare_neg_move"
10301   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10302 	(if_then_else:SI
10303 	 (match_operator 5 "arm_comparison_operator"
10304 	  [(match_operand:SI 3 "s_register_operand" "r,r")
10305 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10306 	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10307 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10308    (clobber (reg:CC CC_REGNUM))]
10309   "TARGET_ARM"
10310   "#"
10311   [(set_attr "conds" "clob")
10312    (set_attr "length" "8,12")]
10313 )
10314
;; Flags already set: negate via predicated RSB #0 on the true condition,
;; with the usual tied/MOV/MVN fallback alternatives.
10315 (define_insn "*if_neg_move"
10316   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10317 	(if_then_else:SI
10318 	 (match_operator 4 "arm_comparison_operator"
10319 	  [(match_operand 3 "cc_register" "") (const_int 0)])
10320 	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10321 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10322   "TARGET_ARM"
10323   "@
10324    rsb%d4\\t%0, %2, #0
10325    mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10326    mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10327   [(set_attr "conds" "use")
10328    (set_attr "length" "4,8,8")]
10329 )
10330
;; %0 = (%3 <op5> %4) ? %1 : -%2, explicit compare; deferred ("#").
10331 (define_insn "*ifcompare_move_neg"
10332   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10333 	(if_then_else:SI
10334 	 (match_operator 5 "arm_comparison_operator"
10335 	  [(match_operand:SI 3 "s_register_operand" "r,r")
10336 	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10337 	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10338 	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10339    (clobber (reg:CC CC_REGNUM))]
10340   "TARGET_ARM"
10341   "#"
10342   [(set_attr "conds" "clob")
10343    (set_attr "length" "8,12")]
10344 )
10345
;; Flags already set, negation in the false arm: RSB #0 on the inverse
;; condition, fallback handled as in *if_neg_move.
10346 (define_insn "*if_move_neg"
10347   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10348 	(if_then_else:SI
10349 	 (match_operator 4 "arm_comparison_operator"
10350 	  [(match_operand 3 "cc_register" "") (const_int 0)])
10351 	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10352 	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10353   "TARGET_ARM"
10354   "@
10355    rsb%D4\\t%0, %2, #0
10356    mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10357    mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10358   [(set_attr "conds" "use")
10359    (set_attr "length" "4,8,8")]
10360 )
10361
;; Arithmetic on two adjacent memory words: load both with a single LDM
;; (choosing IA/IB/DA addressing from the two offsets, or falling back to an
;; add-then-LDM or two LDRs when the offsets don't permit it), then combine
;; with the shiftable operator.  LDM requires ascending register numbers, so
;; the destination and scratch are ordered first; the arith operand order is
;; then fixed up so the lower-addressed word is always the first operand.
10362 (define_insn "*arith_adjacentmem"
10363   [(set (match_operand:SI 0 "s_register_operand" "=r")
10364 	(match_operator:SI 1 "shiftable_operator"
10365 	 [(match_operand:SI 2 "memory_operand" "m")
10366 	  (match_operand:SI 3 "memory_operand" "m")]))
10367    (clobber (match_scratch:SI 4 "=r"))]
10368   "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10369   "*
10370   {
10371     rtx ldm[3];
10372     rtx arith[4];
10373     rtx base_reg;
10374     HOST_WIDE_INT val1 = 0, val2 = 0;
10375 
10376     if (REGNO (operands[0]) > REGNO (operands[4]))
10377       {
10378 	ldm[1] = operands[4];
10379 	ldm[2] = operands[0];
10380       }
10381     else
10382       {
10383 	ldm[1] = operands[0];
10384 	ldm[2] = operands[4];
10385       }
10386 
10387     base_reg = XEXP (operands[2], 0);
10388 
10389     if (!REG_P (base_reg))
10390       {
10391 	val1 = INTVAL (XEXP (base_reg, 1));
10392 	base_reg = XEXP (base_reg, 0);
10393       }
10394 
10395     if (!REG_P (XEXP (operands[3], 0)))
10396       val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10397 
10398     arith[0] = operands[0];
10399     arith[3] = operands[1];
10400 
10401     if (val1 < val2)
10402       {
10403 	arith[1] = ldm[1];
10404 	arith[2] = ldm[2];
10405       }
10406     else
10407       {
10408 	arith[1] = ldm[2];
10409 	arith[2] = ldm[1];
10410       }
10411 
10412     ldm[0] = base_reg;
10413     if (val1 !=0 && val2 != 0)
10414       {
10415 	rtx ops[3];
10416 
10417 	if (val1 == 4 || val2 == 4)
10418 	  /* Other val must be 8, since we know they are adjacent and neither
10419 	     is zero.  */
10420 	  output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10421 	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10422 	  {
10423 	    ldm[0] = ops[0] = operands[4];
10424 	    ops[1] = base_reg;
10425 	    ops[2] = GEN_INT (val1);
10426 	    output_add_immediate (ops);
10427 	    if (val1 < val2)
10428 	      output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10429 	    else
10430 	      output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10431 	  }
10432 	else
10433 	  {
10434 	    /* Offset is out of range for a single add, so use two ldr.  */
10435 	    ops[0] = ldm[1];
10436 	    ops[1] = base_reg;
10437 	    ops[2] = GEN_INT (val1);
10438 	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10439 	    ops[0] = ldm[2];
10440 	    ops[2] = GEN_INT (val2);
10441 	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10442 	  }
10443       }
10444     else if (val1 != 0)
10445       {
10446 	if (val1 < val2)
10447 	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10448 	else
10449 	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10450       }
10451     else
10452       {
10453 	if (val1 < val2)
10454 	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10455 	else
10456 	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10457       }
10458     output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10459     return \"\";
10460   }"
10461   [(set_attr "length" "12")
10462    (set_attr "predicable" "yes")
10463    (set_attr "type" "load1")]
10464 )
10465
10466 ; This pattern is never tried by combine, so do it as a peephole
;; Fold "mov rd, rn; cmp rn, #0" into a single flag-setting move (a MOVS
;; pattern matched by the resulting parallel).
10468 (define_peephole2
10469   [(set (match_operand:SI 0 "arm_general_register_operand" "")
10470 	(match_operand:SI 1 "arm_general_register_operand" ""))
10471    (set (reg:CC CC_REGNUM)
10472 	(compare:CC (match_dup 1) (const_int 0)))]
10473   "TARGET_ARM"
10474   [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10475 	      (set (match_dup 0) (match_dup 1))])]
10476   ""
10477 )
10478
;; Split (x >= 0) & -(scc) into ~(x >> 31) in a scratch, ANDed with the scc
;; result: the sign-bit mask replaces computing the GE test separately.
10479 (define_split
10480   [(set (match_operand:SI 0 "s_register_operand" "")
10481 	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10482 		       (const_int 0))
10483 		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
10484 			 [(match_operand:SI 3 "s_register_operand" "")
10485 			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
10486    (clobber (match_operand:SI 5 "s_register_operand" ""))]
10487   "TARGET_ARM"
10488   [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10489    (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10490 			      (match_dup 5)))]
10491   ""
10492 )
10493
10494 ;; This split can be used because CC_Z mode implies that the following
10495 ;; branch will be an equality, or an unsigned inequality, so the sign
10496 ;; extension is not needed.
;; Replace "compare (byte << 24) with constant" by a zero-extending byte
;; load and a direct compare against the constant shifted back down; only
;; valid when the constant's low 24 bits are zero.
10498 (define_split
10499   [(set (reg:CC_Z CC_REGNUM)
10500 	(compare:CC_Z
10501 	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10502 		    (const_int 24))
10503 	 (match_operand 1 "const_int_operand" "")))
10504    (clobber (match_scratch:SI 2 ""))]
10505   "TARGET_ARM
10506    && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10507        == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10508   [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10509    (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10510   "
10511   operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10512   "
10513 )
10514 ;; ??? Check the patterns above for Thumb-2 usefulness
10515
;; Expand the function prologue.  ARM and Thumb-2 (TARGET_32BIT) share
;; one expander; Thumb-1 has its own.
10516 (define_expand "prologue"
10517 [(clobber (const_int 0))]
10518 "TARGET_EITHER"
10519 "if (TARGET_32BIT)
10520 arm_expand_prologue ();
10521 else
10522 thumb1_expand_prologue ();
10523 DONE;
10524 "
10525 )
10526
;; Expand the function epilogue.  For an EH return, keep r2 (holding the
;; stack adjustment) live across the epilogue via prologue_use.  Use a
;; plain return insn when USE_RETURN_INSN allows it; otherwise emit the
;; full epilogue as the VUNSPEC_EPILOGUE volatile unspec, matched by the
;; *epilogue_insns pattern below.
10527 (define_expand "epilogue"
10528 [(clobber (const_int 0))]
10529 "TARGET_EITHER"
10530 "
10531 if (crtl->calls_eh_return)
10532 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10533 if (TARGET_THUMB1)
10534 thumb1_expand_epilogue ();
10535 else if (USE_RETURN_INSN (FALSE))
10536 {
10537 emit_jump_insn (gen_return ());
10538 DONE;
10539 }
10540 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10541 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10542 DONE;
10543 "
10544 )
10545
;; Thumb-1 interworking prologue marker; the assembly text is produced
;; entirely by thumb1_output_interwork ().
10546 (define_insn "prologue_thumb1_interwork"
10547 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10548 "TARGET_THUMB1"
10549 "* return thumb1_output_interwork ();"
10550 [(set_attr "length" "8")]
10551 )
10552
10553 ;; Note - although unspec_volatile's USE all hard registers,
10554 ;; USEs are ignored after reload has completed. Thus we need
10555 ;; to add an unspec of the link register to ensure that flow
10556 ;; does not think that it is unused by the sibcall branch that
10557 ;; will replace the standard function epilogue.
;; Epilogue emitted just before a sibling call: either a single
;; return-style instruction when use_return_insn permits, or the full
;; epilogue text from arm_output_epilogue.
10558 (define_insn "sibcall_epilogue"
10559 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10560 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10561 "TARGET_32BIT"
10562 "*
10563 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10564 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10565 return arm_output_epilogue (next_nonnote_insn (insn));
10566 "
10567 ;; Length is absolute worst case
10568 [(set_attr "length" "44")
10569 (set_attr "type" "block")
10570 ;; We don't clobber the conditions, but the potential length of this
10571 ;; operation is sufficient to make conditionalizing the sequence
10572 ;; unlikely to be profitable.
10573 (set_attr "conds" "clob")]
10574 )
10575
;; The epilogue body proper, matched when the "epilogue" expander above
;; falls back to the VUNSPEC_EPILOGUE volatile unspec.
10576 (define_insn "*epilogue_insns"
10577 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10578 "TARGET_EITHER"
10579 "*
10580 if (TARGET_32BIT)
10581 return arm_output_epilogue (NULL);
10582 else /* TARGET_THUMB1 */
10583 return thumb_unexpanded_epilogue ();
10584 "
10585 ; Length is absolute worst case
10586 [(set_attr "length" "44")
10587 (set_attr "type" "block")
10588 ;; We don't clobber the conditions, but the potential length of this
10589 ;; operation is sufficient to make conditionalizing the sequence
10590 ;; unlikely to be profitable.
10591 (set_attr "conds" "clob")]
10592 )
10593
;; Expand __builtin_eh_return's epilogue: record the stack-pointer
;; offset (operand 1) in the per-function machine state and force the
;; handler address (operand 2) into hard register r2.
10594 (define_expand "eh_epilogue"
10595 [(use (match_operand:SI 0 "register_operand" ""))
10596 (use (match_operand:SI 1 "register_operand" ""))
10597 (use (match_operand:SI 2 "register_operand" ""))]
10598 "TARGET_EITHER"
10599 "
10600 {
10601 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10602 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10603 {
10604 rtx ra = gen_rtx_REG (Pmode, 2);
10605
10606 emit_move_insn (ra, operands[2]);
10607 operands[2] = ra;
10608 }
10609 /* This is a hack -- we may have crystallized the function type too
10610 early. */
10611 cfun->machine->func_type = 0;
10612 }"
)
10614
10615 ;; This split is only used during output to reduce the number of patterns
10616 ;; that need assembler instructions adding to them. We allowed the setting
10617 ;; of the conditions to be implicit during rtl generation so that
10618 ;; the conditional compare patterns would work. However this conflicts to
10619 ;; some extent with the conditional data operations, so we have to split them
10620 ;; up again here.
10621
10622 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10623 ;; conditional execution sufficient?
10624
;; if_then_else whose true arm is the destination itself: emit the
;; compare, then one conditional move of operand 4 under the REVERSED
;; condition (reversal honours unordered FP compares).
10625 (define_split
10626 [(set (match_operand:SI 0 "s_register_operand" "")
10627 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10628 [(match_operand 2 "" "") (match_operand 3 "" "")])
10629 (match_dup 0)
10630 (match_operand 4 "" "")))
10631 (clobber (reg:CC CC_REGNUM))]
10632 "TARGET_ARM && reload_completed"
10633 [(set (match_dup 5) (match_dup 6))
10634 (cond_exec (match_dup 7)
10635 (set (match_dup 0) (match_dup 4)))]
10636 "
10637 {
10638 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10639 operands[2], operands[3]);
10640 enum rtx_code rc = GET_CODE (operands[1]);
10641
10642 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10643 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10644 if (mode == CCFPmode || mode == CCFPEmode)
10645 rc = reverse_condition_maybe_unordered (rc);
10646 else
10647 rc = reverse_condition (rc);
10648
10649 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10650 }"
10651 )
10652
;; As above, but the FALSE arm is the destination: the conditional move
;; of operand 4 executes under the original (un-reversed) condition, so
;; no reversal code is needed.
10653 (define_split
10654 [(set (match_operand:SI 0 "s_register_operand" "")
10655 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10656 [(match_operand 2 "" "") (match_operand 3 "" "")])
10657 (match_operand 4 "" "")
10658 (match_dup 0)))
10659 (clobber (reg:CC CC_REGNUM))]
10660 "TARGET_ARM && reload_completed"
10661 [(set (match_dup 5) (match_dup 6))
10662 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10663 (set (match_dup 0) (match_dup 4)))]
10664 "
10665 {
10666 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10667 operands[2], operands[3]);
10668
10669 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10670 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10671 }"
10672 )
10673
;; Fully general conditional move: compare once, then two predicated
;; moves -- operand 4 under the condition, operand 5 under its reverse.
10674 (define_split
10675 [(set (match_operand:SI 0 "s_register_operand" "")
10676 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10677 [(match_operand 2 "" "") (match_operand 3 "" "")])
10678 (match_operand 4 "" "")
10679 (match_operand 5 "" "")))
10680 (clobber (reg:CC CC_REGNUM))]
10681 "TARGET_ARM && reload_completed"
10682 [(set (match_dup 6) (match_dup 7))
10683 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10684 (set (match_dup 0) (match_dup 4)))
10685 (cond_exec (match_dup 8)
10686 (set (match_dup 0) (match_dup 5)))]
10687 "
10688 {
10689 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10690 operands[2], operands[3]);
10691 enum rtx_code rc = GET_CODE (operands[1]);
10692
10693 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10694 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10695 if (mode == CCFPmode || mode == CCFPEmode)
10696 rc = reverse_condition_maybe_unordered (rc);
10697 else
10698 rc = reverse_condition (rc);
10699
10700 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10701 }"
10702 )
10703
;; Conditional move whose false arm is a bitwise NOT: compare, move
;; operand 4 under the condition, then a predicated NOT of operand 5
;; under the reversed condition (an MVN at the assembly level).
10704 (define_split
10705 [(set (match_operand:SI 0 "s_register_operand" "")
10706 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10707 [(match_operand:SI 2 "s_register_operand" "")
10708 (match_operand:SI 3 "arm_add_operand" "")])
10709 (match_operand:SI 4 "arm_rhs_operand" "")
10710 (not:SI
10711 (match_operand:SI 5 "s_register_operand" ""))))
10712 (clobber (reg:CC CC_REGNUM))]
10713 "TARGET_ARM && reload_completed"
10714 [(set (match_dup 6) (match_dup 7))
10715 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10716 (set (match_dup 0) (match_dup 4)))
10717 (cond_exec (match_dup 8)
10718 (set (match_dup 0) (not:SI (match_dup 5))))]
10719 "
10720 {
10721 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10722 operands[2], operands[3]);
10723 enum rtx_code rc = GET_CODE (operands[1]);
10724
10725 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10726 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10727 if (mode == CCFPmode || mode == CCFPEmode)
10728 rc = reverse_condition_maybe_unordered (rc);
10729 else
10730 rc = reverse_condition (rc);
10731
10732 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10733 }"
10734 )
10735
;; Conditional "move or move-not" on an already-computed flag value:
;; a single MVN when the destination already holds operand 1
;; (alternative 0, length 4), otherwise a MOV/MVN pair (length 8).
10736 (define_insn "*cond_move_not"
10737 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10738 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10739 [(match_operand 3 "cc_register" "") (const_int 0)])
10740 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10741 (not:SI
10742 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10743 "TARGET_ARM"
10744 "@
10745 mvn%D4\\t%0, %2
10746 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10747 [(set_attr "conds" "use")
10748 (set_attr "insn" "mvn")
10749 (set_attr "length" "4,8")]
10750 )
10751
10752 ;; The next two patterns occur when an AND operation is followed by a
10753 ;; scc insn sequence
10754
;; One-bit sign extract: ANDS with the single-bit mask (result 0 if the
;; bit is clear), then MVNNE writes ~0 == -1 when the bit was set.
10755 (define_insn "*sign_extract_onebit"
10756 [(set (match_operand:SI 0 "s_register_operand" "=r")
10757 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10758 (const_int 1)
10759 (match_operand:SI 2 "const_int_operand" "n")))
10760 (clobber (reg:CC CC_REGNUM))]
10761 "TARGET_ARM"
10762 "*
10763 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10764 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10765 return \"mvnne\\t%0, #0\";
10766 "
10767 [(set_attr "conds" "clob")
10768 (set_attr "length" "8")]
10769 )
10770
;; Inverted form: TST the bit, then MVNEQ gives -1 when the bit is clear
;; and MOVNE gives 0 when it is set (i.e. NOT of the extract above).
10771 (define_insn "*not_signextract_onebit"
10772 [(set (match_operand:SI 0 "s_register_operand" "=r")
10773 (not:SI
10774 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10775 (const_int 1)
10776 (match_operand:SI 2 "const_int_operand" "n"))))
10777 (clobber (reg:CC CC_REGNUM))]
10778 "TARGET_ARM"
10779 "*
10780 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10781 output_asm_insn (\"tst\\t%1, %2\", operands);
10782 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10783 return \"movne\\t%0, #0\";
10784 "
10785 [(set_attr "conds" "clob")
10786 (set_attr "length" "12")]
10787 )
10788 ;; ??? The above patterns need auditing for Thumb-2
10789
10790 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10791 ;; expressions. For simplicity, the first register is also in the unspec
10792 ;; part.
10793 ;; To avoid the usage of GNU extension, the length attribute is computed
10794 ;; in a C function arm_attr_length_push_multi.
;; Operand 0 is the (pre-decremented) stack memory destination, operand
;; 1 the first pushed register; the remaining registers are read out of
;; the match_parallel (operand 2).
10795 (define_insn "*push_multi"
10796 [(match_parallel 2 "multi_register_push"
10797 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10798 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10799 UNSPEC_PUSH_MULT))])]
10800 ""
10801 "*
10802 {
10803 int num_saves = XVECLEN (operands[2], 0);
10804
10805 /* For the StrongARM at least it is faster to
10806 use STR to store only a single register.
10807 In Thumb mode always use push, and the assembler will pick
10808 something appropriate. */
10809 if (num_saves == 1 && TARGET_ARM)
10810 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10811 else
10812 {
10813 int i;
10814 char pattern[100];
10815
10816 if (TARGET_ARM)
10817 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10818 else if (TARGET_THUMB2)
10819 strcpy (pattern, \"push%?\\t{%1\");
10820 else
10821 strcpy (pattern, \"push\\t{%1\");
10822
10823 for (i = 1; i < num_saves; i++)
10824 {
10825 strcat (pattern, \", %|\");
10826 strcat (pattern,
10827 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10828 }
10829
10830 strcat (pattern, \"}\");
10831 output_asm_insn (pattern, operands);
10832 }
10833
10834 return \"\";
10835 }"
10836 [(set_attr "type" "store4")
10837 (set (attr "length")
10838 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10839 )
10840
;; Zero-length insn tying two stack-related registers together through a
;; store to a scratch BLKmode memory.  NOTE(review): apparently used to
;; keep memory accesses ordered with respect to stack-pointer updates in
;; the prologue/epilogue (UNSPEC_PRLG_STK) -- confirm against the users
;; of gen_stack_tie in arm.c.
10841 (define_insn "stack_tie"
10842 [(set (mem:BLK (scratch))
10843 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10844 (match_operand:SI 1 "s_register_operand" "rk")]
10845 UNSPEC_PRLG_STK))]
10846 ""
10847 ""
10848 [(set_attr "length" "0")]
)
10850
10851 ;; Similarly for the floating point registers
;; Push a block of FPA registers with a single SFMFD; the register count
;; is taken from the length of the match_parallel.
10852 (define_insn "*push_fp_multi"
10853 [(match_parallel 2 "multi_register_push"
10854 [(set (match_operand:BLK 0 "memory_operand" "=m")
10855 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10856 UNSPEC_PUSH_MULT))])]
10857 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10858 "*
10859 {
10860 char pattern[100];
10861
10862 sprintf (pattern, \"sfm%%(fd%%)\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10863 output_asm_insn (pattern, operands);
10864 return \"\";
10865 }"
10866 [(set_attr "type" "f_fpa_store")]
10867 )
10868
10869 ;; Special patterns for dealing with the constant pool
10870
;; Align the output to a 32-bit boundary (constant-pool padding).
10871 (define_insn "align_4"
10872 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10873 "TARGET_EITHER"
10874 "*
10875 assemble_align (32);
10876 return \"\";
10877 "
10878 )
10879
;; Align the output to a 64-bit boundary.
10880 (define_insn "align_8"
10881 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10882 "TARGET_EITHER"
10883 "*
10884 assemble_align (64);
10885 return \"\";
10886 "
10887 )
10888
;; Mark the end of a constant pool; emits nothing, just clears the
;; making_const_table flag.
10889 (define_insn "consttable_end"
10890 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10891 "TARGET_EITHER"
10892 "*
10893 making_const_table = FALSE;
10894 return \"\";
10895 "
10896 )
10897
;; Emit a 1-byte constant-pool entry, padded with zeros to 4 bytes
;; (Thumb-1 only).
10898 (define_insn "consttable_1"
10899 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10900 "TARGET_THUMB1"
10901 "*
10902 making_const_table = TRUE;
10903 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10904 assemble_zeros (3);
10905 return \"\";
10906 "
10907 [(set_attr "length" "4")]
10908 )
10909
;; Emit a 2-byte constant-pool entry, padded with zeros to 4 bytes
;; (Thumb-1 only; floats are asserted not to reach here).
10910 (define_insn "consttable_2"
10911 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10912 "TARGET_THUMB1"
10913 "*
10914 making_const_table = TRUE;
10915 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10916 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10917 assemble_zeros (2);
10918 return \"\";
10919 "
10920 [(set_attr "length" "4")]
10921 )
10922
;; Emit a 4-byte constant-pool entry.  HFmode constants go through
;; arm_emit_fp16_const, other floats through assemble_real; anything
;; else is emitted as an integer, stripping a stray HIGH first (see the
;; comment in the body).
10923 (define_insn "consttable_4"
10924 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10925 "TARGET_EITHER"
10926 "*
10927 {
10928 rtx x = operands[0];
10929 making_const_table = TRUE;
10930 switch (GET_MODE_CLASS (GET_MODE (x)))
10931 {
10932 case MODE_FLOAT:
10933 if (GET_MODE (x) == HFmode)
10934 arm_emit_fp16_const (x);
10935 else
10936 {
10937 REAL_VALUE_TYPE r;
10938 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10939 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10940 }
10941 break;
10942 default:
10943 /* XXX: Sometimes gcc does something really dumb and ends up with
10944 a HIGH in a constant pool entry, usually because it's trying to
10945 load into a VFP register. We know this will always be used in
10946 combination with a LO_SUM which ignores the high bits, so just
10947 strip off the HIGH. */
10948 if (GET_CODE (x) == HIGH)
10949 x = XEXP (x, 0);
10950 assemble_integer (x, 4, BITS_PER_WORD, 1);
10951 mark_symbol_refs_as_used (x);
10952 break;
10953 }
10954 return \"\";
10955 }"
10956 [(set_attr "length" "4")]
10957 )
10958
;; Emit an 8-byte constant-pool entry (float or integer).
10959 (define_insn "consttable_8"
10960 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10961 "TARGET_EITHER"
10962 "*
10963 {
10964 making_const_table = TRUE;
10965 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10966 {
10967 case MODE_FLOAT:
10968 {
10969 REAL_VALUE_TYPE r;
10970 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10971 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10972 break;
10973 }
10974 default:
10975 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10976 break;
10977 }
10978 return \"\";
10979 }"
10980 [(set_attr "length" "8")]
10981 )
10982
;; Emit a 16-byte constant-pool entry (float or integer).
10983 (define_insn "consttable_16"
10984 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10985 "TARGET_EITHER"
10986 "*
10987 {
10988 making_const_table = TRUE;
10989 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10990 {
10991 case MODE_FLOAT:
10992 {
10993 REAL_VALUE_TYPE r;
10994 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10995 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10996 break;
10997 }
10998 default:
10999 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11000 break;
11001 }
11002 return \"\";
11003 }"
11004 [(set_attr "length" "16")]
11005 )
11006
11007 ;; Miscellaneous Thumb patterns
11008
;; Thumb-1 table jump.  When compiling PIC, bias the looked-up value by
;; the address of the jump-table label before branching.
11009 (define_expand "tablejump"
11010 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11011 (use (label_ref (match_operand 1 "" "")))])]
11012 "TARGET_THUMB1"
11013 "
11014 if (flag_pic)
11015 {
11016 /* Hopefully, CSE will eliminate this copy. */
11017 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11018 rtx reg2 = gen_reg_rtx (SImode);
11019
11020 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11021 operands[0] = reg2;
11022 }
11023 "
11024 )
11025
11026 ;; NB never uses BX.
;; The jump itself: a plain MOV to PC, so the current instruction-set
;; state is preserved.
11027 (define_insn "*thumb1_tablejump"
11028 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11029 (use (label_ref (match_operand 1 "" "")))]
11030 "TARGET_THUMB1"
11031 "mov\\t%|pc, %0"
11032 [(set_attr "length" "2")]
11033 )
11034
11035 ;; V5 instructions.
11036
;; Count leading zeros (CLZ, ARMv5 and later).
11037 (define_insn "clzsi2"
11038 [(set (match_operand:SI 0 "s_register_operand" "=r")
11039 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11040 "TARGET_32BIT && arm_arch5"
11041 "clz%?\\t%0, %1"
11042 [(set_attr "predicable" "yes")
11043 (set_attr "insn" "clz")])
11044
;; Bit reversal (RBIT, Thumb-2 architectures).  NOTE(review): the "insn"
;; attribute is "clz" -- confirm RBIT is meant to share CLZ's scheduling
;; class rather than this being a copy-paste slip.
11045 (define_insn "rbitsi2"
11046 [(set (match_operand:SI 0 "s_register_operand" "=r")
11047 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11048 "TARGET_32BIT && arm_arch_thumb2"
11049 "rbit%?\\t%0, %1"
11050 [(set_attr "predicable" "yes")
11051 (set_attr "insn" "clz")])
11052
;; Count trailing zeros, synthesised as RBIT followed by CLZ.
11053 (define_expand "ctzsi2"
11054 [(set (match_operand:SI 0 "s_register_operand" "")
11055 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11056 "TARGET_32BIT && arm_arch_thumb2"
11057 "
11058 {
11059 rtx tmp = gen_reg_rtx (SImode);
11060 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11061 emit_insn (gen_clzsi2 (operands[0], tmp));
11062 }
11063 DONE;
11064 "
11065 )
11066
11067 ;; V5E instructions.
11068
;; __builtin_prefetch: emit PLD for the address.  Operands 1 (write
;; flag) and 2 (locality) from the standard prefetch pattern are
;; accepted but unused by the template.
11069 (define_insn "prefetch"
11070 [(prefetch (match_operand:SI 0 "address_operand" "p")
11071 (match_operand:SI 1 "" "")
11072 (match_operand:SI 2 "" ""))]
11073 "TARGET_32BIT && arm_arch5e"
11074 "pld\\t%a0")
11075
11076 ;; General predication pattern
11077
;; Generic predication wrapper: any arm_comparison_operator applied to a
;; CC register against zero may predicate a 32-bit insn.
11078 (define_cond_exec
11079 [(match_operator 0 "arm_comparison_operator"
11080 [(match_operand 1 "cc_register" "")
11081 (const_int 0)])]
11082 "TARGET_32BIT"
11083 ""
11084 )
11085
;; Zero-length marker that keeps a register live through the prologue;
;; emits only an assembly comment naming the register.
11086 (define_insn "prologue_use"
11087 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11088 ""
11089 "%@ %0 needed for prologue"
11090 [(set_attr "length" "0")]
11091 )
11092
11093
11094 ;; Patterns for exception handling
11095
;; Expand __builtin_eh_return, dispatching on the instruction set.
11096 (define_expand "eh_return"
11097 [(use (match_operand 0 "general_operand" ""))]
11098 "TARGET_EITHER"
11099 "
11100 {
11101 if (TARGET_32BIT)
11102 emit_insn (gen_arm_eh_return (operands[0]));
11103 else
11104 emit_insn (gen_thumb_eh_return (operands[0]));
11105 DONE;
11106 }"
11107 )
11108
11109 ;; We can't expand this before we know where the link register is stored.
;; Split after reload (when the LR save slot is known); the real work is
;; done by arm_set_return_address, using operand 1 as a scratch.
11110 (define_insn_and_split "arm_eh_return"
11111 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11112 VUNSPEC_EH_RETURN)
11113 (clobber (match_scratch:SI 1 "=&r"))]
11114 "TARGET_ARM"
11115 "#"
11116 "&& reload_completed"
11117 [(const_int 0)]
11118 "
11119 {
11120 arm_set_return_address (operands[0], operands[1]);
11121 DONE;
11122 }"
11123 )
11124
;; Thumb-1 counterpart of the above, restricted to low registers and
;; using thumb_set_return_address.
11125 (define_insn_and_split "thumb_eh_return"
11126 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11127 VUNSPEC_EH_RETURN)
11128 (clobber (match_scratch:SI 1 "=&l"))]
11129 "TARGET_THUMB1"
11130 "#"
11131 "&& reload_completed"
11132 [(const_int 0)]
11133 "
11134 {
11135 thumb_set_return_address (operands[0], operands[1]);
11136 DONE;
11137 }"
11138 )
11139
11140 \f
11141 ;; TLS support
11142
;; Read the thread pointer directly from coprocessor 15 (hardware TP).
11143 (define_insn "load_tp_hard"
11144 [(set (match_operand:SI 0 "register_operand" "=r")
11145 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11146 "TARGET_HARD_TP"
11147 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11148 [(set_attr "predicable" "yes")]
11149 )
11150
11151 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software TP: call the EABI helper, which returns the thread pointer
;; in r0 (hence the hard-register destination).
11152 (define_insn "load_tp_soft"
11153 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11154 (clobber (reg:SI LR_REGNUM))
11155 (clobber (reg:SI IP_REGNUM))
11156 (clobber (reg:CC CC_REGNUM))]
11157 "TARGET_SOFT_TP"
11158 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11159 [(set_attr "conds" "clob")]
11160 )
11161
11162 ;; tls descriptor call
;; Emits the .LPIC local label (numbered by operand 1) then the BL to
;; the descriptor.  NOTE(review): CC_REGNUM is clobbered here in SImode,
;; whereas every other pattern in this file uses (reg:CC CC_REGNUM) --
;; confirm whether reg:CC was intended.
11163 (define_insn "tlscall"
11164 [(set (reg:SI R0_REGNUM)
11165 (unspec:SI [(reg:SI R0_REGNUM)
11166 (match_operand:SI 0 "" "X")
11167 (match_operand 1 "" "")] UNSPEC_TLS))
11168 (clobber (reg:SI R1_REGNUM))
11169 (clobber (reg:SI LR_REGNUM))
11170 (clobber (reg:SI CC_REGNUM))]
11171 "TARGET_GNU2_TLS"
11172 {
11173 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11174 INTVAL (operands[1]));
11175 return "bl\\t%c0(tlscall)";
11176 }
11177 [(set_attr "conds" "clob")
11178 (set_attr "length" "4")]
11179 )
11180
11181 ;;
11182
11183 ;; We only care about the lower 16 bits of the constant
11184 ;; being inserted into the upper 16 bits of the register.
;; MOVT: write the low 16 bits of the constant (%L1) into the top half
;; of the destination register, leaving the bottom half untouched.
11185 (define_insn "*arm_movtas_ze"
11186 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11187 (const_int 16)
11188 (const_int 16))
11189 (match_operand:SI 1 "const_int_operand" ""))]
11190 "arm_arch_thumb2"
11191 "movt%?\t%0, %L1"
11192 [(set_attr "predicable" "yes")
11193 (set_attr "length" "4")]
11194 )
11195
;; 32-bit byte swap with the REV instruction (ARMv6 and later).
11196 (define_insn "*arm_rev"
11197 [(set (match_operand:SI 0 "s_register_operand" "=r")
11198 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11199 "TARGET_32BIT && arm_arch6"
11200 "rev%?\t%0, %1"
11201 [(set_attr "predicable" "yes")
11202 (set_attr "length" "4")]
11203 )
11204
;; Thumb-1 form of REV, restricted to low registers.
11205 (define_insn "*thumb1_rev"
11206 [(set (match_operand:SI 0 "s_register_operand" "=l")
11207 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11208 "TARGET_THUMB1 && arm_arch6"
11209 "rev\t%0, %1"
11210 [(set_attr "length" "2")]
11211 )
11212
;; Pre-ARMv6 byte swap: the classic four-operation XOR/rotate/shift/AND
;; sequence, using operands 2 and 3 as temporaries.
11213 (define_expand "arm_legacy_rev"
11214 [(set (match_operand:SI 2 "s_register_operand" "")
11215 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11216 (const_int 16))
11217 (match_dup 1)))
11218 (set (match_dup 2)
11219 (lshiftrt:SI (match_dup 2)
11220 (const_int 8)))
11221 (set (match_operand:SI 3 "s_register_operand" "")
11222 (rotatert:SI (match_dup 1)
11223 (const_int 8)))
11224 (set (match_dup 2)
11225 (and:SI (match_dup 2)
11226 (const_int -65281)))
11227 (set (match_operand:SI 0 "s_register_operand" "")
11228 (xor:SI (match_dup 3)
11229 (match_dup 2)))]
11230 "TARGET_32BIT"
11231 ""
11232 )
11233
11234 ;; Reuse temporaries to keep register pressure down.
;; Pre-ARMv6 Thumb byte swap: shift/rotate/OR sequence over temporaries
;; 2-5, swapping each half of the word and recombining.
11235 (define_expand "thumb_legacy_rev"
11236 [(set (match_operand:SI 2 "s_register_operand" "")
11237 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11238 (const_int 24)))
11239 (set (match_operand:SI 3 "s_register_operand" "")
11240 (lshiftrt:SI (match_dup 1)
11241 (const_int 24)))
11242 (set (match_dup 3)
11243 (ior:SI (match_dup 3)
11244 (match_dup 2)))
11245 (set (match_operand:SI 4 "s_register_operand" "")
11246 (const_int 16))
11247 (set (match_operand:SI 5 "s_register_operand" "")
11248 (rotatert:SI (match_dup 1)
11249 (match_dup 4)))
11250 (set (match_dup 2)
11251 (ashift:SI (match_dup 5)
11252 (const_int 24)))
11253 (set (match_dup 5)
11254 (lshiftrt:SI (match_dup 5)
11255 (const_int 24)))
11256 (set (match_dup 5)
11257 (ior:SI (match_dup 5)
11258 (match_dup 2)))
11259 (set (match_dup 5)
11260 (rotatert:SI (match_dup 5)
11261 (match_dup 4)))
11262 (set (match_operand:SI 0 "s_register_operand" "")
11263 (ior:SI (match_dup 5)
11264 (match_dup 3)))]
11265 "TARGET_THUMB"
11266 ""
11267 )
11268
;; bswap entry point: use the REV insn patterns when ARMv6 is available,
;; otherwise (and only when not optimising for size) fall back to the
;; legacy multi-insn sequences defined above.
11269 (define_expand "bswapsi2"
11270 [(set (match_operand:SI 0 "s_register_operand" "=r")
11271 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11272 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11273 "
11274 if (!arm_arch6)
11275 {
11276 rtx op2 = gen_reg_rtx (SImode);
11277 rtx op3 = gen_reg_rtx (SImode);
11278
11279 if (TARGET_THUMB)
11280 {
11281 rtx op4 = gen_reg_rtx (SImode);
11282 rtx op5 = gen_reg_rtx (SImode);
11283
11284 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11285 op2, op3, op4, op5));
11286 }
11287 else
11288 {
11289 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11290 op2, op3));
11291 }
11292
11293 DONE;
11294 }
11295 "
11296 )
11297
11298 ;; Load the load/store multiple patterns
11299 (include "ldmstm.md")
11300 ;; Load the FPA co-processor patterns
11301 (include "fpa.md")
11302 ;; Load the Maverick co-processor patterns
11303 (include "cirrus.md")
11304 ;; Vector bits common to IWMMXT and Neon
11305 (include "vec-common.md")
11306 ;; Load the Intel Wireless Multimedia Extension patterns
11307 (include "iwmmxt.md")
11308 ;; Load the VFP co-processor patterns
11309 (include "vfp.md")
11310 ;; Thumb-2 patterns
11311 (include "thumb2.md")
11312 ;; Neon patterns
11313 (include "neon.md")
11314 ;; Synchronization Primitives
11315 (include "sync.md")
11316 ;; Fixed-point patterns
11317 (include "arm-fixed.md")