1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (R1_REGNUM 1) ; Second CORE register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (CC_REGNUM 24) ; Condition code pseudo register
40 (LAST_ARM_REGNUM 15) ;
41 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
42 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
43 ]
44 )
45 ;; 3rd operand to select_dominance_cc_mode
46 (define_constants
47 [(DOM_CC_X_AND_Y 0)
48 (DOM_CC_NX_OR_Y 1)
49 (DOM_CC_X_OR_Y 2)
50 ]
51 )
52 ;; conditional compare combination
53 (define_constants
54 [(CMP_CMP 0)
55 (CMN_CMP 1)
56 (CMP_CMN 2)
57 (CMN_CMN 3)
58 (NUM_OF_COND_CMP 4)
59 ]
60 )
61
62 ;; UNSPEC Usage:
63 ;; Note: sin and cos are no longer used.
64 ;; Unspec enumerators for Neon are defined in neon.md.
65
66 (define_c_enum "unspec" [
67 UNSPEC_SIN ; `sin' operation (MODE_FLOAT):
68 ; operand 0 is the result,
69 ; operand 1 the parameter.
70 UNSPEC_COS ; `cos' operation (MODE_FLOAT):
71 ; operand 0 is the result,
72 ; operand 1 the parameter.
73 UNSPEC_PUSH_MULT ; `push multiple' operation:
74 ; operand 0 is the first register,
75 ; subsequent registers are in parallel (use ...)
76 ; expressions.
77 UNSPEC_PIC_SYM ; A symbol that has been treated properly for pic
78 ; usage, that is, we will add the pic_register
79 ; value to it before trying to dereference it.
80 UNSPEC_PIC_BASE ; Add PC and all but the last operand together;
81 ; the last operand is the number of a PIC_LABEL
82 ; that points at the containing instruction.
83 UNSPEC_PRLG_STK ; A special barrier that prevents frame accesses
84 ; being scheduled before the stack adjustment insn.
85 UNSPEC_PROLOGUE_USE ; As USE insns are not meaningful after reload,
86 ; this unspec is used to prevent the deletion of
87 ; instructions setting registers for EH handling
88 ; and stack frame generation. Operand 0 is the
89 ; register to "use".
90 UNSPEC_CHECK_ARCH ; Set CCs to indicate 26-bit or 32-bit mode.
91 UNSPEC_WSHUFH ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
92 UNSPEC_WACC ; Used by the intrinsic form of the iWMMXt WACC instruction.
93 UNSPEC_TMOVMSK ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
94 UNSPEC_WSAD ; Used by the intrinsic form of the iWMMXt WSAD instruction.
95 UNSPEC_WSADZ ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
96 UNSPEC_WMACS ; Used by the intrinsic form of the iWMMXt WMACS instruction.
97 UNSPEC_WMACU ; Used by the intrinsic form of the iWMMXt WMACU instruction.
98 UNSPEC_WMACSZ ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
99 UNSPEC_WMACUZ ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
100 UNSPEC_CLRDI ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
101 UNSPEC_WMADDS ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
102 UNSPEC_WMADDU ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
103 UNSPEC_TLS ; A symbol that has been treated properly for TLS usage.
104 UNSPEC_PIC_LABEL ; A label used for PIC access that does not appear in the
105 ; instruction stream.
106 UNSPEC_PIC_OFFSET ; A symbolic 12-bit OFFSET that has been treated
107 ; correctly for PIC usage.
108 UNSPEC_GOTSYM_OFF ; The offset of the start of the GOT from a
109 ; given symbolic address.
110 UNSPEC_THUMB1_CASESI ; A Thumb1 compressed dispatch-table call.
111 UNSPEC_RBIT ; rbit operation.
112 UNSPEC_SYMBOL_OFFSET ; The offset of the start of the symbol from
113 ; another symbolic address.
114 UNSPEC_MEMORY_BARRIER ; Represent a memory barrier.
115 ])
116
117 ;; UNSPEC_VOLATILE Usage:
118
119 (define_c_enum "unspecv" [
120 VUNSPEC_BLOCKAGE ; `blockage' insn to prevent scheduling across an
121 ; insn in the code.
122 VUNSPEC_EPILOGUE ; `epilogue' insn, used to represent any part of the
123 ; instruction epilogue sequence that isn't expanded
124 ; into normal RTL. Used for both normal and sibcall
125 ; epilogues.
126 VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
127 ; modes from ARM to Thumb.
128 VUNSPEC_ALIGN ; `align' insn. Used at the head of a minipool table
129 ; for inlined constants.
130 VUNSPEC_POOL_END ; `end-of-table'. Used to mark the end of a minipool
131 ; table.
132 VUNSPEC_POOL_1 ; `pool-entry(1)'. An entry in the constant pool for
133 ; an 8-bit object.
134 VUNSPEC_POOL_2 ; `pool-entry(2)'. An entry in the constant pool for
135 ; a 16-bit object.
136 VUNSPEC_POOL_4 ; `pool-entry(4)'. An entry in the constant pool for
137 ; a 32-bit object.
138 VUNSPEC_POOL_8 ; `pool-entry(8)'. An entry in the constant pool for
139 ; a 64-bit object.
140 VUNSPEC_POOL_16 ; `pool-entry(16)'. An entry in the constant pool for
141 ; a 128-bit object.
142 VUNSPEC_TMRC ; Used by the iWMMXt TMRC instruction.
143 VUNSPEC_TMCR ; Used by the iWMMXt TMCR instruction.
144 VUNSPEC_ALIGN8 ; 8-byte alignment version of VUNSPEC_ALIGN
145 VUNSPEC_WCMP_EQ ; Used by the iWMMXt WCMPEQ instructions
146 VUNSPEC_WCMP_GTU ; Used by the iWMMXt WCMPGTU instructions
147 VUNSPEC_WCMP_GT ; Used by the iWMMXt WCMPGT instructions
148 VUNSPEC_EH_RETURN ; Used to override the return address for exception
149 ; handling.
150 VUNSPEC_SYNC_COMPARE_AND_SWAP ; Represent an atomic compare-and-swap.
151 VUNSPEC_SYNC_LOCK ; Represent a sync_lock_test_and_set.
152 VUNSPEC_SYNC_OP ; Represent a sync_<op>
153 VUNSPEC_SYNC_NEW_OP ; Represent a sync_new_<op>
154 VUNSPEC_SYNC_OLD_OP ; Represent a sync_old_<op>
155 ])
156 \f
157 ;;---------------------------------------------------------------------------
158 ;; Attributes
159
160 ;; Processor type. This is created automatically from arm-cores.def.
161 (include "arm-tune.md")
162
163 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
164 ; generating ARM code. This is used to control the length of some insn
165 ; patterns that share the same RTL in both ARM and Thumb code.
166 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
167
168 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
169 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
170
171 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
172 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
173
174 ;; Operand number of an input operand that is shifted. Zero if the
175 ;; given instruction does not shift one of its input operands.
176 (define_attr "shift" "" (const_int 0))
177
178 ; Floating Point Unit. If we only have floating point emulation, then there
179 ; is no point in scheduling the floating point insns. (Well, for best
180 ; performance we should try and group them together).
181 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
182 (const (symbol_ref "arm_fpu_attr")))
183
184 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
185 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
186 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
187 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
188 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
189 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
190 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
191 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
192 (const_string "none"))
193
194 ; LENGTH of an instruction (in bytes)
195 (define_attr "length" ""
196 (cond [(not (eq_attr "sync_memory" "none"))
197 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
198 ] (const_int 4)))
199
200 ; The architecture which supports the instruction (or alternative).
201 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
202 ; TARGET_32BIT, "t1" or "t2" to specify a particular Thumb mode, "v6" for
203 ; ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without arm_arch6;
204 ; "onlya8"/"nota8" select or exclude Cortex-A8 tuning. This attribute is
205 ; used to compute "enabled"; use "any" to enable an alternative in all cases.
206 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,onlya8,nota8"
207 (const_string "any"))
208
209 (define_attr "arch_enabled" "no,yes"
210 (cond [(eq_attr "arch" "any")
211 (const_string "yes")
212
213 (and (eq_attr "arch" "a")
214 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
215 (const_string "yes")
216
217 (and (eq_attr "arch" "t")
218 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
219 (const_string "yes")
220
221 (and (eq_attr "arch" "t1")
222 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
223 (const_string "yes")
224
225 (and (eq_attr "arch" "t2")
226 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
227 (const_string "yes")
228
229 (and (eq_attr "arch" "32")
230 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
231 (const_string "yes")
232
233 (and (eq_attr "arch" "v6")
234 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
235 (const_string "yes")
236
237 (and (eq_attr "arch" "nov6")
238 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
239 (const_string "yes")
240
241 (and (eq_attr "arch" "onlya8")
242 (eq_attr "tune" "cortexa8"))
243 (const_string "yes")
244
245 (and (eq_attr "arch" "nota8")
246 (not (eq_attr "tune" "cortexa8")))
247 (const_string "yes")]
248 (const_string "no")))
249
250 ; Allows an insn to disable certain alternatives for reasons other than
251 ; arch support.
252 (define_attr "insn_enabled" "no,yes"
253 (const_string "yes"))
254
255 ; Enable all alternatives that are both arch_enabled and insn_enabled.
256 (define_attr "enabled" "no,yes"
257 (if_then_else (eq_attr "insn_enabled" "yes")
258 (if_then_else (eq_attr "arch_enabled" "yes")
259 (const_string "yes")
260 (const_string "no"))
261 (const_string "no")))
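
; As an illustration of how these attributes combine (a description of the
; existing machinery, not a new pattern): the "*arm_addsi3" insn below
; carries
;   (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")
; so its addw/subw alternatives have "arch_enabled", and therefore
; "enabled", equal to "no" unless TARGET_THUMB2 holds, and are ignored when
; compiling for ARM state; the "*" entries take the default value "any".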
262
263 ; POOL_RANGE is how far away from a constant pool entry this insn
264 ; can be placed. If the distance is zero, then this insn will never
265 ; reference the pool.
266 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
267 ; before its address.
268 (define_attr "arm_pool_range" "" (const_int 0))
269 (define_attr "thumb2_pool_range" "" (const_int 0))
270 (define_attr "arm_neg_pool_range" "" (const_int 0))
271 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
272
273 (define_attr "pool_range" ""
274 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
275 (attr "arm_pool_range")))
276 (define_attr "neg_pool_range" ""
277 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
278 (attr "arm_neg_pool_range")))
279
280 ; An assembler sequence may clobber the condition codes without us knowing.
281 ; If such an insn references the pool, then we have no way of knowing how,
282 ; so use the most conservative value for pool_range.
283 (define_asm_attributes
284 [(set_attr "conds" "clob")
285 (set_attr "length" "4")
286 (set_attr "pool_range" "250")])
287
288 ;; The instruction used to implement a particular pattern. This
289 ;; information is used by pipeline descriptions to provide accurate
290 ;; scheduling information.
291
292 (define_attr "insn"
293 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
294 (const_string "other"))
295
296 ; TYPE attribute is used to detect floating point instructions which, if
297 ; running on a co-processor, can run in parallel with other, basic instructions.
298 ; If write-buffer scheduling is enabled then it can also be used in the
299 ; scheduling of writes.
300
301 ; Classification of each insn
302 ; Note: vfp.md has different meanings for some of these, and some further
303 ; types as well. See that file for details.
304 ; alu any alu instruction that doesn't hit memory or fp
305 ; regs or have a shifted source operand
306 ; alu_shift any data instruction that doesn't hit memory or fp
307 ; regs, but has a source operand shifted by a constant
308 ; alu_shift_reg any data instruction that doesn't hit memory or fp
309 ; regs, but has a source operand shifted by a register value
310 ; mult a multiply instruction
311 ; block blockage insn, this blocks all functional units
312 ; float a floating point arithmetic operation (subject to expansion)
313 ; fdivd DFmode floating point division
314 ; fdivs SFmode floating point division
315 ; fmul Floating point multiply
316 ; ffmul Fast floating point multiply
317 ; farith Floating point arithmetic (4 cycle)
318 ; ffarith Fast floating point arithmetic (2 cycle)
319 ; float_em a floating point arithmetic operation that is normally emulated
320 ; even on a machine with an fpa.
321 ; f_fpa_load a floating point load from memory. Only for the FPA.
322 ; f_fpa_store a floating point store to memory. Only for the FPA.
323 ; f_load[sd] A single/double load from memory. Used for VFP unit.
324 ; f_store[sd] A single/double store to memory. Used for VFP unit.
325 ; f_flag a transfer of co-processor flags to the CPSR
326 ; f_mem_r a transfer of a floating point register to a real reg via mem
327 ; r_mem_f the reverse of f_mem_r
328 ; f_2_r fast transfer float to arm (no memory needed)
329 ; r_2_f fast transfer arm to float
330 ; f_cvt convert floating<->integral
331 ; branch a branch
332 ; call a subroutine call
333 ; load_byte load byte(s) from memory to arm registers
334 ; load1 load 1 word from memory to arm registers
335 ; load2 load 2 words from memory to arm registers
336 ; load3 load 3 words from memory to arm registers
337 ; load4 load 4 words from memory to arm registers
338 ; store1 store 1 word to memory from arm registers
339 ; store2 store 2 words
340 ; store3 store 3 words
341 ; store4 store 4 (or more) words
342 ; Additions for Cirrus Maverick co-processor:
343 ; mav_farith Floating point arithmetic (4 cycle)
344 ; mav_dmult Double multiplies (7 cycle)
345 ;
346
347 (define_attr "type"
348 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_fpa_load,f_fpa_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
349 (if_then_else
350 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
351 (const_string "mult")
352 (const_string "alu")))
353
354 ; Load scheduling, set from the arm_ld_sched variable
355 ; initialized by arm_option_override()
356 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
357
358 ;; Classification of NEON instructions for scheduling purposes.
359 ;; Do not set this attribute and the "type" attribute together in
360 ;; any one instruction pattern.
361 (define_attr "neon_type"
362 "neon_int_1,\
363 neon_int_2,\
364 neon_int_3,\
365 neon_int_4,\
366 neon_int_5,\
367 neon_vqneg_vqabs,\
368 neon_vmov,\
369 neon_vaba,\
370 neon_vsma,\
371 neon_vaba_qqq,\
372 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
373 neon_mul_qqq_8_16_32_ddd_32,\
374 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
375 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
376 neon_mla_qqq_8_16,\
377 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
378 neon_mla_qqq_32_qqd_32_scalar,\
379 neon_mul_ddd_16_scalar_32_16_long_scalar,\
380 neon_mul_qqd_32_scalar,\
381 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
382 neon_shift_1,\
383 neon_shift_2,\
384 neon_shift_3,\
385 neon_vshl_ddd,\
386 neon_vqshl_vrshl_vqrshl_qqq,\
387 neon_vsra_vrsra,\
388 neon_fp_vadd_ddd_vabs_dd,\
389 neon_fp_vadd_qqq_vabs_qq,\
390 neon_fp_vsum,\
391 neon_fp_vmul_ddd,\
392 neon_fp_vmul_qqd,\
393 neon_fp_vmla_ddd,\
394 neon_fp_vmla_qqq,\
395 neon_fp_vmla_ddd_scalar,\
396 neon_fp_vmla_qqq_scalar,\
397 neon_fp_vrecps_vrsqrts_ddd,\
398 neon_fp_vrecps_vrsqrts_qqq,\
399 neon_bp_simple,\
400 neon_bp_2cycle,\
401 neon_bp_3cycle,\
402 neon_ldr,\
403 neon_str,\
404 neon_vld1_1_2_regs,\
405 neon_vld1_3_4_regs,\
406 neon_vld2_2_regs_vld1_vld2_all_lanes,\
407 neon_vld2_4_regs,\
408 neon_vld3_vld4,\
409 neon_vst1_1_2_regs_vst2_2_regs,\
410 neon_vst1_3_4_regs,\
411 neon_vst2_4_regs_vst3_vst4,\
412 neon_vst3_vst4,\
413 neon_vld1_vld2_lane,\
414 neon_vld3_vld4_lane,\
415 neon_vst1_vst2_lane,\
416 neon_vst3_vst4_lane,\
417 neon_vld3_vld4_all_lanes,\
418 neon_mcr,\
419 neon_mcr_2_mcrr,\
420 neon_mrc,\
421 neon_mrrc,\
422 neon_ldm_2,\
423 neon_stm_2,\
424 none"
425 (const_string "none"))
426
427 ; condition codes: this one is used by final_prescan_insn to speed up
428 ; conditionalizing instructions. It saves having to scan the rtl to see if
429 ; it uses or alters the condition codes.
430 ;
431 ; USE means that the condition codes are used by the insn in the process of
432 ; outputting code; this means (at present) that we can't use the insn in
433 ; inlined branches
434 ;
435 ; SET means that the purpose of the insn is to set the condition codes in a
436 ; well defined manner.
437 ;
438 ; CLOB means that the condition codes are altered in an undefined manner, if
439 ; they are altered at all
440 ;
441 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
442 ; that the instruction does not use or alter the condition codes.
443 ;
444 ; NOCOND means that the instruction does not use or alter the condition
445 ; codes but can be converted into a conditionally executed instruction.
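;
; For example, "addsi3_compare0" below is marked "set" because its whole
; purpose is to define the flags, whereas "*arm_incscc" is marked "use"
; because it only reads flags established by an earlier comparison.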
446
447 (define_attr "conds" "use,set,clob,unconditional,nocond"
448 (if_then_else
449 (ior (eq_attr "is_thumb1" "yes")
450 (eq_attr "type" "call"))
451 (const_string "clob")
452 (if_then_else (eq_attr "neon_type" "none")
453 (const_string "nocond")
454 (const_string "unconditional"))))
455
456 ; Predicable means that the insn can be conditionally executed based on
457 ; an automatically added predicate (additional patterns are generated by
458 ; gen...). We default to 'no' because no Thumb patterns match this rule
459 ; and not all ARM patterns do.
460 (define_attr "predicable" "no,yes" (const_string "no"))
461
462 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
463 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
464 ; suffer blockages enough to warrant modelling this (and it can adversely
465 ; affect the schedule).
466 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
467
468 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
469 ; to stall the processor. Used with model_wbuf above.
470 (define_attr "write_conflict" "no,yes"
471 (if_then_else (eq_attr "type"
472 "block,float_em,f_fpa_load,f_fpa_store,f_mem_r,r_mem_f,call,load1")
473 (const_string "yes")
474 (const_string "no")))
475
476 ; Classify the insns into those that take one cycle and those that take more
477 ; than one on the main cpu execution unit.
478 (define_attr "core_cycles" "single,multi"
479 (if_then_else (eq_attr "type"
480 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
481 (const_string "single")
482 (const_string "multi")))
483
484 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
485 ;; distant label. Only applicable to Thumb code.
486 (define_attr "far_jump" "yes,no" (const_string "no"))
487
488
489 ;; The number of machine instructions this pattern expands to.
490 ;; Used for Thumb-2 conditional execution.
491 (define_attr "ce_count" "" (const_int 1))
492
493 ;;---------------------------------------------------------------------------
494 ;; Mode iterators
495
496 (include "iterators.md")
497
498 ;;---------------------------------------------------------------------------
499 ;; Predicates
500
501 (include "predicates.md")
502 (include "constraints.md")
503
504 ;;---------------------------------------------------------------------------
505 ;; Pipeline descriptions
506
507 (define_attr "tune_cortexr4" "yes,no"
508 (const (if_then_else
509 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
510 (const_string "yes")
511 (const_string "no"))))
512
513 ;; True if the generic scheduling description should be used.
514
515 (define_attr "generic_sched" "yes,no"
516 (const (if_then_else
517 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa8,cortexa9,cortexm4")
518 (eq_attr "tune_cortexr4" "yes"))
519 (const_string "no")
520 (const_string "yes"))))
521
522 (define_attr "generic_vfp" "yes,no"
523 (const (if_then_else
524 (and (eq_attr "fpu" "vfp")
525 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa8,cortexa9,cortexm4")
526 (eq_attr "tune_cortexr4" "no"))
527 (const_string "yes")
528 (const_string "no"))))
529
530 (include "arm-generic.md")
531 (include "arm926ejs.md")
532 (include "arm1020e.md")
533 (include "arm1026ejs.md")
534 (include "arm1136jfs.md")
535 (include "fa526.md")
536 (include "fa606te.md")
537 (include "fa626te.md")
538 (include "fmp626.md")
539 (include "fa726te.md")
540 (include "cortex-a5.md")
541 (include "cortex-a8.md")
542 (include "cortex-a9.md")
543 (include "cortex-r4.md")
544 (include "cortex-r4f.md")
545 (include "cortex-m4.md")
546 (include "cortex-m4-fpu.md")
547 (include "vfp11.md")
548
549 \f
550 ;;---------------------------------------------------------------------------
551 ;; Insn patterns
552 ;;
553 ;; Addition insns.
554
555 ;; Note: For DImode insns, there is normally no reason why operands should
556 ;; not be in the same register; what we don't want is for something being
557 ;; written to partially overlap something that is an input.
558 ;; Cirrus 64-bit additions should not be split because we have native
559 ;; 64-bit addition instructions.
560
561 (define_expand "adddi3"
562 [(parallel
563 [(set (match_operand:DI 0 "s_register_operand" "")
564 (plus:DI (match_operand:DI 1 "s_register_operand" "")
565 (match_operand:DI 2 "s_register_operand" "")))
566 (clobber (reg:CC CC_REGNUM))])]
567 "TARGET_EITHER"
568 "
569 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
570 {
571 if (!cirrus_fp_register (operands[0], DImode))
572 operands[0] = force_reg (DImode, operands[0]);
573 if (!cirrus_fp_register (operands[1], DImode))
574 operands[1] = force_reg (DImode, operands[1]);
575 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
576 DONE;
577 }
578
579 if (TARGET_THUMB1)
580 {
581 if (GET_CODE (operands[1]) != REG)
582 operands[1] = force_reg (DImode, operands[1]);
583 if (GET_CODE (operands[2]) != REG)
584 operands[2] = force_reg (DImode, operands[2]);
585 }
586 "
587 )
588
589 (define_insn "*thumb1_adddi3"
590 [(set (match_operand:DI 0 "register_operand" "=l")
591 (plus:DI (match_operand:DI 1 "register_operand" "%0")
592 (match_operand:DI 2 "register_operand" "l")))
593 (clobber (reg:CC CC_REGNUM))
594 ]
595 "TARGET_THUMB1"
596 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
597 [(set_attr "length" "4")]
598 )
599
600 (define_insn_and_split "*arm_adddi3"
601 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
602 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
603 (match_operand:DI 2 "s_register_operand" "r, 0")))
604 (clobber (reg:CC CC_REGNUM))]
605 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
606 "#"
607 "TARGET_32BIT && reload_completed
608 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
609 [(parallel [(set (reg:CC_C CC_REGNUM)
610 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
611 (match_dup 1)))
612 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
613 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
614 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
615 "
616 {
617 operands[3] = gen_highpart (SImode, operands[0]);
618 operands[0] = gen_lowpart (SImode, operands[0]);
619 operands[4] = gen_highpart (SImode, operands[1]);
620 operands[1] = gen_lowpart (SImode, operands[1]);
621 operands[5] = gen_highpart (SImode, operands[2]);
622 operands[2] = gen_lowpart (SImode, operands[2]);
623 }"
624 [(set_attr "conds" "clob")
625 (set_attr "length" "8")]
626 )
627
628 (define_insn_and_split "*adddi_sesidi_di"
629 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
630 (plus:DI (sign_extend:DI
631 (match_operand:SI 2 "s_register_operand" "r,r"))
632 (match_operand:DI 1 "s_register_operand" "0,r")))
633 (clobber (reg:CC CC_REGNUM))]
634 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
635 "#"
636 "TARGET_32BIT && reload_completed"
637 [(parallel [(set (reg:CC_C CC_REGNUM)
638 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
639 (match_dup 1)))
640 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
641 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
642 (const_int 31))
643 (match_dup 4))
644 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
645 "
646 {
647 operands[3] = gen_highpart (SImode, operands[0]);
648 operands[0] = gen_lowpart (SImode, operands[0]);
649 operands[4] = gen_highpart (SImode, operands[1]);
650 operands[1] = gen_lowpart (SImode, operands[1]);
651 operands[2] = gen_lowpart (SImode, operands[2]);
652 }"
653 [(set_attr "conds" "clob")
654 (set_attr "length" "8")]
655 )
656
657 (define_insn_and_split "*adddi_zesidi_di"
658 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
659 (plus:DI (zero_extend:DI
660 (match_operand:SI 2 "s_register_operand" "r,r"))
661 (match_operand:DI 1 "s_register_operand" "0,r")))
662 (clobber (reg:CC CC_REGNUM))]
663 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
664 "#"
665 "TARGET_32BIT && reload_completed"
666 [(parallel [(set (reg:CC_C CC_REGNUM)
667 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
668 (match_dup 1)))
669 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
670 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
671 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
672 "
673 {
674 operands[3] = gen_highpart (SImode, operands[0]);
675 operands[0] = gen_lowpart (SImode, operands[0]);
676 operands[4] = gen_highpart (SImode, operands[1]);
677 operands[1] = gen_lowpart (SImode, operands[1]);
678 operands[2] = gen_lowpart (SImode, operands[2]);
679 }"
680 [(set_attr "conds" "clob")
681 (set_attr "length" "8")]
682 )
683
684 (define_expand "addsi3"
685 [(set (match_operand:SI 0 "s_register_operand" "")
686 (plus:SI (match_operand:SI 1 "s_register_operand" "")
687 (match_operand:SI 2 "reg_or_int_operand" "")))]
688 "TARGET_EITHER"
689 "
690 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
691 {
692 arm_split_constant (PLUS, SImode, NULL_RTX,
693 INTVAL (operands[2]), operands[0], operands[1],
694 optimize && can_create_pseudo_p ());
695 DONE;
696 }
697 "
698 )
699
700 ; If there is a scratch available, this will be faster than synthesizing the
701 ; addition.
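; For instance (an illustrative constant, not taken from real code),
; 0xffff00ff is not a valid ARM or Thumb-2 immediate and neither is its
; negation, but its complement 0x0000ff00 is; with a free scratch the
; constant can therefore be loaded with a single mvn and added with a
; register-register add, instead of being synthesized piecewise by
; arm_split_constant in the "*arm_addsi3" split below.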
702 (define_peephole2
703 [(match_scratch:SI 3 "r")
704 (set (match_operand:SI 0 "arm_general_register_operand" "")
705 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
706 (match_operand:SI 2 "const_int_operand" "")))]
707 "TARGET_32BIT &&
708 !(const_ok_for_arm (INTVAL (operands[2]))
709 || const_ok_for_arm (-INTVAL (operands[2])))
710 && const_ok_for_arm (~INTVAL (operands[2]))"
711 [(set (match_dup 3) (match_dup 2))
712 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
713 ""
714 )
715
716 ;; The r/r/k alternative is required when reloading the address
717 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
718 ;; put the duplicated register first, and not try the commutative version.
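;; For example, reloading the address (plus (reg r4) (reg sp)) into r4 (the
;; register number is only illustrative) produces the insn
;;   (set (reg r4) (plus (reg r4) (reg sp)))
;; which the r/r/k alternative matches directly; its output template swaps
;; the operands so that sp is printed as the first source of the add.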
719 (define_insn_and_split "*arm_addsi3"
720 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,r, k, r")
721 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,rk,k, rk")
722 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,PJ,PJ,?n")))]
723 "TARGET_32BIT"
724 "@
725 add%?\\t%0, %1, %2
726 add%?\\t%0, %1, %2
727 add%?\\t%0, %2, %1
728 addw%?\\t%0, %1, %2
729 addw%?\\t%0, %1, %2
730 sub%?\\t%0, %1, #%n2
731 sub%?\\t%0, %1, #%n2
732 subw%?\\t%0, %1, #%n2
733 subw%?\\t%0, %1, #%n2
734 #"
735 "TARGET_32BIT
736 && GET_CODE (operands[2]) == CONST_INT
737 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
738 && (reload_completed || !arm_eliminable_register (operands[1]))"
739 [(clobber (const_int 0))]
740 "
741 arm_split_constant (PLUS, SImode, curr_insn,
742 INTVAL (operands[2]), operands[0],
743 operands[1], 0);
744 DONE;
745 "
746 [(set_attr "length" "4,4,4,4,4,4,4,4,4,16")
747 (set_attr "predicable" "yes")
748 (set_attr "arch" "*,*,*,t2,t2,*,*,t2,t2,*")]
749 )
750
751 (define_insn_and_split "*thumb1_addsi3"
752 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
753 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
754 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
755 "TARGET_THUMB1"
756 "*
757 static const char * const asms[] =
758 {
759 \"add\\t%0, %0, %2\",
760 \"sub\\t%0, %0, #%n2\",
761 \"add\\t%0, %1, %2\",
762 \"add\\t%0, %0, %2\",
763 \"add\\t%0, %0, %2\",
764 \"add\\t%0, %1, %2\",
765 \"add\\t%0, %1, %2\",
766 \"#\",
767 \"#\",
768 \"#\"
769 };
770 if ((which_alternative == 2 || which_alternative == 6)
771 && GET_CODE (operands[2]) == CONST_INT
772 && INTVAL (operands[2]) < 0)
773 return \"sub\\t%0, %1, #%n2\";
774 return asms[which_alternative];
775 "
776 "&& reload_completed && CONST_INT_P (operands[2])
777 && ((operands[1] != stack_pointer_rtx
778 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
779 || (operands[1] == stack_pointer_rtx
780 && INTVAL (operands[2]) > 1020))"
781 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
782 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
783 {
784 HOST_WIDE_INT offset = INTVAL (operands[2]);
785 if (operands[1] == stack_pointer_rtx)
786 offset -= 1020;
787 else
788 {
789 if (offset > 255)
790 offset = 255;
791 else if (offset < -255)
792 offset = -255;
793 }
794 operands[3] = GEN_INT (offset);
795 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
796 }
797 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
798 )
799
800 ;; Reloading and elimination of the frame pointer can
801 ;; sometimes cause this optimization to be missed.
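;; A sketch of the intent (register and offset chosen for illustration):
;; after elimination a Thumb-1 address computation can be left as
;;   mov  r3, #16
;;   add  r3, sp
;; and this peephole folds it back into the single instruction
;;   add  r3, sp, #16
;; which is only possible while the constant is word-aligned and below 1024.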
802 (define_peephole2
803 [(set (match_operand:SI 0 "arm_general_register_operand" "")
804 (match_operand:SI 1 "const_int_operand" ""))
805 (set (match_dup 0)
806 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
807 "TARGET_THUMB1
808 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
809 && (INTVAL (operands[1]) & 3) == 0"
810 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
811 ""
812 )
813
814 (define_insn "addsi3_compare0"
815 [(set (reg:CC_NOOV CC_REGNUM)
816 (compare:CC_NOOV
817 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
818 (match_operand:SI 2 "arm_add_operand" "rI,L"))
819 (const_int 0)))
820 (set (match_operand:SI 0 "s_register_operand" "=r,r")
821 (plus:SI (match_dup 1) (match_dup 2)))]
822 "TARGET_ARM"
823 "@
824 add%.\\t%0, %1, %2
825 sub%.\\t%0, %1, #%n2"
826 [(set_attr "conds" "set")]
827 )
828
829 (define_insn "*addsi3_compare0_scratch"
830 [(set (reg:CC_NOOV CC_REGNUM)
831 (compare:CC_NOOV
832 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
833 (match_operand:SI 1 "arm_add_operand" "rI,L"))
834 (const_int 0)))]
835 "TARGET_ARM"
836 "@
837 cmn%?\\t%0, %1
838 cmp%?\\t%0, #%n1"
839 [(set_attr "conds" "set")]
840 )
841
842 (define_insn "*compare_negsi_si"
843 [(set (reg:CC_Z CC_REGNUM)
844 (compare:CC_Z
845 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
846 (match_operand:SI 1 "s_register_operand" "r")))]
847 "TARGET_32BIT"
848 "cmn%?\\t%1, %0"
849 [(set_attr "conds" "set")]
850 )
851
852 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
853 ;; addend is a constant.
854 (define_insn "*cmpsi2_addneg"
855 [(set (reg:CC CC_REGNUM)
856 (compare:CC
857 (match_operand:SI 1 "s_register_operand" "r,r")
858 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
859 (set (match_operand:SI 0 "s_register_operand" "=r,r")
860 (plus:SI (match_dup 1)
861 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
862 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
863 "@
864 add%.\\t%0, %1, %3
865 sub%.\\t%0, %1, #%n3"
866 [(set_attr "conds" "set")]
867 )
868
869 ;; Convert the sequence
870 ;; sub rd, rn, #1
871 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
872 ;; bne dest
873 ;; into
874 ;; subs rd, rn, #1
875 ;; bcs dest ((unsigned)rn >= 1)
876 ;; similarly for the beq variant using bcc.
877 ;; This is a common looping idiom (while (n--))
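;; For illustration, a loop of this shape (a hypothetical function, not
;; taken from real code) generates the sequence above:
;;   void zero_bytes (unsigned char *p, unsigned int n)
;;   {
;;     while (n--)
;;       *p++ = 0;
;;   }
;; The decrement supplies the "sub rd, rn, #1" and the loop-back test
;; compares the result against -1, which the peephole folds into the
;; "subs"/"bcs" form.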
878 (define_peephole2
879 [(set (match_operand:SI 0 "arm_general_register_operand" "")
880 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
881 (const_int -1)))
882 (set (match_operand 2 "cc_register" "")
883 (compare (match_dup 0) (const_int -1)))
884 (set (pc)
885 (if_then_else (match_operator 3 "equality_operator"
886 [(match_dup 2) (const_int 0)])
887 (match_operand 4 "" "")
888 (match_operand 5 "" "")))]
889 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
890 [(parallel[
891 (set (match_dup 2)
892 (compare:CC
893 (match_dup 1) (const_int 1)))
894 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
895 (set (pc)
896 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
897 (match_dup 4)
898 (match_dup 5)))]
899 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
900 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
901 ? GEU : LTU),
902 VOIDmode,
903 operands[2], const0_rtx);"
904 )
905
906 ;; The next four insns work because they compare the result with one of
907 ;; the operands, and we know that the use of the condition code is
908 ;; either GEU or LTU, so we can use the carry flag from the addition
909 ;; instead of doing the compare a second time.
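;; In C terms, the carry out of "adds" is the value of the expression
;; (unsigned) (op1 + op2) < (unsigned) op1, so a following GEU or LTU test
;; of the sum against either operand can simply read the C flag rather than
;; repeating the comparison.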
910 (define_insn "*addsi3_compare_op1"
911 [(set (reg:CC_C CC_REGNUM)
912 (compare:CC_C
913 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
914 (match_operand:SI 2 "arm_add_operand" "rI,L"))
915 (match_dup 1)))
916 (set (match_operand:SI 0 "s_register_operand" "=r,r")
917 (plus:SI (match_dup 1) (match_dup 2)))]
918 "TARGET_32BIT"
919 "@
920 add%.\\t%0, %1, %2
921 sub%.\\t%0, %1, #%n2"
922 [(set_attr "conds" "set")]
923 )
924
925 (define_insn "*addsi3_compare_op2"
926 [(set (reg:CC_C CC_REGNUM)
927 (compare:CC_C
928 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
929 (match_operand:SI 2 "arm_add_operand" "rI,L"))
930 (match_dup 2)))
931 (set (match_operand:SI 0 "s_register_operand" "=r,r")
932 (plus:SI (match_dup 1) (match_dup 2)))]
933 "TARGET_32BIT"
934 "@
935 add%.\\t%0, %1, %2
936 sub%.\\t%0, %1, #%n2"
937 [(set_attr "conds" "set")]
938 )
939
940 (define_insn "*compare_addsi2_op0"
941 [(set (reg:CC_C CC_REGNUM)
942 (compare:CC_C
943 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
944 (match_operand:SI 1 "arm_add_operand" "rI,L"))
945 (match_dup 0)))]
946 "TARGET_32BIT"
947 "@
948 cmn%?\\t%0, %1
949 cmp%?\\t%0, #%n1"
950 [(set_attr "conds" "set")]
951 )
952
953 (define_insn "*compare_addsi2_op1"
954 [(set (reg:CC_C CC_REGNUM)
955 (compare:CC_C
956 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
957 (match_operand:SI 1 "arm_add_operand" "rI,L"))
958 (match_dup 1)))]
959 "TARGET_32BIT"
960 "@
961 cmn%?\\t%0, %1
962 cmp%?\\t%0, #%n1"
963 [(set_attr "conds" "set")]
964 )
965
966 (define_insn "*addsi3_carryin_<optab>"
967 [(set (match_operand:SI 0 "s_register_operand" "=r")
968 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
969 (match_operand:SI 2 "arm_rhs_operand" "rI"))
970 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
971 "TARGET_32BIT"
972 "adc%?\\t%0, %1, %2"
973 [(set_attr "conds" "use")]
974 )
975
976 (define_insn "*addsi3_carryin_alt2_<optab>"
977 [(set (match_operand:SI 0 "s_register_operand" "=r")
978 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
979 (match_operand:SI 1 "s_register_operand" "%r"))
980 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
981 "TARGET_32BIT"
982 "adc%?\\t%0, %1, %2"
983 [(set_attr "conds" "use")]
984 )
985
986 (define_insn "*addsi3_carryin_shift_<optab>"
987 [(set (match_operand:SI 0 "s_register_operand" "=r")
988 (plus:SI (plus:SI
989 (match_operator:SI 2 "shift_operator"
990 [(match_operand:SI 3 "s_register_operand" "r")
991 (match_operand:SI 4 "reg_or_int_operand" "rM")])
992 (match_operand:SI 1 "s_register_operand" "r"))
993 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
994 "TARGET_32BIT"
995 "adc%?\\t%0, %1, %3%S2"
996 [(set_attr "conds" "use")
997 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
998 (const_string "alu_shift")
999 (const_string "alu_shift_reg")))]
1000 )
1001
1002 (define_insn "*addsi3_carryin_clobercc_<optab>"
1003 [(set (match_operand:SI 0 "s_register_operand" "=r")
1004 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1005 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1006 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1007 (clobber (reg:CC CC_REGNUM))]
1008 "TARGET_32BIT"
1009 "adc%.\\t%0, %1, %2"
1010 [(set_attr "conds" "set")]
1011 )
1012
1013 (define_expand "incscc"
1014 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1015 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1016 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1017 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1018 "TARGET_32BIT"
1019 ""
1020 )
1021
1022 (define_insn "*arm_incscc"
1023 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1024 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1025 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1026 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1027 "TARGET_ARM"
1028 "@
1029 add%d2\\t%0, %1, #1
1030 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1031 [(set_attr "conds" "use")
1032 (set_attr "length" "4,8")]
1033 )
1034
1035 ; Transform ((x << y) - 1) to ~(~(x-1) << y) where x is a constant.
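; A quick check of the identity, assuming the usual two's-complement
; wrap-around of SImode arithmetic:
;   ~(~(x - 1) << y)
;     == ((x - 1) << y) | ((1 << y) - 1)  ; complementing a left shift
;                                         ; sets exactly the low y bits
;     == ((x - 1) << y) + ((1 << y) - 1)
;     == (x << y) - (1 << y) + (1 << y) - 1
;     == (x << y) - 1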
1036 (define_split
1037 [(set (match_operand:SI 0 "s_register_operand" "")
1038 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1039 (match_operand:SI 2 "s_register_operand" ""))
1040 (const_int -1)))
1041 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1042 "TARGET_32BIT"
1043 [(set (match_dup 3) (match_dup 1))
1044 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1045 "
1046 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1047 ")
1048
1049 (define_expand "addsf3"
1050 [(set (match_operand:SF 0 "s_register_operand" "")
1051 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1052 (match_operand:SF 2 "arm_float_add_operand" "")))]
1053 "TARGET_32BIT && TARGET_HARD_FLOAT"
1054 "
1055 if (TARGET_MAVERICK
1056 && !cirrus_fp_register (operands[2], SFmode))
1057 operands[2] = force_reg (SFmode, operands[2]);
1058 ")
1059
1060 (define_expand "adddf3"
1061 [(set (match_operand:DF 0 "s_register_operand" "")
1062 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1063 (match_operand:DF 2 "arm_float_add_operand" "")))]
1064 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1065 "
1066 if (TARGET_MAVERICK
1067 && !cirrus_fp_register (operands[2], DFmode))
1068 operands[2] = force_reg (DFmode, operands[2]);
1069 ")
1070
1071 (define_expand "subdi3"
1072 [(parallel
1073 [(set (match_operand:DI 0 "s_register_operand" "")
1074 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1075 (match_operand:DI 2 "s_register_operand" "")))
1076 (clobber (reg:CC CC_REGNUM))])]
1077 "TARGET_EITHER"
1078 "
1079 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1080 && TARGET_32BIT
1081 && cirrus_fp_register (operands[0], DImode)
1082 && cirrus_fp_register (operands[1], DImode))
1083 {
1084 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1085 DONE;
1086 }
1087
1088 if (TARGET_THUMB1)
1089 {
1090 if (GET_CODE (operands[1]) != REG)
1091 operands[1] = force_reg (DImode, operands[1]);
1092 if (GET_CODE (operands[2]) != REG)
1093 operands[2] = force_reg (DImode, operands[2]);
1094 }
1095 "
1096 )
1097
1098 (define_insn "*arm_subdi3"
1099 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1100 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1101 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1102 (clobber (reg:CC CC_REGNUM))]
1103 "TARGET_32BIT && !TARGET_NEON"
1104 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1105 [(set_attr "conds" "clob")
1106 (set_attr "length" "8")]
1107 )
1108
1109 (define_insn "*thumb_subdi3"
1110 [(set (match_operand:DI 0 "register_operand" "=l")
1111 (minus:DI (match_operand:DI 1 "register_operand" "0")
1112 (match_operand:DI 2 "register_operand" "l")))
1113 (clobber (reg:CC CC_REGNUM))]
1114 "TARGET_THUMB1"
1115 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1116 [(set_attr "length" "4")]
1117 )
1118
1119 (define_insn "*subdi_di_zesidi"
1120 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1121 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1122 (zero_extend:DI
1123 (match_operand:SI 2 "s_register_operand" "r,r"))))
1124 (clobber (reg:CC CC_REGNUM))]
1125 "TARGET_32BIT"
1126 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1127 [(set_attr "conds" "clob")
1128 (set_attr "length" "8")]
1129 )
1130
1131 (define_insn "*subdi_di_sesidi"
1132 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1133 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1134 (sign_extend:DI
1135 (match_operand:SI 2 "s_register_operand" "r,r"))))
1136 (clobber (reg:CC CC_REGNUM))]
1137 "TARGET_32BIT"
1138 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1139 [(set_attr "conds" "clob")
1140 (set_attr "length" "8")]
1141 )
1142
1143 (define_insn "*subdi_zesidi_di"
1144 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1145 (minus:DI (zero_extend:DI
1146 (match_operand:SI 2 "s_register_operand" "r,r"))
1147 (match_operand:DI 1 "s_register_operand" "0,r")))
1148 (clobber (reg:CC CC_REGNUM))]
1149 "TARGET_ARM"
1150 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1151 [(set_attr "conds" "clob")
1152 (set_attr "length" "8")]
1153 )
1154
1155 (define_insn "*subdi_sesidi_di"
1156 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1157 (minus:DI (sign_extend:DI
1158 (match_operand:SI 2 "s_register_operand" "r,r"))
1159 (match_operand:DI 1 "s_register_operand" "0,r")))
1160 (clobber (reg:CC CC_REGNUM))]
1161 "TARGET_ARM"
1162 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1163 [(set_attr "conds" "clob")
1164 (set_attr "length" "8")]
1165 )
1166
1167 (define_insn "*subdi_zesidi_zesidi"
1168 [(set (match_operand:DI 0 "s_register_operand" "=r")
1169 (minus:DI (zero_extend:DI
1170 (match_operand:SI 1 "s_register_operand" "r"))
1171 (zero_extend:DI
1172 (match_operand:SI 2 "s_register_operand" "r"))))
1173 (clobber (reg:CC CC_REGNUM))]
1174 "TARGET_32BIT"
1175 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1176 [(set_attr "conds" "clob")
1177 (set_attr "length" "8")]
1178 )
1179
1180 (define_expand "subsi3"
1181 [(set (match_operand:SI 0 "s_register_operand" "")
1182 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1183 (match_operand:SI 2 "s_register_operand" "")))]
1184 "TARGET_EITHER"
1185 "
1186 if (GET_CODE (operands[1]) == CONST_INT)
1187 {
1188 if (TARGET_32BIT)
1189 {
1190 arm_split_constant (MINUS, SImode, NULL_RTX,
1191 INTVAL (operands[1]), operands[0],
1192 operands[2], optimize && can_create_pseudo_p ());
1193 DONE;
1194 }
1195 else /* TARGET_THUMB1 */
1196 operands[1] = force_reg (SImode, operands[1]);
1197 }
1198 "
1199 )
1200
1201 (define_insn "thumb1_subsi3_insn"
1202 [(set (match_operand:SI 0 "register_operand" "=l")
1203 (minus:SI (match_operand:SI 1 "register_operand" "l")
1204 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1205 "TARGET_THUMB1"
1206 "sub\\t%0, %1, %2"
1207 [(set_attr "length" "2")
1208 (set_attr "conds" "set")])
1209
1210 ; ??? Check Thumb-2 split length
1211 (define_insn_and_split "*arm_subsi3_insn"
1212 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1213 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1214 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1215 "TARGET_32BIT"
1216 "@
1217 rsb%?\\t%0, %2, %1
1218 sub%?\\t%0, %1, %2
1219 sub%?\\t%0, %1, %2
1220 #
1221 #"
1222 "&& ((GET_CODE (operands[1]) == CONST_INT
1223 && !const_ok_for_arm (INTVAL (operands[1])))
1224 || (GET_CODE (operands[2]) == CONST_INT
1225 && !const_ok_for_arm (INTVAL (operands[2]))))"
1226 [(clobber (const_int 0))]
1227 "
1228 arm_split_constant (MINUS, SImode, curr_insn,
1229 INTVAL (operands[1]), operands[0], operands[2], 0);
1230 DONE;
1231 "
1232 [(set_attr "length" "4,4,4,16,16")
1233 (set_attr "predicable" "yes")]
1234 )
1235
1236 (define_peephole2
1237 [(match_scratch:SI 3 "r")
1238 (set (match_operand:SI 0 "arm_general_register_operand" "")
1239 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1240 (match_operand:SI 2 "arm_general_register_operand" "")))]
1241 "TARGET_32BIT
1242 && !const_ok_for_arm (INTVAL (operands[1]))
1243 && const_ok_for_arm (~INTVAL (operands[1]))"
1244 [(set (match_dup 3) (match_dup 1))
1245 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1246 ""
1247 )
1248
1249 (define_insn "*subsi3_compare0"
1250 [(set (reg:CC_NOOV CC_REGNUM)
1251 (compare:CC_NOOV
1252 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1253 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1254 (const_int 0)))
1255 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1256 (minus:SI (match_dup 1) (match_dup 2)))]
1257 "TARGET_32BIT"
1258 "@
1259 sub%.\\t%0, %1, %2
1260 rsb%.\\t%0, %2, %1"
1261 [(set_attr "conds" "set")]
1262 )
1263
1264 (define_insn "*subsi3_compare"
1265 [(set (reg:CC CC_REGNUM)
1266 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1267 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1268 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1269 (minus:SI (match_dup 1) (match_dup 2)))]
1270 "TARGET_32BIT"
1271 "@
1272 sub%.\\t%0, %1, %2
1273 rsb%.\\t%0, %2, %1"
1274 [(set_attr "conds" "set")]
1275 )
1276
1277 (define_expand "decscc"
1278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1279 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1280 (match_operator:SI 2 "arm_comparison_operator"
1281 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1282 "TARGET_32BIT"
1283 ""
1284 )
1285
1286 (define_insn "*arm_decscc"
1287 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1288 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1289 (match_operator:SI 2 "arm_comparison_operator"
1290 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1291 "TARGET_ARM"
1292 "@
1293 sub%d2\\t%0, %1, #1
1294 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1295 [(set_attr "conds" "use")
1296 (set_attr "length" "*,8")]
1297 )
1298
1299 (define_expand "subsf3"
1300 [(set (match_operand:SF 0 "s_register_operand" "")
1301 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1302 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1303 "TARGET_32BIT && TARGET_HARD_FLOAT"
1304 "
1305 if (TARGET_MAVERICK)
1306 {
1307 if (!cirrus_fp_register (operands[1], SFmode))
1308 operands[1] = force_reg (SFmode, operands[1]);
1309 if (!cirrus_fp_register (operands[2], SFmode))
1310 operands[2] = force_reg (SFmode, operands[2]);
1311 }
1312 ")
1313
1314 (define_expand "subdf3"
1315 [(set (match_operand:DF 0 "s_register_operand" "")
1316 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1317 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1318 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1319 "
1320 if (TARGET_MAVERICK)
1321 {
1322 if (!cirrus_fp_register (operands[1], DFmode))
1323 operands[1] = force_reg (DFmode, operands[1]);
1324 if (!cirrus_fp_register (operands[2], DFmode))
1325 operands[2] = force_reg (DFmode, operands[2]);
1326 }
1327 ")
1328
1329 \f
1330 ;; Multiplication insns
1331
1332 (define_expand "mulsi3"
1333 [(set (match_operand:SI 0 "s_register_operand" "")
1334 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1335 (match_operand:SI 1 "s_register_operand" "")))]
1336 "TARGET_EITHER"
1337 ""
1338 )
1339
1340 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1341 (define_insn "*arm_mulsi3"
1342 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1343 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1344 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1345 "TARGET_32BIT && !arm_arch6"
1346 "mul%?\\t%0, %2, %1"
1347 [(set_attr "insn" "mul")
1348 (set_attr "predicable" "yes")]
1349 )
1350
1351 (define_insn "*arm_mulsi3_v6"
1352 [(set (match_operand:SI 0 "s_register_operand" "=r")
1353 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1354 (match_operand:SI 2 "s_register_operand" "r")))]
1355 "TARGET_32BIT && arm_arch6"
1356 "mul%?\\t%0, %1, %2"
1357 [(set_attr "insn" "mul")
1358 (set_attr "predicable" "yes")]
1359 )
1360
1361 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1362 ; 1 and 2 are the same, because reload will make operand 0 match
1363 ; operand 1 without realizing that this conflicts with operand 2. We fix
1364 ; this by adding another alternative to match this case, and then `reload'
1365 ; it ourselves. This alternative must come first.
1366 (define_insn "*thumb_mulsi3"
1367 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1368 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1369 (match_operand:SI 2 "register_operand" "l,l,l")))]
1370 "TARGET_THUMB1 && !arm_arch6"
1371 "*
1372 if (which_alternative < 2)
1373 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1374 else
1375 return \"mul\\t%0, %2\";
1376 "
1377 [(set_attr "length" "4,4,2")
1378 (set_attr "insn" "mul")]
1379 )
1380
1381 (define_insn "*thumb_mulsi3_v6"
1382 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1383 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1384 (match_operand:SI 2 "register_operand" "l,0,0")))]
1385 "TARGET_THUMB1 && arm_arch6"
1386 "@
1387 mul\\t%0, %2
1388 mul\\t%0, %1
1389 mul\\t%0, %1"
1390 [(set_attr "length" "2")
1391 (set_attr "insn" "mul")]
1392 )
1393
1394 (define_insn "*mulsi3_compare0"
1395 [(set (reg:CC_NOOV CC_REGNUM)
1396 (compare:CC_NOOV (mult:SI
1397 (match_operand:SI 2 "s_register_operand" "r,r")
1398 (match_operand:SI 1 "s_register_operand" "%0,r"))
1399 (const_int 0)))
1400 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1401 (mult:SI (match_dup 2) (match_dup 1)))]
1402 "TARGET_ARM && !arm_arch6"
1403 "mul%.\\t%0, %2, %1"
1404 [(set_attr "conds" "set")
1405 (set_attr "insn" "muls")]
1406 )
1407
1408 (define_insn "*mulsi3_compare0_v6"
1409 [(set (reg:CC_NOOV CC_REGNUM)
1410 (compare:CC_NOOV (mult:SI
1411 (match_operand:SI 2 "s_register_operand" "r")
1412 (match_operand:SI 1 "s_register_operand" "r"))
1413 (const_int 0)))
1414 (set (match_operand:SI 0 "s_register_operand" "=r")
1415 (mult:SI (match_dup 2) (match_dup 1)))]
1416 "TARGET_ARM && arm_arch6 && optimize_size"
1417 "mul%.\\t%0, %2, %1"
1418 [(set_attr "conds" "set")
1419 (set_attr "insn" "muls")]
1420 )
1421
1422 (define_insn "*mulsi_compare0_scratch"
1423 [(set (reg:CC_NOOV CC_REGNUM)
1424 (compare:CC_NOOV (mult:SI
1425 (match_operand:SI 2 "s_register_operand" "r,r")
1426 (match_operand:SI 1 "s_register_operand" "%0,r"))
1427 (const_int 0)))
1428 (clobber (match_scratch:SI 0 "=&r,&r"))]
1429 "TARGET_ARM && !arm_arch6"
1430 "mul%.\\t%0, %2, %1"
1431 [(set_attr "conds" "set")
1432 (set_attr "insn" "muls")]
1433 )
1434
1435 (define_insn "*mulsi_compare0_scratch_v6"
1436 [(set (reg:CC_NOOV CC_REGNUM)
1437 (compare:CC_NOOV (mult:SI
1438 (match_operand:SI 2 "s_register_operand" "r")
1439 (match_operand:SI 1 "s_register_operand" "r"))
1440 (const_int 0)))
1441 (clobber (match_scratch:SI 0 "=r"))]
1442 "TARGET_ARM && arm_arch6 && optimize_size"
1443 "mul%.\\t%0, %2, %1"
1444 [(set_attr "conds" "set")
1445 (set_attr "insn" "muls")]
1446 )
1447
1448 ;; Unnamed templates to match MLA instruction.
1449
1450 (define_insn "*mulsi3addsi"
1451 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1452 (plus:SI
1453 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1454 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1455 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1456 "TARGET_32BIT && !arm_arch6"
1457 "mla%?\\t%0, %2, %1, %3"
1458 [(set_attr "insn" "mla")
1459 (set_attr "predicable" "yes")]
1460 )
1461
1462 (define_insn "*mulsi3addsi_v6"
1463 [(set (match_operand:SI 0 "s_register_operand" "=r")
1464 (plus:SI
1465 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1466 (match_operand:SI 1 "s_register_operand" "r"))
1467 (match_operand:SI 3 "s_register_operand" "r")))]
1468 "TARGET_32BIT && arm_arch6"
1469 "mla%?\\t%0, %2, %1, %3"
1470 [(set_attr "insn" "mla")
1471 (set_attr "predicable" "yes")]
1472 )
1473
1474 (define_insn "*mulsi3addsi_compare0"
1475 [(set (reg:CC_NOOV CC_REGNUM)
1476 (compare:CC_NOOV
1477 (plus:SI (mult:SI
1478 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1479 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1480 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1481 (const_int 0)))
1482 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1483 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1484 (match_dup 3)))]
1485 "TARGET_ARM && arm_arch6"
1486 "mla%.\\t%0, %2, %1, %3"
1487 [(set_attr "conds" "set")
1488 (set_attr "insn" "mlas")]
1489 )
1490
1491 (define_insn "*mulsi3addsi_compare0_v6"
1492 [(set (reg:CC_NOOV CC_REGNUM)
1493 (compare:CC_NOOV
1494 (plus:SI (mult:SI
1495 (match_operand:SI 2 "s_register_operand" "r")
1496 (match_operand:SI 1 "s_register_operand" "r"))
1497 (match_operand:SI 3 "s_register_operand" "r"))
1498 (const_int 0)))
1499 (set (match_operand:SI 0 "s_register_operand" "=r")
1500 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1501 (match_dup 3)))]
1502 "TARGET_ARM && arm_arch6 && optimize_size"
1503 "mla%.\\t%0, %2, %1, %3"
1504 [(set_attr "conds" "set")
1505 (set_attr "insn" "mlas")]
1506 )
1507
1508 (define_insn "*mulsi3addsi_compare0_scratch"
1509 [(set (reg:CC_NOOV CC_REGNUM)
1510 (compare:CC_NOOV
1511 (plus:SI (mult:SI
1512 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1513 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1514 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1515 (const_int 0)))
1516 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1517 "TARGET_ARM && !arm_arch6"
1518 "mla%.\\t%0, %2, %1, %3"
1519 [(set_attr "conds" "set")
1520 (set_attr "insn" "mlas")]
1521 )
1522
1523 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1524 [(set (reg:CC_NOOV CC_REGNUM)
1525 (compare:CC_NOOV
1526 (plus:SI (mult:SI
1527 (match_operand:SI 2 "s_register_operand" "r")
1528 (match_operand:SI 1 "s_register_operand" "r"))
1529 (match_operand:SI 3 "s_register_operand" "r"))
1530 (const_int 0)))
1531 (clobber (match_scratch:SI 0 "=r"))]
1532 "TARGET_ARM && arm_arch6 && optimize_size"
1533 "mla%.\\t%0, %2, %1, %3"
1534 [(set_attr "conds" "set")
1535 (set_attr "insn" "mlas")]
1536 )
1537
1538 (define_insn "*mulsi3subsi"
1539 [(set (match_operand:SI 0 "s_register_operand" "=r")
1540 (minus:SI
1541 (match_operand:SI 3 "s_register_operand" "r")
1542 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1543 (match_operand:SI 1 "s_register_operand" "r"))))]
1544 "TARGET_32BIT && arm_arch_thumb2"
1545 "mls%?\\t%0, %2, %1, %3"
1546 [(set_attr "insn" "mla")
1547 (set_attr "predicable" "yes")]
1548 )
1549
1550 (define_expand "maddsidi4"
1551 [(set (match_operand:DI 0 "s_register_operand" "")
1552 (plus:DI
1553 (mult:DI
1554 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1555 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1556 (match_operand:DI 3 "s_register_operand" "")))]
1557 "TARGET_32BIT && arm_arch3m"
1558 "")
1559
1560 (define_insn "*mulsidi3adddi"
1561 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1562 (plus:DI
1563 (mult:DI
1564 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1565 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1566 (match_operand:DI 1 "s_register_operand" "0")))]
1567 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1568 "smlal%?\\t%Q0, %R0, %3, %2"
1569 [(set_attr "insn" "smlal")
1570 (set_attr "predicable" "yes")]
1571 )
1572
1573 (define_insn "*mulsidi3adddi_v6"
1574 [(set (match_operand:DI 0 "s_register_operand" "=r")
1575 (plus:DI
1576 (mult:DI
1577 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1578 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1579 (match_operand:DI 1 "s_register_operand" "0")))]
1580 "TARGET_32BIT && arm_arch6"
1581 "smlal%?\\t%Q0, %R0, %3, %2"
1582 [(set_attr "insn" "smlal")
1583 (set_attr "predicable" "yes")]
1584 )
1585
1586 ;; 32x32->64 widening multiply.
1587 ;; As with mulsi3, the only difference between the v3-5 and v6+
1588 ;; versions of these patterns is the requirement that the output not
1589 ;; overlap the inputs, but that still means we have to have a named
1590 ;; expander and two different starred insns.
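;; Illustrative example (not an actual pattern): a 32x32->64 signed multiply
;;   (set (reg:DI d) (mult:DI (sign_extend:DI (reg:SI a))
;;                            (sign_extend:DI (reg:SI b))))
;; becomes a single "smull dlo, dhi, a, b"; on pre-v6 cores the "=&r"
;; earlyclobber keeps the destination pair {dlo, dhi} away from a and b,
;; while the v6 insn drops that restriction.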
1591
1592 (define_expand "mulsidi3"
1593 [(set (match_operand:DI 0 "s_register_operand" "")
1594 (mult:DI
1595 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1596 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1597 "TARGET_32BIT && arm_arch3m"
1598 ""
1599 )
1600
1601 (define_insn "*mulsidi3_nov6"
1602 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1603 (mult:DI
1604 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1605 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1606 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1607 "smull%?\\t%Q0, %R0, %1, %2"
1608 [(set_attr "insn" "smull")
1609 (set_attr "predicable" "yes")]
1610 )
1611
1612 (define_insn "*mulsidi3_v6"
1613 [(set (match_operand:DI 0 "s_register_operand" "=r")
1614 (mult:DI
1615 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1616 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1617 "TARGET_32BIT && arm_arch6"
1618 "smull%?\\t%Q0, %R0, %1, %2"
1619 [(set_attr "insn" "smull")
1620 (set_attr "predicable" "yes")]
1621 )
1622
1623 (define_expand "umulsidi3"
1624 [(set (match_operand:DI 0 "s_register_operand" "")
1625 (mult:DI
1626 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1627 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1628 "TARGET_32BIT && arm_arch3m"
1629 ""
1630 )
1631
1632 (define_insn "*umulsidi3_nov6"
1633 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1634 (mult:DI
1635 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1636 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1637 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1638 "umull%?\\t%Q0, %R0, %1, %2"
1639 [(set_attr "insn" "umull")
1640 (set_attr "predicable" "yes")]
1641 )
1642
1643 (define_insn "*umulsidi3_v6"
1644 [(set (match_operand:DI 0 "s_register_operand" "=r")
1645 (mult:DI
1646 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1647 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1648 "TARGET_32BIT && arm_arch6"
1649 "umull%?\\t%Q0, %R0, %1, %2"
1650 [(set_attr "insn" "umull")
1651 (set_attr "predicable" "yes")]
1652 )
1653
1654 (define_expand "umaddsidi4"
1655 [(set (match_operand:DI 0 "s_register_operand" "")
1656 (plus:DI
1657 (mult:DI
1658 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1659 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1660 (match_operand:DI 3 "s_register_operand" "")))]
1661 "TARGET_32BIT && arm_arch3m"
1662 "")
1663
1664 (define_insn "*umulsidi3adddi"
1665 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1666 (plus:DI
1667 (mult:DI
1668 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1669 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1670 (match_operand:DI 1 "s_register_operand" "0")))]
1671 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1672 "umlal%?\\t%Q0, %R0, %3, %2"
1673 [(set_attr "insn" "umlal")
1674 (set_attr "predicable" "yes")]
1675 )
1676
1677 (define_insn "*umulsidi3adddi_v6"
1678 [(set (match_operand:DI 0 "s_register_operand" "=r")
1679 (plus:DI
1680 (mult:DI
1681 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1682 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1683 (match_operand:DI 1 "s_register_operand" "0")))]
1684 "TARGET_32BIT && arm_arch6"
1685 "umlal%?\\t%Q0, %R0, %3, %2"
1686 [(set_attr "insn" "umlal")
1687 (set_attr "predicable" "yes")]
1688 )
1689
1690 (define_expand "smulsi3_highpart"
1691 [(parallel
1692 [(set (match_operand:SI 0 "s_register_operand" "")
1693 (truncate:SI
1694 (lshiftrt:DI
1695 (mult:DI
1696 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1697 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1698 (const_int 32))))
1699 (clobber (match_scratch:SI 3 ""))])]
1700 "TARGET_32BIT && arm_arch3m"
1701 ""
1702 )
1703
1704 (define_insn "*smulsi3_highpart_nov6"
1705 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1706 (truncate:SI
1707 (lshiftrt:DI
1708 (mult:DI
1709 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1710 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1711 (const_int 32))))
1712 (clobber (match_scratch:SI 3 "=&r,&r"))]
1713 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1714 "smull%?\\t%3, %0, %2, %1"
1715 [(set_attr "insn" "smull")
1716 (set_attr "predicable" "yes")]
1717 )
1718
1719 (define_insn "*smulsi3_highpart_v6"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r")
1721 (truncate:SI
1722 (lshiftrt:DI
1723 (mult:DI
1724 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1725 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1726 (const_int 32))))
1727 (clobber (match_scratch:SI 3 "=r"))]
1728 "TARGET_32BIT && arm_arch6"
1729 "smull%?\\t%3, %0, %2, %1"
1730 [(set_attr "insn" "smull")
1731 (set_attr "predicable" "yes")]
1732 )
1733
1734 (define_expand "umulsi3_highpart"
1735 [(parallel
1736 [(set (match_operand:SI 0 "s_register_operand" "")
1737 (truncate:SI
1738 (lshiftrt:DI
1739 (mult:DI
1740 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1741 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1742 (const_int 32))))
1743 (clobber (match_scratch:SI 3 ""))])]
1744 "TARGET_32BIT && arm_arch3m"
1745 ""
1746 )
1747
1748 (define_insn "*umulsi3_highpart_nov6"
1749 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1750 (truncate:SI
1751 (lshiftrt:DI
1752 (mult:DI
1753 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1754 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1755 (const_int 32))))
1756 (clobber (match_scratch:SI 3 "=&r,&r"))]
1757 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1758 "umull%?\\t%3, %0, %2, %1"
1759 [(set_attr "insn" "umull")
1760 (set_attr "predicable" "yes")]
1761 )
1762
1763 (define_insn "*umulsi3_highpart_v6"
1764 [(set (match_operand:SI 0 "s_register_operand" "=r")
1765 (truncate:SI
1766 (lshiftrt:DI
1767 (mult:DI
1768 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1769 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1770 (const_int 32))))
1771 (clobber (match_scratch:SI 3 "=r"))]
1772 "TARGET_32BIT && arm_arch6"
1773 "umull%?\\t%3, %0, %2, %1"
1774 [(set_attr "insn" "umull")
1775 (set_attr "predicable" "yes")]
1776 )
1777
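;; In the DSP multiply patterns below, a sign-extended HImode operand selects
;; the bottom halfword of a register and an arithmetic shift right by 16
;; selects the top halfword; these correspond to the "b" and "t" letters in
;; smulbb, smultb, smulbt, smultt and the smla forms.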
1778 (define_insn "mulhisi3"
1779 [(set (match_operand:SI 0 "s_register_operand" "=r")
1780 (mult:SI (sign_extend:SI
1781 (match_operand:HI 1 "s_register_operand" "%r"))
1782 (sign_extend:SI
1783 (match_operand:HI 2 "s_register_operand" "r"))))]
1784 "TARGET_DSP_MULTIPLY"
1785 "smulbb%?\\t%0, %1, %2"
1786 [(set_attr "insn" "smulxy")
1787 (set_attr "predicable" "yes")]
1788 )
1789
1790 (define_insn "*mulhisi3tb"
1791 [(set (match_operand:SI 0 "s_register_operand" "=r")
1792 (mult:SI (ashiftrt:SI
1793 (match_operand:SI 1 "s_register_operand" "r")
1794 (const_int 16))
1795 (sign_extend:SI
1796 (match_operand:HI 2 "s_register_operand" "r"))))]
1797 "TARGET_DSP_MULTIPLY"
1798 "smultb%?\\t%0, %1, %2"
1799 [(set_attr "insn" "smulxy")
1800 (set_attr "predicable" "yes")]
1801 )
1802
1803 (define_insn "*mulhisi3bt"
1804 [(set (match_operand:SI 0 "s_register_operand" "=r")
1805 (mult:SI (sign_extend:SI
1806 (match_operand:HI 1 "s_register_operand" "r"))
1807 (ashiftrt:SI
1808 (match_operand:SI 2 "s_register_operand" "r")
1809 (const_int 16))))]
1810 "TARGET_DSP_MULTIPLY"
1811 "smulbt%?\\t%0, %1, %2"
1812 [(set_attr "insn" "smulxy")
1813 (set_attr "predicable" "yes")]
1814 )
1815
1816 (define_insn "*mulhisi3tt"
1817 [(set (match_operand:SI 0 "s_register_operand" "=r")
1818 (mult:SI (ashiftrt:SI
1819 (match_operand:SI 1 "s_register_operand" "r")
1820 (const_int 16))
1821 (ashiftrt:SI
1822 (match_operand:SI 2 "s_register_operand" "r")
1823 (const_int 16))))]
1824 "TARGET_DSP_MULTIPLY"
1825 "smultt%?\\t%0, %1, %2"
1826 [(set_attr "insn" "smulxy")
1827 (set_attr "predicable" "yes")]
1828 )
1829
1830 (define_insn "maddhisi4"
1831 [(set (match_operand:SI 0 "s_register_operand" "=r")
1832 (plus:SI (mult:SI (sign_extend:SI
1833 (match_operand:HI 1 "s_register_operand" "r"))
1834 (sign_extend:SI
1835 (match_operand:HI 2 "s_register_operand" "r")))
1836 (match_operand:SI 3 "s_register_operand" "r")))]
1837 "TARGET_DSP_MULTIPLY"
1838 "smlabb%?\\t%0, %1, %2, %3"
1839 [(set_attr "insn" "smlaxy")
1840 (set_attr "predicable" "yes")]
1841 )
1842
1843 ;; Note: there is no maddhisi4ibt because this one is the canonical form
1844 (define_insn "*maddhisi4tb"
1845 [(set (match_operand:SI 0 "s_register_operand" "=r")
1846 (plus:SI (mult:SI (ashiftrt:SI
1847 (match_operand:SI 1 "s_register_operand" "r")
1848 (const_int 16))
1849 (sign_extend:SI
1850 (match_operand:HI 2 "s_register_operand" "r")))
1851 (match_operand:SI 3 "s_register_operand" "r")))]
1852 "TARGET_DSP_MULTIPLY"
1853 "smlatb%?\\t%0, %1, %2, %3"
1854 [(set_attr "insn" "smlaxy")
1855 (set_attr "predicable" "yes")]
1856 )
1857
1858 (define_insn "*maddhisi4tt"
1859 [(set (match_operand:SI 0 "s_register_operand" "=r")
1860 (plus:SI (mult:SI (ashiftrt:SI
1861 (match_operand:SI 1 "s_register_operand" "r")
1862 (const_int 16))
1863 (ashiftrt:SI
1864 (match_operand:SI 2 "s_register_operand" "r")
1865 (const_int 16)))
1866 (match_operand:SI 3 "s_register_operand" "r")))]
1867 "TARGET_DSP_MULTIPLY"
1868 "smlatt%?\\t%0, %1, %2, %3"
1869 [(set_attr "insn" "smlaxy")
1870 (set_attr "predicable" "yes")]
1871 )
1872
1873 (define_insn "maddhidi4"
1874 [(set (match_operand:DI 0 "s_register_operand" "=r")
1875 (plus:DI
1876 (mult:DI (sign_extend:DI
1877 (match_operand:HI 1 "s_register_operand" "r"))
1878 (sign_extend:DI
1879 (match_operand:HI 2 "s_register_operand" "r")))
1880 (match_operand:DI 3 "s_register_operand" "0")))]
1881 "TARGET_DSP_MULTIPLY"
1882 "smlalbb%?\\t%Q0, %R0, %1, %2"
1883 [(set_attr "insn" "smlalxy")
1884 (set_attr "predicable" "yes")])
1885
1886 ;; Note: there is no maddhidi4ibt because this one is the canonical form
1887 (define_insn "*maddhidi4tb"
1888 [(set (match_operand:DI 0 "s_register_operand" "=r")
1889 (plus:DI
1890 (mult:DI (sign_extend:DI
1891 (ashiftrt:SI
1892 (match_operand:SI 1 "s_register_operand" "r")
1893 (const_int 16)))
1894 (sign_extend:DI
1895 (match_operand:HI 2 "s_register_operand" "r")))
1896 (match_operand:DI 3 "s_register_operand" "0")))]
1897 "TARGET_DSP_MULTIPLY"
1898 "smlaltb%?\\t%Q0, %R0, %1, %2"
1899 [(set_attr "insn" "smlalxy")
1900 (set_attr "predicable" "yes")])
1901
1902 (define_insn "*maddhidi4tt"
1903 [(set (match_operand:DI 0 "s_register_operand" "=r")
1904 (plus:DI
1905 (mult:DI (sign_extend:DI
1906 (ashiftrt:SI
1907 (match_operand:SI 1 "s_register_operand" "r")
1908 (const_int 16)))
1909 (sign_extend:DI
1910 (ashiftrt:SI
1911 (match_operand:SI 2 "s_register_operand" "r")
1912 (const_int 16))))
1913 (match_operand:DI 3 "s_register_operand" "0")))]
1914 "TARGET_DSP_MULTIPLY"
1915 "smlaltt%?\\t%Q0, %R0, %1, %2"
1916 [(set_attr "insn" "smlalxy")
1917 (set_attr "predicable" "yes")])
1918
1919 (define_expand "mulsf3"
1920 [(set (match_operand:SF 0 "s_register_operand" "")
1921 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1922 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1923 "TARGET_32BIT && TARGET_HARD_FLOAT"
1924 "
1925 if (TARGET_MAVERICK
1926 && !cirrus_fp_register (operands[2], SFmode))
1927 operands[2] = force_reg (SFmode, operands[2]);
1928 ")
1929
1930 (define_expand "muldf3"
1931 [(set (match_operand:DF 0 "s_register_operand" "")
1932 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1933 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1934 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1935 "
1936 if (TARGET_MAVERICK
1937 && !cirrus_fp_register (operands[2], DFmode))
1938 operands[2] = force_reg (DFmode, operands[2]);
1939 ")
1940 \f
1941 ;; Division insns
1942
1943 (define_expand "divsf3"
1944 [(set (match_operand:SF 0 "s_register_operand" "")
1945 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1946 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1947 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1948 "")
1949
1950 (define_expand "divdf3"
1951 [(set (match_operand:DF 0 "s_register_operand" "")
1952 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1953 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1954 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1955 "")
1956 \f
1957 ;; Modulo insns
1958
1959 (define_expand "modsf3"
1960 [(set (match_operand:SF 0 "s_register_operand" "")
1961 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1962 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1963 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1964 "")
1965
1966 (define_expand "moddf3"
1967 [(set (match_operand:DF 0 "s_register_operand" "")
1968 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1969 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1970 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1971 "")
1972 \f
1973 ;; Boolean and,ior,xor insns
1974
1975 ;; Split up double word logical operations
1976
1977 ;; Split up simple DImode logical operations. Simply perform the logical
1978 ;; operation on the upper and lower halves of the registers.
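;; Illustrative example (assumed register assignment): with the operands in
;; the pairs {r0,r1} and {r2,r3}, low word first, a DImode AND splits into
;;   (set (reg:SI r0) (and:SI (reg:SI r0) (reg:SI r2)))
;;   (set (reg:SI r1) (and:SI (reg:SI r1) (reg:SI r3)))
;; i.e. "and r0, r0, r2" followed by "and r1, r1, r3".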
1979 (define_split
1980 [(set (match_operand:DI 0 "s_register_operand" "")
1981 (match_operator:DI 6 "logical_binary_operator"
1982 [(match_operand:DI 1 "s_register_operand" "")
1983 (match_operand:DI 2 "s_register_operand" "")]))]
1984 "TARGET_32BIT && reload_completed
1985 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1986 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1987 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1988 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1989 "
1990 {
1991 operands[3] = gen_highpart (SImode, operands[0]);
1992 operands[0] = gen_lowpart (SImode, operands[0]);
1993 operands[4] = gen_highpart (SImode, operands[1]);
1994 operands[1] = gen_lowpart (SImode, operands[1]);
1995 operands[5] = gen_highpart (SImode, operands[2]);
1996 operands[2] = gen_lowpart (SImode, operands[2]);
1997 }"
1998 )
1999
2000 (define_split
2001 [(set (match_operand:DI 0 "s_register_operand" "")
2002 (match_operator:DI 6 "logical_binary_operator"
2003 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2004 (match_operand:DI 1 "s_register_operand" "")]))]
2005 "TARGET_32BIT && reload_completed"
2006 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2007 (set (match_dup 3) (match_op_dup:SI 6
2008 [(ashiftrt:SI (match_dup 2) (const_int 31))
2009 (match_dup 4)]))]
2010 "
2011 {
2012 operands[3] = gen_highpart (SImode, operands[0]);
2013 operands[0] = gen_lowpart (SImode, operands[0]);
2014 operands[4] = gen_highpart (SImode, operands[1]);
2015 operands[1] = gen_lowpart (SImode, operands[1]);
2016 operands[5] = gen_highpart (SImode, operands[2]);
2017 operands[2] = gen_lowpart (SImode, operands[2]);
2018 }"
2019 )
2020
2021 ;; The zero extend of operand 2 means we can just copy the high part of
2022 ;; operand1 into operand0.
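;; Illustrative example (assumed registers): for a result in {r0,r1},
;;   (ior:DI (zero_extend:DI (reg:SI r2)) (reg:DI {r4,r5}))
;; splits into "orr r0, r4, r2" for the low word and a plain "mov r1, r5"
;; for the high word, since the zero-extended operand has no high bits.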
2023 (define_split
2024 [(set (match_operand:DI 0 "s_register_operand" "")
2025 (ior:DI
2026 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2027 (match_operand:DI 1 "s_register_operand" "")))]
2028 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2029 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2030 (set (match_dup 3) (match_dup 4))]
2031 "
2032 {
2033 operands[4] = gen_highpart (SImode, operands[1]);
2034 operands[3] = gen_highpart (SImode, operands[0]);
2035 operands[0] = gen_lowpart (SImode, operands[0]);
2036 operands[1] = gen_lowpart (SImode, operands[1]);
2037 }"
2038 )
2039
2040 ;; The zero extend of operand 2 means we can just copy the high part of
2041 ;; operand1 into operand0.
2042 (define_split
2043 [(set (match_operand:DI 0 "s_register_operand" "")
2044 (xor:DI
2045 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2046 (match_operand:DI 1 "s_register_operand" "")))]
2047 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2048 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2049 (set (match_dup 3) (match_dup 4))]
2050 "
2051 {
2052 operands[4] = gen_highpart (SImode, operands[1]);
2053 operands[3] = gen_highpart (SImode, operands[0]);
2054 operands[0] = gen_lowpart (SImode, operands[0]);
2055 operands[1] = gen_lowpart (SImode, operands[1]);
2056 }"
2057 )
2058
2059 (define_expand "anddi3"
2060 [(set (match_operand:DI 0 "s_register_operand" "")
2061 (and:DI (match_operand:DI 1 "s_register_operand" "")
2062 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2063 "TARGET_32BIT"
2064 ""
2065 )
2066
2067 (define_insn "*anddi3_insn"
2068 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2069 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2070 (match_operand:DI 2 "s_register_operand" "r,r")))]
2071 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2072 "#"
2073 [(set_attr "length" "8")]
2074 )
2075
2076 (define_insn_and_split "*anddi_zesidi_di"
2077 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2078 (and:DI (zero_extend:DI
2079 (match_operand:SI 2 "s_register_operand" "r,r"))
2080 (match_operand:DI 1 "s_register_operand" "0,r")))]
2081 "TARGET_32BIT"
2082 "#"
2083 "TARGET_32BIT && reload_completed"
2084 ; The zero extend of operand 2 clears the high word of the output
2085 ; operand.
2086 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2087 (set (match_dup 3) (const_int 0))]
2088 "
2089 {
2090 operands[3] = gen_highpart (SImode, operands[0]);
2091 operands[0] = gen_lowpart (SImode, operands[0]);
2092 operands[1] = gen_lowpart (SImode, operands[1]);
2093 }"
2094 [(set_attr "length" "8")]
2095 )
2096
2097 (define_insn "*anddi_sesdi_di"
2098 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2099 (and:DI (sign_extend:DI
2100 (match_operand:SI 2 "s_register_operand" "r,r"))
2101 (match_operand:DI 1 "s_register_operand" "0,r")))]
2102 "TARGET_32BIT"
2103 "#"
2104 [(set_attr "length" "8")]
2105 )
2106
2107 (define_expand "andsi3"
2108 [(set (match_operand:SI 0 "s_register_operand" "")
2109 (and:SI (match_operand:SI 1 "s_register_operand" "")
2110 (match_operand:SI 2 "reg_or_int_operand" "")))]
2111 "TARGET_EITHER"
2112 "
2113 if (TARGET_32BIT)
2114 {
2115 if (GET_CODE (operands[2]) == CONST_INT)
2116 {
2117 if (INTVAL (operands[2]) == 255 && arm_arch6)
2118 {
2119 operands[1] = convert_to_mode (QImode, operands[1], 1);
2120 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2121 operands[1]));
2122 }
2123 else
2124 arm_split_constant (AND, SImode, NULL_RTX,
2125 INTVAL (operands[2]), operands[0],
2126 operands[1],
2127 optimize && can_create_pseudo_p ());
2128
2129 DONE;
2130 }
2131 }
2132 else /* TARGET_THUMB1 */
2133 {
2134 if (GET_CODE (operands[2]) != CONST_INT)
2135 {
2136 rtx tmp = force_reg (SImode, operands[2]);
2137 if (rtx_equal_p (operands[0], operands[1]))
2138 operands[2] = tmp;
2139 else
2140 {
2141 operands[2] = operands[1];
2142 operands[1] = tmp;
2143 }
2144 }
2145 else
2146 {
2147 int i;
2148
2149 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2150 {
2151 operands[2] = force_reg (SImode,
2152 GEN_INT (~INTVAL (operands[2])));
2153
2154 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2155
2156 DONE;
2157 }
2158
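      /* A mask of (1 << i) - 1 can be applied with a single zero_extract;
         a mask whose complement has that form can be applied by shifting
         right and then left by i, clearing the low i bits.  */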
2159 for (i = 9; i <= 31; i++)
2160 {
2161 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2162 {
2163 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2164 const0_rtx));
2165 DONE;
2166 }
2167 else if ((((HOST_WIDE_INT) 1) << i) - 1
2168 == ~INTVAL (operands[2]))
2169 {
2170 rtx shift = GEN_INT (i);
2171 rtx reg = gen_reg_rtx (SImode);
2172
2173 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2174 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2175
2176 DONE;
2177 }
2178 }
2179
2180 operands[2] = force_reg (SImode, operands[2]);
2181 }
2182 }
2183 "
2184 )
2185
2186 ; ??? Check split length for Thumb-2
2187 (define_insn_and_split "*arm_andsi3_insn"
2188 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2189 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2190 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2191 "TARGET_32BIT"
2192 "@
2193 and%?\\t%0, %1, %2
2194 bic%?\\t%0, %1, #%B2
2195 #"
2196 "TARGET_32BIT
2197 && GET_CODE (operands[2]) == CONST_INT
2198 && !(const_ok_for_arm (INTVAL (operands[2]))
2199 || const_ok_for_arm (~INTVAL (operands[2])))"
2200 [(clobber (const_int 0))]
2201 "
2202 arm_split_constant (AND, SImode, curr_insn,
2203 INTVAL (operands[2]), operands[0], operands[1], 0);
2204 DONE;
2205 "
2206 [(set_attr "length" "4,4,16")
2207 (set_attr "predicable" "yes")]
2208 )
2209
2210 (define_insn "*thumb1_andsi3_insn"
2211 [(set (match_operand:SI 0 "register_operand" "=l")
2212 (and:SI (match_operand:SI 1 "register_operand" "%0")
2213 (match_operand:SI 2 "register_operand" "l")))]
2214 "TARGET_THUMB1"
2215 "and\\t%0, %2"
2216 [(set_attr "length" "2")
2217 (set_attr "conds" "set")])
2218
2219 (define_insn "*andsi3_compare0"
2220 [(set (reg:CC_NOOV CC_REGNUM)
2221 (compare:CC_NOOV
2222 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2223 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2224 (const_int 0)))
2225 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2226 (and:SI (match_dup 1) (match_dup 2)))]
2227 "TARGET_32BIT"
2228 "@
2229 and%.\\t%0, %1, %2
2230 bic%.\\t%0, %1, #%B2"
2231 [(set_attr "conds" "set")]
2232 )
2233
2234 (define_insn "*andsi3_compare0_scratch"
2235 [(set (reg:CC_NOOV CC_REGNUM)
2236 (compare:CC_NOOV
2237 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2238 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2239 (const_int 0)))
2240 (clobber (match_scratch:SI 2 "=X,r"))]
2241 "TARGET_32BIT"
2242 "@
2243 tst%?\\t%0, %1
2244 bic%.\\t%2, %0, #%B1"
2245 [(set_attr "conds" "set")]
2246 )
2247
2248 (define_insn "*zeroextractsi_compare0_scratch"
2249 [(set (reg:CC_NOOV CC_REGNUM)
2250 (compare:CC_NOOV (zero_extract:SI
2251 (match_operand:SI 0 "s_register_operand" "r")
2252 (match_operand 1 "const_int_operand" "n")
2253 (match_operand 2 "const_int_operand" "n"))
2254 (const_int 0)))]
2255 "TARGET_32BIT
2256 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2257 && INTVAL (operands[1]) > 0
2258 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2259 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2260 "*
2261 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2262 << INTVAL (operands[2]));
2263 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2264 return \"\";
2265 "
2266 [(set_attr "conds" "set")]
2267 )
2268
2269 (define_insn_and_split "*ne_zeroextractsi"
2270 [(set (match_operand:SI 0 "s_register_operand" "=r")
2271 (ne:SI (zero_extract:SI
2272 (match_operand:SI 1 "s_register_operand" "r")
2273 (match_operand:SI 2 "const_int_operand" "n")
2274 (match_operand:SI 3 "const_int_operand" "n"))
2275 (const_int 0)))
2276 (clobber (reg:CC CC_REGNUM))]
2277 "TARGET_32BIT
2278 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2279 && INTVAL (operands[2]) > 0
2280 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2281 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2282 "#"
2283 "TARGET_32BIT
2284 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2285 && INTVAL (operands[2]) > 0
2286 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2287 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2288 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2289 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2290 (const_int 0)))
2291 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2292 (set (match_dup 0)
2293 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2294 (match_dup 0) (const_int 1)))]
2295 "
2296 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2297 << INTVAL (operands[3]));
2298 "
2299 [(set_attr "conds" "clob")
2300 (set (attr "length")
2301 (if_then_else (eq_attr "is_thumb" "yes")
2302 (const_int 12)
2303 (const_int 8)))]
2304 )
2305
2306 (define_insn_and_split "*ne_zeroextractsi_shifted"
2307 [(set (match_operand:SI 0 "s_register_operand" "=r")
2308 (ne:SI (zero_extract:SI
2309 (match_operand:SI 1 "s_register_operand" "r")
2310 (match_operand:SI 2 "const_int_operand" "n")
2311 (const_int 0))
2312 (const_int 0)))
2313 (clobber (reg:CC CC_REGNUM))]
2314 "TARGET_ARM"
2315 "#"
2316 "TARGET_ARM"
2317 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2318 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2319 (const_int 0)))
2320 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2321 (set (match_dup 0)
2322 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2323 (match_dup 0) (const_int 1)))]
2324 "
2325 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2326 "
2327 [(set_attr "conds" "clob")
2328 (set_attr "length" "8")]
2329 )
2330
2331 (define_insn_and_split "*ite_ne_zeroextractsi"
2332 [(set (match_operand:SI 0 "s_register_operand" "=r")
2333 (if_then_else:SI (ne (zero_extract:SI
2334 (match_operand:SI 1 "s_register_operand" "r")
2335 (match_operand:SI 2 "const_int_operand" "n")
2336 (match_operand:SI 3 "const_int_operand" "n"))
2337 (const_int 0))
2338 (match_operand:SI 4 "arm_not_operand" "rIK")
2339 (const_int 0)))
2340 (clobber (reg:CC CC_REGNUM))]
2341 "TARGET_ARM
2342 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2343 && INTVAL (operands[2]) > 0
2344 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2345 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2346 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2347 "#"
2348 "TARGET_ARM
2349 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2350 && INTVAL (operands[2]) > 0
2351 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2352 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2353 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2354 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2355 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2356 (const_int 0)))
2357 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2358 (set (match_dup 0)
2359 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2360 (match_dup 0) (match_dup 4)))]
2361 "
2362 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2363 << INTVAL (operands[3]));
2364 "
2365 [(set_attr "conds" "clob")
2366 (set_attr "length" "8")]
2367 )
2368
2369 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2370 [(set (match_operand:SI 0 "s_register_operand" "=r")
2371 (if_then_else:SI (ne (zero_extract:SI
2372 (match_operand:SI 1 "s_register_operand" "r")
2373 (match_operand:SI 2 "const_int_operand" "n")
2374 (const_int 0))
2375 (const_int 0))
2376 (match_operand:SI 3 "arm_not_operand" "rIK")
2377 (const_int 0)))
2378 (clobber (reg:CC CC_REGNUM))]
2379 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2380 "#"
2381 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2382 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2383 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2384 (const_int 0)))
2385 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2386 (set (match_dup 0)
2387 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2388 (match_dup 0) (match_dup 3)))]
2389 "
2390 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2391 "
2392 [(set_attr "conds" "clob")
2393 (set_attr "length" "8")]
2394 )
2395
2396 (define_split
2397 [(set (match_operand:SI 0 "s_register_operand" "")
2398 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2399 (match_operand:SI 2 "const_int_operand" "")
2400 (match_operand:SI 3 "const_int_operand" "")))
2401 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2402 "TARGET_THUMB1"
2403 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2404 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2405 "{
2406 HOST_WIDE_INT temp = INTVAL (operands[2]);
2407
2408 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2409 operands[3] = GEN_INT (32 - temp);
2410 }"
2411 )
2412
2413 ;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
2414 (define_split
2415 [(set (match_operand:SI 0 "s_register_operand" "")
2416 (match_operator:SI 1 "shiftable_operator"
2417 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2418 (match_operand:SI 3 "const_int_operand" "")
2419 (match_operand:SI 4 "const_int_operand" ""))
2420 (match_operand:SI 5 "s_register_operand" "")]))
2421 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2422 "TARGET_ARM"
2423 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2424 (set (match_dup 0)
2425 (match_op_dup 1
2426 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2427 (match_dup 5)]))]
2428 "{
2429 HOST_WIDE_INT temp = INTVAL (operands[3]);
2430
2431 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2432 operands[4] = GEN_INT (32 - temp);
2433 }"
2434 )
2435
2436 (define_split
2437 [(set (match_operand:SI 0 "s_register_operand" "")
2438 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2439 (match_operand:SI 2 "const_int_operand" "")
2440 (match_operand:SI 3 "const_int_operand" "")))]
2441 "TARGET_THUMB1"
2442 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2443 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2444 "{
2445 HOST_WIDE_INT temp = INTVAL (operands[2]);
2446
2447 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2448 operands[3] = GEN_INT (32 - temp);
2449 }"
2450 )
2451
2452 (define_split
2453 [(set (match_operand:SI 0 "s_register_operand" "")
2454 (match_operator:SI 1 "shiftable_operator"
2455 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2456 (match_operand:SI 3 "const_int_operand" "")
2457 (match_operand:SI 4 "const_int_operand" ""))
2458 (match_operand:SI 5 "s_register_operand" "")]))
2459 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2460 "TARGET_ARM"
2461 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2462 (set (match_dup 0)
2463 (match_op_dup 1
2464 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2465 (match_dup 5)]))]
2466 "{
2467 HOST_WIDE_INT temp = INTVAL (operands[3]);
2468
2469 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2470 operands[4] = GEN_INT (32 - temp);
2471 }"
2472 )
2473
2474 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2475 ;;; represented by the bitfield, then this will produce incorrect results.
2476 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2477 ;;; which have a real bit-field insert instruction, the truncation happens
2478 ;;; in the bit-field insert instruction itself. Since arm does not have a
2479 ;;; bit-field insert instruction, we would have to emit code here to truncate
2480 ;;; the value before we insert. This loses some of the advantage of having
2481 ;;; this insv pattern, so this pattern needs to be reevaluated.
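;;; Illustrative example (assumed values): inserting the value 6 (binary 110)
;;; into a 2-bit field requires first reducing it to 2 (binary 10); ORing in
;;; the stray third bit unmasked would corrupt the destination bit just above
;;; the field.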
2482
2483 (define_expand "insv"
2484 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2485 (match_operand:SI 1 "general_operand" "")
2486 (match_operand:SI 2 "general_operand" ""))
2487 (match_operand:SI 3 "reg_or_int_operand" ""))]
2488 "TARGET_ARM || arm_arch_thumb2"
2489 "
2490 {
2491 int start_bit = INTVAL (operands[2]);
2492 int width = INTVAL (operands[1]);
2493 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2494 rtx target, subtarget;
2495
2496 if (arm_arch_thumb2)
2497 {
2498 bool use_bfi = TRUE;
2499
2500 if (GET_CODE (operands[3]) == CONST_INT)
2501 {
2502 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2503
2504 if (val == 0)
2505 {
2506 emit_insn (gen_insv_zero (operands[0], operands[1],
2507 operands[2]));
2508 DONE;
2509 }
2510
2511 /* See if the set can be done with a single orr instruction. */
2512 if (val == mask && const_ok_for_arm (val << start_bit))
2513 use_bfi = FALSE;
2514 }
2515
2516 if (use_bfi)
2517 {
2518 if (GET_CODE (operands[3]) != REG)
2519 operands[3] = force_reg (SImode, operands[3]);
2520
2521 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2522 operands[3]));
2523 DONE;
2524 }
2525 }
2526
2527 target = copy_rtx (operands[0]);
2528 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2529 subreg as the final target. */
2530 if (GET_CODE (target) == SUBREG)
2531 {
2532 subtarget = gen_reg_rtx (SImode);
2533 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2534 < GET_MODE_SIZE (SImode))
2535 target = SUBREG_REG (target);
2536 }
2537 else
2538 subtarget = target;
2539
2540 if (GET_CODE (operands[3]) == CONST_INT)
2541 {
2542 /* Since we are inserting a known constant, we may be able to
2543 reduce the number of bits that we have to clear so that
2544 the mask becomes simple. */
2545 /* ??? This code does not check to see if the new mask is actually
2546 simpler. It may not be. */
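      /* Hypothetical example: inserting the constant 7 into a 4-bit field at
         bit 8 gives mask2 = (0xF & ~7) << 8 = 0x800, so only one bit needs
         clearing before ORing in 0x700, rather than all four.  */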
2547 rtx op1 = gen_reg_rtx (SImode);
2548 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2549 start of this pattern. */
2550 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2551 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2552
2553 emit_insn (gen_andsi3 (op1, operands[0],
2554 gen_int_mode (~mask2, SImode)));
2555 emit_insn (gen_iorsi3 (subtarget, op1,
2556 gen_int_mode (op3_value << start_bit, SImode)));
2557 }
2558 else if (start_bit == 0
2559 && !(const_ok_for_arm (mask)
2560 || const_ok_for_arm (~mask)))
2561 {
2562 /* A trick: since we are setting the bottom bits in the word,
2563 we can shift operand[3] up, shift operand[0] down, OR them together,
2564 and rotate the result back again. This takes 3 insns, and
2565 the third might be mergeable into another op. */
2566 /* The shift up copes with the possibility that operand[3] is
2567 wider than the bitfield. */
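      /* Worked example (assumed width == 12, start_bit == 0): op0 gets
         operands[3] << 20, placing the field value in the top 12 bits and
         discarding any excess; op1 gets operands[0] >> 12, dropping the old
         field; ORing them puts the new field above the kept bits, and the
         final rotate left by 12 restores every bit to its proper place.  */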
2568 rtx op0 = gen_reg_rtx (SImode);
2569 rtx op1 = gen_reg_rtx (SImode);
2570
2571 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2572 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2573 emit_insn (gen_iorsi3 (op1, op1, op0));
2574 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2575 }
2576 else if ((width + start_bit == 32)
2577 && !(const_ok_for_arm (mask)
2578 || const_ok_for_arm (~mask)))
2579 {
2580 /* Similar trick, but slightly less efficient. */
2581
2582 rtx op0 = gen_reg_rtx (SImode);
2583 rtx op1 = gen_reg_rtx (SImode);
2584
2585 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2586 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2587 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2588 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2589 }
2590 else
2591 {
2592 rtx op0 = gen_int_mode (mask, SImode);
2593 rtx op1 = gen_reg_rtx (SImode);
2594 rtx op2 = gen_reg_rtx (SImode);
2595
2596 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2597 {
2598 rtx tmp = gen_reg_rtx (SImode);
2599
2600 emit_insn (gen_movsi (tmp, op0));
2601 op0 = tmp;
2602 }
2603
2604 /* Mask out any bits in operand[3] that are not needed. */
2605 emit_insn (gen_andsi3 (op1, operands[3], op0));
2606
2607 if (GET_CODE (op0) == CONST_INT
2608 && (const_ok_for_arm (mask << start_bit)
2609 || const_ok_for_arm (~(mask << start_bit))))
2610 {
2611 op0 = gen_int_mode (~(mask << start_bit), SImode);
2612 emit_insn (gen_andsi3 (op2, operands[0], op0));
2613 }
2614 else
2615 {
2616 if (GET_CODE (op0) == CONST_INT)
2617 {
2618 rtx tmp = gen_reg_rtx (SImode);
2619
2620 emit_insn (gen_movsi (tmp, op0));
2621 op0 = tmp;
2622 }
2623
2624 if (start_bit != 0)
2625 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2626
2627 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2628 }
2629
2630 if (start_bit != 0)
2631 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2632
2633 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2634 }
2635
2636 if (subtarget != target)
2637 {
2638 /* If TARGET is still a SUBREG, then it must be wider than a word,
2639 so we must be careful only to set the subword we were asked to. */
2640 if (GET_CODE (target) == SUBREG)
2641 emit_move_insn (target, subtarget);
2642 else
2643 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2644 }
2645
2646 DONE;
2647 }"
2648 )
2649
2650 (define_insn "insv_zero"
2651 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2652 (match_operand:SI 1 "const_int_operand" "M")
2653 (match_operand:SI 2 "const_int_operand" "M"))
2654 (const_int 0))]
2655 "arm_arch_thumb2"
2656 "bfc%?\t%0, %2, %1"
2657 [(set_attr "length" "4")
2658 (set_attr "predicable" "yes")]
2659 )
2660
2661 (define_insn "insv_t2"
2662 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2663 (match_operand:SI 1 "const_int_operand" "M")
2664 (match_operand:SI 2 "const_int_operand" "M"))
2665 (match_operand:SI 3 "s_register_operand" "r"))]
2666 "arm_arch_thumb2"
2667 "bfi%?\t%0, %3, %2, %1"
2668 [(set_attr "length" "4")
2669 (set_attr "predicable" "yes")]
2670 )
2671
2672 ; Constants for op 2 will never be given to these patterns.
2673 (define_insn_and_split "*anddi_notdi_di"
2674 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2675 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2676 (match_operand:DI 2 "s_register_operand" "r,0")))]
2677 "TARGET_32BIT"
2678 "#"
2679 "TARGET_32BIT && reload_completed
2680 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2681 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2682 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2683 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2684 "
2685 {
2686 operands[3] = gen_highpart (SImode, operands[0]);
2687 operands[0] = gen_lowpart (SImode, operands[0]);
2688 operands[4] = gen_highpart (SImode, operands[1]);
2689 operands[1] = gen_lowpart (SImode, operands[1]);
2690 operands[5] = gen_highpart (SImode, operands[2]);
2691 operands[2] = gen_lowpart (SImode, operands[2]);
2692 }"
2693 [(set_attr "length" "8")
2694 (set_attr "predicable" "yes")]
2695 )
2696
2697 (define_insn_and_split "*anddi_notzesidi_di"
2698 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2699 (and:DI (not:DI (zero_extend:DI
2700 (match_operand:SI 2 "s_register_operand" "r,r")))
2701 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2702 "TARGET_32BIT"
2703 "@
2704 bic%?\\t%Q0, %Q1, %2
2705 #"
2706 ; (not (zero_extend ...)) allows us to just copy the high word from
2707 ; operand1 to operand0.
2708 "TARGET_32BIT
2709 && reload_completed
2710 && operands[0] != operands[1]"
2711 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2712 (set (match_dup 3) (match_dup 4))]
2713 "
2714 {
2715 operands[3] = gen_highpart (SImode, operands[0]);
2716 operands[0] = gen_lowpart (SImode, operands[0]);
2717 operands[4] = gen_highpart (SImode, operands[1]);
2718 operands[1] = gen_lowpart (SImode, operands[1]);
2719 }"
2720 [(set_attr "length" "4,8")
2721 (set_attr "predicable" "yes")]
2722 )
2723
2724 (define_insn_and_split "*anddi_notsesidi_di"
2725 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2726 (and:DI (not:DI (sign_extend:DI
2727 (match_operand:SI 2 "s_register_operand" "r,r")))
2728 (match_operand:DI 1 "s_register_operand" "0,r")))]
2729 "TARGET_32BIT"
2730 "#"
2731 "TARGET_32BIT && reload_completed"
2732 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2733 (set (match_dup 3) (and:SI (not:SI
2734 (ashiftrt:SI (match_dup 2) (const_int 31)))
2735 (match_dup 4)))]
2736 "
2737 {
2738 operands[3] = gen_highpart (SImode, operands[0]);
2739 operands[0] = gen_lowpart (SImode, operands[0]);
2740 operands[4] = gen_highpart (SImode, operands[1]);
2741 operands[1] = gen_lowpart (SImode, operands[1]);
2742 }"
2743 [(set_attr "length" "8")
2744 (set_attr "predicable" "yes")]
2745 )
2746
2747 (define_insn "andsi_notsi_si"
2748 [(set (match_operand:SI 0 "s_register_operand" "=r")
2749 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2750 (match_operand:SI 1 "s_register_operand" "r")))]
2751 "TARGET_32BIT"
2752 "bic%?\\t%0, %1, %2"
2753 [(set_attr "predicable" "yes")]
2754 )
2755
2756 (define_insn "thumb1_bicsi3"
2757 [(set (match_operand:SI 0 "register_operand" "=l")
2758 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2759 (match_operand:SI 2 "register_operand" "0")))]
2760 "TARGET_THUMB1"
2761 "bic\\t%0, %1"
2762 [(set_attr "length" "2")
2763 (set_attr "conds" "set")])
2764
2765 (define_insn "andsi_not_shiftsi_si"
2766 [(set (match_operand:SI 0 "s_register_operand" "=r")
2767 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2768 [(match_operand:SI 2 "s_register_operand" "r")
2769 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2770 (match_operand:SI 1 "s_register_operand" "r")))]
2771 "TARGET_ARM"
2772 "bic%?\\t%0, %1, %2%S4"
2773 [(set_attr "predicable" "yes")
2774 (set_attr "shift" "2")
2775 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2776 (const_string "alu_shift")
2777 (const_string "alu_shift_reg")))]
2778 )
2779
2780 (define_insn "*andsi_notsi_si_compare0"
2781 [(set (reg:CC_NOOV CC_REGNUM)
2782 (compare:CC_NOOV
2783 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2784 (match_operand:SI 1 "s_register_operand" "r"))
2785 (const_int 0)))
2786 (set (match_operand:SI 0 "s_register_operand" "=r")
2787 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2788 "TARGET_32BIT"
2789 "bic%.\\t%0, %1, %2"
2790 [(set_attr "conds" "set")]
2791 )
2792
2793 (define_insn "*andsi_notsi_si_compare0_scratch"
2794 [(set (reg:CC_NOOV CC_REGNUM)
2795 (compare:CC_NOOV
2796 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2797 (match_operand:SI 1 "s_register_operand" "r"))
2798 (const_int 0)))
2799 (clobber (match_scratch:SI 0 "=r"))]
2800 "TARGET_32BIT"
2801 "bic%.\\t%0, %1, %2"
2802 [(set_attr "conds" "set")]
2803 )
2804
2805 (define_expand "iordi3"
2806 [(set (match_operand:DI 0 "s_register_operand" "")
2807 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2808 (match_operand:DI 2 "neon_logic_op2" "")))]
2809 "TARGET_32BIT"
2810 ""
2811 )
2812
2813 (define_insn "*iordi3_insn"
2814 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2815 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2816 (match_operand:DI 2 "s_register_operand" "r,r")))]
2817 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2818 "#"
2819 [(set_attr "length" "8")
2820 (set_attr "predicable" "yes")]
2821 )
2822
2823 (define_insn "*iordi_zesidi_di"
2824 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2825 (ior:DI (zero_extend:DI
2826 (match_operand:SI 2 "s_register_operand" "r,r"))
2827 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2828 "TARGET_32BIT"
2829 "@
2830 orr%?\\t%Q0, %Q1, %2
2831 #"
2832 [(set_attr "length" "4,8")
2833 (set_attr "predicable" "yes")]
2834 )
2835
2836 (define_insn "*iordi_sesidi_di"
2837 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2838 (ior:DI (sign_extend:DI
2839 (match_operand:SI 2 "s_register_operand" "r,r"))
2840 (match_operand:DI 1 "s_register_operand" "0,r")))]
2841 "TARGET_32BIT"
2842 "#"
2843 [(set_attr "length" "8")
2844 (set_attr "predicable" "yes")]
2845 )
2846
2847 (define_expand "iorsi3"
2848 [(set (match_operand:SI 0 "s_register_operand" "")
2849 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2850 (match_operand:SI 2 "reg_or_int_operand" "")))]
2851 "TARGET_EITHER"
2852 "
2853 if (GET_CODE (operands[2]) == CONST_INT)
2854 {
2855 if (TARGET_32BIT)
2856 {
2857 arm_split_constant (IOR, SImode, NULL_RTX,
2858 INTVAL (operands[2]), operands[0], operands[1],
2859 optimize && can_create_pseudo_p ());
2860 DONE;
2861 }
2862 else /* TARGET_THUMB1 */
2863 {
2864 rtx tmp = force_reg (SImode, operands[2]);
2865 if (rtx_equal_p (operands[0], operands[1]))
2866 operands[2] = tmp;
2867 else
2868 {
2869 operands[2] = operands[1];
2870 operands[1] = tmp;
2871 }
2872 }
2873 }
2874 "
2875 )
2876
2877 (define_insn_and_split "*iorsi3_insn"
2878 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2879 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2880 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2881 "TARGET_32BIT"
2882 "@
2883 orr%?\\t%0, %1, %2
2884 orn%?\\t%0, %1, #%B2
2885 #"
2886 "TARGET_32BIT
2887 && GET_CODE (operands[2]) == CONST_INT
2888 && !(const_ok_for_arm (INTVAL (operands[2]))
2889 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2890 [(clobber (const_int 0))]
2891 {
2892 arm_split_constant (IOR, SImode, curr_insn,
2893 INTVAL (operands[2]), operands[0], operands[1], 0);
2894 DONE;
2895 }
2896 [(set_attr "length" "4,4,16")
2897 (set_attr "arch" "32,t2,32")
2898 (set_attr "predicable" "yes")])
2899
2900 (define_insn "*thumb1_iorsi3_insn"
2901 [(set (match_operand:SI 0 "register_operand" "=l")
2902 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2903 (match_operand:SI 2 "register_operand" "l")))]
2904 "TARGET_THUMB1"
2905 "orr\\t%0, %2"
2906 [(set_attr "length" "2")
2907 (set_attr "conds" "set")])
2908
2909 (define_peephole2
2910 [(match_scratch:SI 3 "r")
2911 (set (match_operand:SI 0 "arm_general_register_operand" "")
2912 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2913 (match_operand:SI 2 "const_int_operand" "")))]
2914 "TARGET_ARM
2915 && !const_ok_for_arm (INTVAL (operands[2]))
2916 && const_ok_for_arm (~INTVAL (operands[2]))"
2917 [(set (match_dup 3) (match_dup 2))
2918 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2919 ""
2920 )
2921
2922 (define_insn "*iorsi3_compare0"
2923 [(set (reg:CC_NOOV CC_REGNUM)
2924 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2925 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2926 (const_int 0)))
2927 (set (match_operand:SI 0 "s_register_operand" "=r")
2928 (ior:SI (match_dup 1) (match_dup 2)))]
2929 "TARGET_32BIT"
2930 "orr%.\\t%0, %1, %2"
2931 [(set_attr "conds" "set")]
2932 )
2933
2934 (define_insn "*iorsi3_compare0_scratch"
2935 [(set (reg:CC_NOOV CC_REGNUM)
2936 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2937 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2938 (const_int 0)))
2939 (clobber (match_scratch:SI 0 "=r"))]
2940 "TARGET_32BIT"
2941 "orr%.\\t%0, %1, %2"
2942 [(set_attr "conds" "set")]
2943 )
2944
2945 (define_expand "xordi3"
2946 [(set (match_operand:DI 0 "s_register_operand" "")
2947 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2948 (match_operand:DI 2 "s_register_operand" "")))]
2949 "TARGET_32BIT"
2950 ""
2951 )
2952
2953 (define_insn "*xordi3_insn"
2954 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2955 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2956 (match_operand:DI 2 "s_register_operand" "r,r")))]
2957 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2958 "#"
2959 [(set_attr "length" "8")
2960 (set_attr "predicable" "yes")]
2961 )
2962
2963 (define_insn "*xordi_zesidi_di"
2964 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2965 (xor:DI (zero_extend:DI
2966 (match_operand:SI 2 "s_register_operand" "r,r"))
2967 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2968 "TARGET_32BIT"
2969 "@
2970 eor%?\\t%Q0, %Q1, %2
2971 #"
2972 [(set_attr "length" "4,8")
2973 (set_attr "predicable" "yes")]
2974 )
2975
2976 (define_insn "*xordi_sesidi_di"
2977 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2978 (xor:DI (sign_extend:DI
2979 (match_operand:SI 2 "s_register_operand" "r,r"))
2980 (match_operand:DI 1 "s_register_operand" "0,r")))]
2981 "TARGET_32BIT"
2982 "#"
2983 [(set_attr "length" "8")
2984 (set_attr "predicable" "yes")]
2985 )
2986
2987 (define_expand "xorsi3"
2988 [(set (match_operand:SI 0 "s_register_operand" "")
2989 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2990 (match_operand:SI 2 "reg_or_int_operand" "")))]
2991 "TARGET_EITHER"
2992 "if (GET_CODE (operands[2]) == CONST_INT)
2993 {
2994 if (TARGET_32BIT)
2995 {
2996 arm_split_constant (XOR, SImode, NULL_RTX,
2997 INTVAL (operands[2]), operands[0], operands[1],
2998 optimize && can_create_pseudo_p ());
2999 DONE;
3000 }
3001 else /* TARGET_THUMB1 */
3002 {
3003 rtx tmp = force_reg (SImode, operands[2]);
3004 if (rtx_equal_p (operands[0], operands[1]))
3005 operands[2] = tmp;
3006 else
3007 {
3008 operands[2] = operands[1];
3009 operands[1] = tmp;
3010 }
3011 }
3012 }"
3013 )
3014
3015 (define_insn "*arm_xorsi3"
3016 [(set (match_operand:SI 0 "s_register_operand" "=r")
3017 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3018 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
3019 "TARGET_32BIT"
3020 "eor%?\\t%0, %1, %2"
3021 [(set_attr "predicable" "yes")]
3022 )
3023
3024 (define_insn "*thumb1_xorsi3_insn"
3025 [(set (match_operand:SI 0 "register_operand" "=l")
3026 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3027 (match_operand:SI 2 "register_operand" "l")))]
3028 "TARGET_THUMB1"
3029 "eor\\t%0, %2"
3030 [(set_attr "length" "2")
3031 (set_attr "conds" "set")])
3032
3033 (define_insn "*xorsi3_compare0"
3034 [(set (reg:CC_NOOV CC_REGNUM)
3035 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
3036 (match_operand:SI 2 "arm_rhs_operand" "rI"))
3037 (const_int 0)))
3038 (set (match_operand:SI 0 "s_register_operand" "=r")
3039 (xor:SI (match_dup 1) (match_dup 2)))]
3040 "TARGET_32BIT"
3041 "eor%.\\t%0, %1, %2"
3042 [(set_attr "conds" "set")]
3043 )
3044
3045 (define_insn "*xorsi3_compare0_scratch"
3046 [(set (reg:CC_NOOV CC_REGNUM)
3047 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
3048 (match_operand:SI 1 "arm_rhs_operand" "rI"))
3049 (const_int 0)))]
3050 "TARGET_32BIT"
3051 "teq%?\\t%0, %1"
3052 [(set_attr "conds" "set")]
3053 )
3054
3055 ; By rewriting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
3056 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
3057 ; following insns.
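; Illustratively, by De Morgan:
;   NOT (AND (IOR A B) (NOT C)) = IOR (NOT (IOR A B)) C
;                               = IOR (AND (NOT A) (NOT B)) C
; so computing D and then inverting it reproduces the original expression,
; and the final NOT can often be folded into a following BIC or MVN.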
3058
3059 (define_split
3060 [(set (match_operand:SI 0 "s_register_operand" "")
3061 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3062 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3063 (match_operand:SI 3 "arm_rhs_operand" "")))
3064 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3065 "TARGET_32BIT"
3066 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3067 (not:SI (match_dup 3))))
3068 (set (match_dup 0) (not:SI (match_dup 4)))]
3069 ""
3070 )
3071
3072 (define_insn "*andsi_iorsi3_notsi"
3073 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3074 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3075 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3076 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3077 "TARGET_32BIT"
3078 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3079 [(set_attr "length" "8")
3080 (set_attr "ce_count" "2")
3081 (set_attr "predicable" "yes")]
3082 )
3083
3084 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3085 ; insns are available?
3086 (define_split
3087 [(set (match_operand:SI 0 "s_register_operand" "")
3088 (match_operator:SI 1 "logical_binary_operator"
3089 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3090 (match_operand:SI 3 "const_int_operand" "")
3091 (match_operand:SI 4 "const_int_operand" ""))
3092 (match_operator:SI 9 "logical_binary_operator"
3093 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3094 (match_operand:SI 6 "const_int_operand" ""))
3095 (match_operand:SI 7 "s_register_operand" "")])]))
3096 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3097 "TARGET_32BIT
3098 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3099 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3100 [(set (match_dup 8)
3101 (match_op_dup 1
3102 [(ashift:SI (match_dup 2) (match_dup 4))
3103 (match_dup 5)]))
3104 (set (match_dup 0)
3105 (match_op_dup 1
3106 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3107 (match_dup 7)]))]
3108 "
3109 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3110 ")
3111
3112 (define_split
3113 [(set (match_operand:SI 0 "s_register_operand" "")
3114 (match_operator:SI 1 "logical_binary_operator"
3115 [(match_operator:SI 9 "logical_binary_operator"
3116 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3117 (match_operand:SI 6 "const_int_operand" ""))
3118 (match_operand:SI 7 "s_register_operand" "")])
3119 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3120 (match_operand:SI 3 "const_int_operand" "")
3121 (match_operand:SI 4 "const_int_operand" ""))]))
3122 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3123 "TARGET_32BIT
3124 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3125 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3126 [(set (match_dup 8)
3127 (match_op_dup 1
3128 [(ashift:SI (match_dup 2) (match_dup 4))
3129 (match_dup 5)]))
3130 (set (match_dup 0)
3131 (match_op_dup 1
3132 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3133 (match_dup 7)]))]
3134 "
3135 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3136 ")
3137
3138 (define_split
3139 [(set (match_operand:SI 0 "s_register_operand" "")
3140 (match_operator:SI 1 "logical_binary_operator"
3141 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3142 (match_operand:SI 3 "const_int_operand" "")
3143 (match_operand:SI 4 "const_int_operand" ""))
3144 (match_operator:SI 9 "logical_binary_operator"
3145 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3146 (match_operand:SI 6 "const_int_operand" ""))
3147 (match_operand:SI 7 "s_register_operand" "")])]))
3148 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3149 "TARGET_32BIT
3150 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3151 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3152 [(set (match_dup 8)
3153 (match_op_dup 1
3154 [(ashift:SI (match_dup 2) (match_dup 4))
3155 (match_dup 5)]))
3156 (set (match_dup 0)
3157 (match_op_dup 1
3158 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3159 (match_dup 7)]))]
3160 "
3161 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3162 ")
3163
3164 (define_split
3165 [(set (match_operand:SI 0 "s_register_operand" "")
3166 (match_operator:SI 1 "logical_binary_operator"
3167 [(match_operator:SI 9 "logical_binary_operator"
3168 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3169 (match_operand:SI 6 "const_int_operand" ""))
3170 (match_operand:SI 7 "s_register_operand" "")])
3171 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3172 (match_operand:SI 3 "const_int_operand" "")
3173 (match_operand:SI 4 "const_int_operand" ""))]))
3174 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3175 "TARGET_32BIT
3176 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3177 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3178 [(set (match_dup 8)
3179 (match_op_dup 1
3180 [(ashift:SI (match_dup 2) (match_dup 4))
3181 (match_dup 5)]))
3182 (set (match_dup 0)
3183 (match_op_dup 1
3184 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3185 (match_dup 7)]))]
3186 "
3187 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3188 ")
3189 \f
3190
3191 ;; Minimum and maximum insns
3192
3193 (define_expand "smaxsi3"
3194 [(parallel [
3195 (set (match_operand:SI 0 "s_register_operand" "")
3196 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3197 (match_operand:SI 2 "arm_rhs_operand" "")))
3198 (clobber (reg:CC CC_REGNUM))])]
3199 "TARGET_32BIT"
3200 "
3201 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3202 {
3203 /* No need for a clobber of the condition code register here. */
3204 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3205 gen_rtx_SMAX (SImode, operands[1],
3206 operands[2])));
3207 DONE;
3208 }
3209 ")
3210
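;; Illustrative note: smax (x, 0) is x AND NOT (x >> 31) with an arithmetic
;; shift, because the shift yields all ones exactly when x is negative; the
;; pattern below implements this with a single BIC and therefore needs no
;; condition-code clobber.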
3211 (define_insn "*smax_0"
3212 [(set (match_operand:SI 0 "s_register_operand" "=r")
3213 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3214 (const_int 0)))]
3215 "TARGET_32BIT"
3216 "bic%?\\t%0, %1, %1, asr #31"
3217 [(set_attr "predicable" "yes")]
3218 )
3219
3220 (define_insn "*smax_m1"
3221 [(set (match_operand:SI 0 "s_register_operand" "=r")
3222 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3223 (const_int -1)))]
3224 "TARGET_32BIT"
3225 "orr%?\\t%0, %1, %1, asr #31"
3226 [(set_attr "predicable" "yes")]
3227 )
3228
3229 (define_insn "*arm_smax_insn"
3230 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3231 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3232 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3233 (clobber (reg:CC CC_REGNUM))]
3234 "TARGET_ARM"
3235 "@
3236 cmp\\t%1, %2\;movlt\\t%0, %2
3237 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3238 [(set_attr "conds" "clob")
3239 (set_attr "length" "8,12")]
3240 )
3241
3242 (define_expand "sminsi3"
3243 [(parallel [
3244 (set (match_operand:SI 0 "s_register_operand" "")
3245 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3246 (match_operand:SI 2 "arm_rhs_operand" "")))
3247 (clobber (reg:CC CC_REGNUM))])]
3248 "TARGET_32BIT"
3249 "
3250 if (operands[2] == const0_rtx)
3251 {
3252 /* No need for a clobber of the condition code register here. */
3253 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3254 gen_rtx_SMIN (SImode, operands[1],
3255 operands[2])));
3256 DONE;
3257 }
3258 ")
3259
3260 (define_insn "*smin_0"
3261 [(set (match_operand:SI 0 "s_register_operand" "=r")
3262 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3263 (const_int 0)))]
3264 "TARGET_32BIT"
3265 "and%?\\t%0, %1, %1, asr #31"
3266 [(set_attr "predicable" "yes")]
3267 )
3268
3269 (define_insn "*arm_smin_insn"
3270 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3271 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3272 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3273 (clobber (reg:CC CC_REGNUM))]
3274 "TARGET_ARM"
3275 "@
3276 cmp\\t%1, %2\;movge\\t%0, %2
3277 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3278 [(set_attr "conds" "clob")
3279 (set_attr "length" "8,12")]
3280 )
3281
3282 (define_expand "umaxsi3"
3283 [(parallel [
3284 (set (match_operand:SI 0 "s_register_operand" "")
3285 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3286 (match_operand:SI 2 "arm_rhs_operand" "")))
3287 (clobber (reg:CC CC_REGNUM))])]
3288 "TARGET_32BIT"
3289 ""
3290 )
3291
3292 (define_insn "*arm_umaxsi3"
3293 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3294 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3295 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3296 (clobber (reg:CC CC_REGNUM))]
3297 "TARGET_ARM"
3298 "@
3299 cmp\\t%1, %2\;movcc\\t%0, %2
3300 cmp\\t%1, %2\;movcs\\t%0, %1
3301 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3302 [(set_attr "conds" "clob")
3303 (set_attr "length" "8,8,12")]
3304 )
3305
3306 (define_expand "uminsi3"
3307 [(parallel [
3308 (set (match_operand:SI 0 "s_register_operand" "")
3309 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3310 (match_operand:SI 2 "arm_rhs_operand" "")))
3311 (clobber (reg:CC CC_REGNUM))])]
3312 "TARGET_32BIT"
3313 ""
3314 )
3315
3316 (define_insn "*arm_uminsi3"
3317 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3318 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3319 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3320 (clobber (reg:CC CC_REGNUM))]
3321 "TARGET_ARM"
3322 "@
3323 cmp\\t%1, %2\;movcs\\t%0, %2
3324 cmp\\t%1, %2\;movcc\\t%0, %1
3325 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3326 [(set_attr "conds" "clob")
3327 (set_attr "length" "8,8,12")]
3328 )
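;; A minimal C sketch (editor's illustration only) of the unsigned
;; min/max selections implemented above with a compare followed by one
;; or two conditional moves predicated on the C flag from CMP.
;;
;;   #include <stdint.h>
;;
;;   static uint32_t umax (uint32_t a, uint32_t b)
;;   {
;;     return (a < b) ? b : a;      /* cmp a, b ; movcc a, b */
;;   }
;;
;;   static uint32_t umin (uint32_t a, uint32_t b)
;;   {
;;     return (a < b) ? a : b;      /* cmp a, b ; movcs a, b */
;;   }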
3329
3330 (define_insn "*store_minmaxsi"
3331 [(set (match_operand:SI 0 "memory_operand" "=m")
3332 (match_operator:SI 3 "minmax_operator"
3333 [(match_operand:SI 1 "s_register_operand" "r")
3334 (match_operand:SI 2 "s_register_operand" "r")]))
3335 (clobber (reg:CC CC_REGNUM))]
3336 "TARGET_32BIT"
3337 "*
3338 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3339 operands[1], operands[2]);
3340 output_asm_insn (\"cmp\\t%1, %2\", operands);
3341 if (TARGET_THUMB2)
3342 output_asm_insn (\"ite\t%d3\", operands);
3343 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3344 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3345 return \"\";
3346 "
3347 [(set_attr "conds" "clob")
3348 (set (attr "length")
3349 (if_then_else (eq_attr "is_thumb" "yes")
3350 (const_int 14)
3351 (const_int 12)))
3352 (set_attr "type" "store1")]
3353 )
3354
3355 ; Reject the frame pointer in operand[1], since reloading this after
3356 ; it has been eliminated can cause carnage.
3357 (define_insn "*minmax_arithsi"
3358 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3359 (match_operator:SI 4 "shiftable_operator"
3360 [(match_operator:SI 5 "minmax_operator"
3361 [(match_operand:SI 2 "s_register_operand" "r,r")
3362 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3363 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3364 (clobber (reg:CC CC_REGNUM))]
3365 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3366 "*
3367 {
3368 enum rtx_code code = GET_CODE (operands[4]);
3369 bool need_else;
3370
3371 if (which_alternative != 0 || operands[3] != const0_rtx
3372 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3373 need_else = true;
3374 else
3375 need_else = false;
3376
3377 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3378 operands[2], operands[3]);
3379 output_asm_insn (\"cmp\\t%2, %3\", operands);
3380 if (TARGET_THUMB2)
3381 {
3382 if (need_else)
3383 output_asm_insn (\"ite\\t%d5\", operands);
3384 else
3385 output_asm_insn (\"it\\t%d5\", operands);
3386 }
3387 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3388 if (need_else)
3389 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3390 return \"\";
3391 }"
3392 [(set_attr "conds" "clob")
3393 (set (attr "length")
3394 (if_then_else (eq_attr "is_thumb" "yes")
3395 (const_int 14)
3396 (const_int 12)))]
3397 )
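;; A minimal C sketch (editor's illustration only) of why the pattern
;; above can drop the "else" instruction in its first alternative when
;; operand 3 is zero and the operator has zero as an identity (PLUS,
;; MINUS, IOR, XOR): if the min/max picks the zero, the combined result
;; is just operand 1, which already sits in the (tied) destination.
;;
;;   #include <stdint.h>
;;
;;   static int32_t plus_smax0 (int32_t r1, int32_t r2)
;;   {
;;     /* r0 = r1 + smax (r2, 0); roughly "cmp r2, #0 ; addge r0, r1, r2"
;;        with r0 tied to r1, so the inverse-condition arm would only
;;        add zero and can be omitted.  */
;;     return (r2 >= 0) ? r1 + r2 : r1;
;;   }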
3398
3399 \f
3400 ;; Shift and rotation insns
3401
3402 (define_expand "ashldi3"
3403 [(set (match_operand:DI 0 "s_register_operand" "")
3404 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3405 (match_operand:SI 2 "reg_or_int_operand" "")))]
3406 "TARGET_32BIT"
3407 "
3408 if (GET_CODE (operands[2]) == CONST_INT)
3409 {
3410 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3411 {
3412 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3413 DONE;
3414 }
3415 /* Ideally we shouldn't fail here if we knew that operands[1] would
3416 end up living in an iWMMXt register anyway.  Otherwise it's
3417 cheaper to generate the alternative code than to move values to
3418 iWMMXt registers and back.  */
3419 FAIL;
3420 }
3421 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3422 FAIL;
3423 "
3424 )
3425
3426 (define_insn "arm_ashldi3_1bit"
3427 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3428 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3429 (const_int 1)))
3430 (clobber (reg:CC CC_REGNUM))]
3431 "TARGET_32BIT"
3432 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3433 [(set_attr "conds" "clob")
3434 (set_attr "length" "8")]
3435 )
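;; A minimal C sketch (editor's illustration only) of what the one-bit
;; DImode shift above computes with MOVS/ADC: the carry out of
;; "lo << 1" becomes the incoming low bit of the high word.
;;
;;   #include <stdint.h>
;;
;;   static uint64_t ashldi_1bit (uint32_t lo, uint32_t hi)
;;   {
;;     uint32_t new_lo = lo << 1;                  /* movs Qd, Qs, asl #1 */
;;     uint32_t new_hi = (hi << 1) | (lo >> 31);   /* adc  Rd, Rs, Rs     */
;;     return ((uint64_t) new_hi << 32) | new_lo;
;;   }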
3436
3437 (define_expand "ashlsi3"
3438 [(set (match_operand:SI 0 "s_register_operand" "")
3439 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3440 (match_operand:SI 2 "arm_rhs_operand" "")))]
3441 "TARGET_EITHER"
3442 "
3443 if (GET_CODE (operands[2]) == CONST_INT
3444 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3445 {
3446 emit_insn (gen_movsi (operands[0], const0_rtx));
3447 DONE;
3448 }
3449 "
3450 )
3451
3452 (define_insn "*thumb1_ashlsi3"
3453 [(set (match_operand:SI 0 "register_operand" "=l,l")
3454 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3455 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3456 "TARGET_THUMB1"
3457 "lsl\\t%0, %1, %2"
3458 [(set_attr "length" "2")
3459 (set_attr "conds" "set")])
3460
3461 (define_expand "ashrdi3"
3462 [(set (match_operand:DI 0 "s_register_operand" "")
3463 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3464 (match_operand:SI 2 "reg_or_int_operand" "")))]
3465 "TARGET_32BIT"
3466 "
3467 if (GET_CODE (operands[2]) == CONST_INT)
3468 {
3469 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3470 {
3471 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3472 DONE;
3473 }
3474 /* Ideally we shouldn't fail here if we knew that operands[1] would
3475 end up living in an iWMMXt register anyway.  Otherwise it's
3476 cheaper to generate the alternative code than to move values to
3477 iWMMXt registers and back. */
3478 FAIL;
3479 }
3480 else if (!TARGET_REALLY_IWMMXT)
3481 FAIL;
3482 "
3483 )
3484
3485 (define_insn "arm_ashrdi3_1bit"
3486 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3487 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3488 (const_int 1)))
3489 (clobber (reg:CC CC_REGNUM))]
3490 "TARGET_32BIT"
3491 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3492 [(set_attr "conds" "clob")
3493 (set_attr "insn" "mov")
3494 (set_attr "length" "8")]
3495 )
3496
3497 (define_expand "ashrsi3"
3498 [(set (match_operand:SI 0 "s_register_operand" "")
3499 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3500 (match_operand:SI 2 "arm_rhs_operand" "")))]
3501 "TARGET_EITHER"
3502 "
3503 if (GET_CODE (operands[2]) == CONST_INT
3504 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3505 operands[2] = GEN_INT (31);
3506 "
3507 )
3508
3509 (define_insn "*thumb1_ashrsi3"
3510 [(set (match_operand:SI 0 "register_operand" "=l,l")
3511 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3512 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3513 "TARGET_THUMB1"
3514 "asr\\t%0, %1, %2"
3515 [(set_attr "length" "2")
3516 (set_attr "conds" "set")])
3517
3518 (define_expand "lshrdi3"
3519 [(set (match_operand:DI 0 "s_register_operand" "")
3520 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3521 (match_operand:SI 2 "reg_or_int_operand" "")))]
3522 "TARGET_32BIT"
3523 "
3524 if (GET_CODE (operands[2]) == CONST_INT)
3525 {
3526 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3527 {
3528 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3529 DONE;
3530 }
3531 /* Ideally we shouldn't fail here if we knew that operands[1] would
3532 end up living in an iWMMXt register anyway.  Otherwise it's
3533 cheaper to generate the alternative code than to move values to
3534 iWMMXt registers and back. */
3535 FAIL;
3536 }
3537 else if (!TARGET_REALLY_IWMMXT)
3538 FAIL;
3539 "
3540 )
3541
3542 (define_insn "arm_lshrdi3_1bit"
3543 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3544 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3545 (const_int 1)))
3546 (clobber (reg:CC CC_REGNUM))]
3547 "TARGET_32BIT"
3548 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3549 [(set_attr "conds" "clob")
3550 (set_attr "insn" "mov")
3551 (set_attr "length" "8")]
3552 )
3553
3554 (define_expand "lshrsi3"
3555 [(set (match_operand:SI 0 "s_register_operand" "")
3556 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3557 (match_operand:SI 2 "arm_rhs_operand" "")))]
3558 "TARGET_EITHER"
3559 "
3560 if (GET_CODE (operands[2]) == CONST_INT
3561 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3562 {
3563 emit_insn (gen_movsi (operands[0], const0_rtx));
3564 DONE;
3565 }
3566 "
3567 )
3568
3569 (define_insn "*thumb1_lshrsi3"
3570 [(set (match_operand:SI 0 "register_operand" "=l,l")
3571 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3572 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3573 "TARGET_THUMB1"
3574 "lsr\\t%0, %1, %2"
3575 [(set_attr "length" "2")
3576 (set_attr "conds" "set")])
3577
3578 (define_expand "rotlsi3"
3579 [(set (match_operand:SI 0 "s_register_operand" "")
3580 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3581 (match_operand:SI 2 "reg_or_int_operand" "")))]
3582 "TARGET_32BIT"
3583 "
3584 if (GET_CODE (operands[2]) == CONST_INT)
3585 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3586 else
3587 {
3588 rtx reg = gen_reg_rtx (SImode);
3589 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3590 operands[2] = reg;
3591 }
3592 "
3593 )
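;; A minimal C sketch (editor's illustration only): the ARM only has a
;; rotate-right operation, so the expander above maps a left rotate by N
;; onto a right rotate by (32 - N) % 32, or by a register holding 32 - N.
;;
;;   #include <stdint.h>
;;
;;   static uint32_t rotl32 (uint32_t x, unsigned n)   /* 0 <= n <= 31 */
;;   {
;;     unsigned r = (32 - n) % 32;        /* GEN_INT ((32 - INTVAL) % 32) */
;;     return (x >> r) | (x << ((32 - r) % 32));  /* rotate right by r */
;;   }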
3594
3595 (define_expand "rotrsi3"
3596 [(set (match_operand:SI 0 "s_register_operand" "")
3597 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3598 (match_operand:SI 2 "arm_rhs_operand" "")))]
3599 "TARGET_EITHER"
3600 "
3601 if (TARGET_32BIT)
3602 {
3603 if (GET_CODE (operands[2]) == CONST_INT
3604 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3605 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3606 }
3607 else /* TARGET_THUMB1 */
3608 {
3609 if (GET_CODE (operands [2]) == CONST_INT)
3610 operands [2] = force_reg (SImode, operands[2]);
3611 }
3612 "
3613 )
3614
3615 (define_insn "*thumb1_rotrsi3"
3616 [(set (match_operand:SI 0 "register_operand" "=l")
3617 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3618 (match_operand:SI 2 "register_operand" "l")))]
3619 "TARGET_THUMB1"
3620 "ror\\t%0, %0, %2"
3621 [(set_attr "length" "2")]
3622 )
3623
3624 (define_insn "*arm_shiftsi3"
3625 [(set (match_operand:SI 0 "s_register_operand" "=r")
3626 (match_operator:SI 3 "shift_operator"
3627 [(match_operand:SI 1 "s_register_operand" "r")
3628 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3629 "TARGET_32BIT"
3630 "* return arm_output_shift(operands, 0);"
3631 [(set_attr "predicable" "yes")
3632 (set_attr "shift" "1")
3633 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3634 (const_string "alu_shift")
3635 (const_string "alu_shift_reg")))]
3636 )
3637
3638 (define_insn "*shiftsi3_compare0"
3639 [(set (reg:CC_NOOV CC_REGNUM)
3640 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3641 [(match_operand:SI 1 "s_register_operand" "r")
3642 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3643 (const_int 0)))
3644 (set (match_operand:SI 0 "s_register_operand" "=r")
3645 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3646 "TARGET_32BIT"
3647 "* return arm_output_shift(operands, 1);"
3648 [(set_attr "conds" "set")
3649 (set_attr "shift" "1")
3650 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3651 (const_string "alu_shift")
3652 (const_string "alu_shift_reg")))]
3653 )
3654
3655 (define_insn "*shiftsi3_compare0_scratch"
3656 [(set (reg:CC_NOOV CC_REGNUM)
3657 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3658 [(match_operand:SI 1 "s_register_operand" "r")
3659 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3660 (const_int 0)))
3661 (clobber (match_scratch:SI 0 "=r"))]
3662 "TARGET_32BIT"
3663 "* return arm_output_shift(operands, 1);"
3664 [(set_attr "conds" "set")
3665 (set_attr "shift" "1")]
3666 )
3667
3668 (define_insn "*not_shiftsi"
3669 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3670 (not:SI (match_operator:SI 3 "shift_operator"
3671 [(match_operand:SI 1 "s_register_operand" "r,r")
3672 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3673 "TARGET_32BIT"
3674 "mvn%?\\t%0, %1%S3"
3675 [(set_attr "predicable" "yes")
3676 (set_attr "shift" "1")
3677 (set_attr "insn" "mvn")
3678 (set_attr "arch" "32,a")
3679 (set_attr "type" "alu_shift,alu_shift_reg")])
3680
3681 (define_insn "*not_shiftsi_compare0"
3682 [(set (reg:CC_NOOV CC_REGNUM)
3683 (compare:CC_NOOV
3684 (not:SI (match_operator:SI 3 "shift_operator"
3685 [(match_operand:SI 1 "s_register_operand" "r,r")
3686 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3687 (const_int 0)))
3688 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3689 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3690 "TARGET_32BIT"
3691 "mvn%.\\t%0, %1%S3"
3692 [(set_attr "conds" "set")
3693 (set_attr "shift" "1")
3694 (set_attr "insn" "mvn")
3695 (set_attr "arch" "32,a")
3696 (set_attr "type" "alu_shift,alu_shift_reg")])
3697
3698 (define_insn "*not_shiftsi_compare0_scratch"
3699 [(set (reg:CC_NOOV CC_REGNUM)
3700 (compare:CC_NOOV
3701 (not:SI (match_operator:SI 3 "shift_operator"
3702 [(match_operand:SI 1 "s_register_operand" "r,r")
3703 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3704 (const_int 0)))
3705 (clobber (match_scratch:SI 0 "=r,r"))]
3706 "TARGET_32BIT"
3707 "mvn%.\\t%0, %1%S3"
3708 [(set_attr "conds" "set")
3709 (set_attr "shift" "1")
3710 (set_attr "insn" "mvn")
3711 (set_attr "arch" "32,a")
3712 (set_attr "type" "alu_shift,alu_shift_reg")])
3713
3714 ;; We don't really have extzv, but defining this using shifts helps
3715 ;; to reduce register pressure later on.
3716
3717 (define_expand "extzv"
3718 [(set (match_dup 4)
3719 (ashift:SI (match_operand:SI 1 "register_operand" "")
3720 (match_operand:SI 2 "const_int_operand" "")))
3721 (set (match_operand:SI 0 "register_operand" "")
3722 (lshiftrt:SI (match_dup 4)
3723 (match_operand:SI 3 "const_int_operand" "")))]
3724 "TARGET_THUMB1 || arm_arch_thumb2"
3725 "
3726 {
3727 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3728 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3729
3730 if (arm_arch_thumb2)
3731 {
3732 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3733 operands[3]));
3734 DONE;
3735 }
3736
3737 operands[3] = GEN_INT (rshift);
3738
3739 if (lshift == 0)
3740 {
3741 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3742 DONE;
3743 }
3744
3745 operands[2] = GEN_INT (lshift);
3746 operands[4] = gen_reg_rtx (SImode);
3747 }"
3748 )
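;; A minimal C sketch (editor's illustration only) of the arithmetic in
;; the extzv expander above: to extract SIZE bits starting at bit POS,
;; shift left by 32 - SIZE - POS and then logically right by 32 - SIZE.
;; For example, SIZE = 8 and POS = 4 give lshift = 20 and rshift = 24.
;; The function name is hypothetical.
;;
;;   #include <stdint.h>
;;
;;   static uint32_t extzv_si (uint32_t x, int size, int pos)
;;   {
;;     int lshift = 32 - size - pos;
;;     int rshift = 32 - size;
;;     if (lshift == 0)              /* field already at the top ...   */
;;       return x >> rshift;         /* ... a single LSR is enough     */
;;     return (x << lshift) >> rshift;
;;   }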
3749
3750 (define_insn "extv"
3751 [(set (match_operand:SI 0 "s_register_operand" "=r")
3752 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3753 (match_operand:SI 2 "const_int_operand" "M")
3754 (match_operand:SI 3 "const_int_operand" "M")))]
3755 "arm_arch_thumb2"
3756 "sbfx%?\t%0, %1, %3, %2"
3757 [(set_attr "length" "4")
3758 (set_attr "predicable" "yes")]
3759 )
3760
3761 (define_insn "extzv_t2"
3762 [(set (match_operand:SI 0 "s_register_operand" "=r")
3763 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3764 (match_operand:SI 2 "const_int_operand" "M")
3765 (match_operand:SI 3 "const_int_operand" "M")))]
3766 "arm_arch_thumb2"
3767 "ubfx%?\t%0, %1, %3, %2"
3768 [(set_attr "length" "4")
3769 (set_attr "predicable" "yes")]
3770 )
3771
3772
3773 ;; Division instructions
3774 (define_insn "divsi3"
3775 [(set (match_operand:SI 0 "s_register_operand" "=r")
3776 (div:SI (match_operand:SI 1 "s_register_operand" "r")
3777 (match_operand:SI 2 "s_register_operand" "r")))]
3778 "TARGET_IDIV"
3779 "sdiv%?\t%0, %1, %2"
3780 [(set_attr "predicable" "yes")
3781 (set_attr "insn" "sdiv")]
3782 )
3783
3784 (define_insn "udivsi3"
3785 [(set (match_operand:SI 0 "s_register_operand" "=r")
3786 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
3787 (match_operand:SI 2 "s_register_operand" "r")))]
3788 "TARGET_IDIV"
3789 "udiv%?\t%0, %1, %2"
3790 [(set_attr "predicable" "yes")
3791 (set_attr "insn" "udiv")]
3792 )
3793
3794 \f
3795 ;; Unary arithmetic insns
3796
3797 (define_expand "negdi2"
3798 [(parallel
3799 [(set (match_operand:DI 0 "s_register_operand" "")
3800 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3801 (clobber (reg:CC CC_REGNUM))])]
3802 "TARGET_EITHER"
3803 ""
3804 )
3805
3806 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3807 ;; The first alternative allows the common case of a *full* overlap.
3808 (define_insn "*arm_negdi2"
3809 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3810 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3811 (clobber (reg:CC CC_REGNUM))]
3812 "TARGET_ARM"
3813 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3814 [(set_attr "conds" "clob")
3815 (set_attr "length" "8")]
3816 )
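;; A minimal C sketch (editor's illustration only) of the RSBS/RSC pair
;; above: negate the low word and subtract the resulting borrow from the
;; negated high word, i.e. two's-complement negation carried across the
;; two halves.
;;
;;   #include <stdint.h>
;;
;;   static uint64_t negdi (uint32_t lo, uint32_t hi)
;;   {
;;     uint32_t new_lo = 0u - lo;            /* rsbs Qd, Qs, #0          */
;;     uint32_t borrow = (lo != 0);          /* carry clear iff a borrow */
;;     uint32_t new_hi = 0u - hi - borrow;   /* rsc  Rd, Rs, #0          */
;;     return ((uint64_t) new_hi << 32) | new_lo;
;;   }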
3817
3818 (define_insn "*thumb1_negdi2"
3819 [(set (match_operand:DI 0 "register_operand" "=&l")
3820 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3821 (clobber (reg:CC CC_REGNUM))]
3822 "TARGET_THUMB1"
3823 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3824 [(set_attr "length" "6")]
3825 )
3826
3827 (define_expand "negsi2"
3828 [(set (match_operand:SI 0 "s_register_operand" "")
3829 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3830 "TARGET_EITHER"
3831 ""
3832 )
3833
3834 (define_insn "*arm_negsi2"
3835 [(set (match_operand:SI 0 "s_register_operand" "=r")
3836 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3837 "TARGET_32BIT"
3838 "rsb%?\\t%0, %1, #0"
3839 [(set_attr "predicable" "yes")]
3840 )
3841
3842 (define_insn "*thumb1_negsi2"
3843 [(set (match_operand:SI 0 "register_operand" "=l")
3844 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3845 "TARGET_THUMB1"
3846 "neg\\t%0, %1"
3847 [(set_attr "length" "2")]
3848 )
3849
3850 (define_expand "negsf2"
3851 [(set (match_operand:SF 0 "s_register_operand" "")
3852 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3853 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3854 ""
3855 )
3856
3857 (define_expand "negdf2"
3858 [(set (match_operand:DF 0 "s_register_operand" "")
3859 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3860 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3861 "")
3862
3863 ;; abssi2 doesn't really clobber the condition codes if a different register
3864 ;; is being set. To keep things simple, assume during rtl manipulations that
3865 ;; it does, but tell the final scan operator the truth. Similarly for
3866 ;; (neg (abs...))
3867
3868 (define_expand "abssi2"
3869 [(parallel
3870 [(set (match_operand:SI 0 "s_register_operand" "")
3871 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3872 (clobber (match_dup 2))])]
3873 "TARGET_EITHER"
3874 "
3875 if (TARGET_THUMB1)
3876 operands[2] = gen_rtx_SCRATCH (SImode);
3877 else
3878 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3879 ")
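;; A minimal C sketch (editor's illustration only) of the branch-free
;; forms used by the *arm_abssi2, *arm_neg_abssi2 and Thumb-1 patterns
;; below: XORing with the sign mask and subtracting (or adding) it
;; negates a negative value without touching the flags.  Arithmetic
;; right shift of signed values is assumed.
;;
;;   #include <stdint.h>
;;
;;   static int32_t abs_si (int32_t x)
;;   {
;;     int32_t mask = x >> 31;          /* 0 or -1                   */
;;     return (x ^ mask) - mask;        /* eor ; sub ..., asr #31    */
;;   }
;;
;;   static int32_t neg_abs_si (int32_t x)   /* the (neg (abs ...)) case */
;;   {
;;     int32_t mask = x >> 31;
;;     return mask - (x ^ mask);        /* eor ; rsb ..., asr #31    */
;;   }
;;
;;   static int32_t abs_thumb1 (int32_t x)   /* the Thumb-1 split: asr/add/eor */
;;   {
;;     int32_t mask = x >> 31;
;;     return (x + mask) ^ mask;
;;   }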
3880
3881 (define_insn "*arm_abssi2"
3882 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3883 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3884 (clobber (reg:CC CC_REGNUM))]
3885 "TARGET_ARM"
3886 "@
3887 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3888 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3889 [(set_attr "conds" "clob,*")
3890 (set_attr "shift" "1")
3891 ;; predicable can't be set based on the variant, so left as no
3892 (set_attr "length" "8")]
3893 )
3894
3895 (define_insn_and_split "*thumb1_abssi2"
3896 [(set (match_operand:SI 0 "s_register_operand" "=l")
3897 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3898 (clobber (match_scratch:SI 2 "=&l"))]
3899 "TARGET_THUMB1"
3900 "#"
3901 "TARGET_THUMB1 && reload_completed"
3902 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3903 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3904 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3905 ""
3906 [(set_attr "length" "6")]
3907 )
3908
3909 (define_insn "*arm_neg_abssi2"
3910 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3911 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3912 (clobber (reg:CC CC_REGNUM))]
3913 "TARGET_ARM"
3914 "@
3915 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3916 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3917 [(set_attr "conds" "clob,*")
3918 (set_attr "shift" "1")
3919 ;; predicable can't be set based on the variant, so left as no
3920 (set_attr "length" "8")]
3921 )
3922
3923 (define_insn_and_split "*thumb1_neg_abssi2"
3924 [(set (match_operand:SI 0 "s_register_operand" "=l")
3925 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3926 (clobber (match_scratch:SI 2 "=&l"))]
3927 "TARGET_THUMB1"
3928 "#"
3929 "TARGET_THUMB1 && reload_completed"
3930 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3931 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3932 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3933 ""
3934 [(set_attr "length" "6")]
3935 )
3936
3937 (define_expand "abssf2"
3938 [(set (match_operand:SF 0 "s_register_operand" "")
3939 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3940 "TARGET_32BIT && TARGET_HARD_FLOAT"
3941 "")
3942
3943 (define_expand "absdf2"
3944 [(set (match_operand:DF 0 "s_register_operand" "")
3945 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3946 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3947 "")
3948
3949 (define_expand "sqrtsf2"
3950 [(set (match_operand:SF 0 "s_register_operand" "")
3951 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3952 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3953 "")
3954
3955 (define_expand "sqrtdf2"
3956 [(set (match_operand:DF 0 "s_register_operand" "")
3957 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3958 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3959 "")
3960
3961 (define_insn_and_split "one_cmpldi2"
3962 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3963 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3964 "TARGET_32BIT"
3965 "#"
3966 "TARGET_32BIT && reload_completed"
3967 [(set (match_dup 0) (not:SI (match_dup 1)))
3968 (set (match_dup 2) (not:SI (match_dup 3)))]
3969 "
3970 {
3971 operands[2] = gen_highpart (SImode, operands[0]);
3972 operands[0] = gen_lowpart (SImode, operands[0]);
3973 operands[3] = gen_highpart (SImode, operands[1]);
3974 operands[1] = gen_lowpart (SImode, operands[1]);
3975 }"
3976 [(set_attr "length" "8")
3977 (set_attr "predicable" "yes")]
3978 )
3979
3980 (define_expand "one_cmplsi2"
3981 [(set (match_operand:SI 0 "s_register_operand" "")
3982 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3983 "TARGET_EITHER"
3984 ""
3985 )
3986
3987 (define_insn "*arm_one_cmplsi2"
3988 [(set (match_operand:SI 0 "s_register_operand" "=r")
3989 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3990 "TARGET_32BIT"
3991 "mvn%?\\t%0, %1"
3992 [(set_attr "predicable" "yes")
3993 (set_attr "insn" "mvn")]
3994 )
3995
3996 (define_insn "*thumb1_one_cmplsi2"
3997 [(set (match_operand:SI 0 "register_operand" "=l")
3998 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3999 "TARGET_THUMB1"
4000 "mvn\\t%0, %1"
4001 [(set_attr "length" "2")
4002 (set_attr "insn" "mvn")]
4003 )
4004
4005 (define_insn "*notsi_compare0"
4006 [(set (reg:CC_NOOV CC_REGNUM)
4007 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4008 (const_int 0)))
4009 (set (match_operand:SI 0 "s_register_operand" "=r")
4010 (not:SI (match_dup 1)))]
4011 "TARGET_32BIT"
4012 "mvn%.\\t%0, %1"
4013 [(set_attr "conds" "set")
4014 (set_attr "insn" "mvn")]
4015 )
4016
4017 (define_insn "*notsi_compare0_scratch"
4018 [(set (reg:CC_NOOV CC_REGNUM)
4019 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4020 (const_int 0)))
4021 (clobber (match_scratch:SI 0 "=r"))]
4022 "TARGET_32BIT"
4023 "mvn%.\\t%0, %1"
4024 [(set_attr "conds" "set")
4025 (set_attr "insn" "mvn")]
4026 )
4027 \f
4028 ;; Fixed <--> Floating conversion insns
4029
4030 (define_expand "floatsihf2"
4031 [(set (match_operand:HF 0 "general_operand" "")
4032 (float:HF (match_operand:SI 1 "general_operand" "")))]
4033 "TARGET_EITHER"
4034 "
4035 {
4036 rtx op1 = gen_reg_rtx (SFmode);
4037 expand_float (op1, operands[1], 0);
4038 op1 = convert_to_mode (HFmode, op1, 0);
4039 emit_move_insn (operands[0], op1);
4040 DONE;
4041 }"
4042 )
4043
4044 (define_expand "floatdihf2"
4045 [(set (match_operand:HF 0 "general_operand" "")
4046 (float:HF (match_operand:DI 1 "general_operand" "")))]
4047 "TARGET_EITHER"
4048 "
4049 {
4050 rtx op1 = gen_reg_rtx (SFmode);
4051 expand_float (op1, operands[1], 0);
4052 op1 = convert_to_mode (HFmode, op1, 0);
4053 emit_move_insn (operands[0], op1);
4054 DONE;
4055 }"
4056 )
4057
4058 (define_expand "floatsisf2"
4059 [(set (match_operand:SF 0 "s_register_operand" "")
4060 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
4061 "TARGET_32BIT && TARGET_HARD_FLOAT"
4062 "
4063 if (TARGET_MAVERICK)
4064 {
4065 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
4066 DONE;
4067 }
4068 ")
4069
4070 (define_expand "floatsidf2"
4071 [(set (match_operand:DF 0 "s_register_operand" "")
4072 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
4073 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4074 "
4075 if (TARGET_MAVERICK)
4076 {
4077 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
4078 DONE;
4079 }
4080 ")
4081
4082 (define_expand "fix_trunchfsi2"
4083 [(set (match_operand:SI 0 "general_operand" "")
4084 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4085 "TARGET_EITHER"
4086 "
4087 {
4088 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4089 expand_fix (operands[0], op1, 0);
4090 DONE;
4091 }"
4092 )
4093
4094 (define_expand "fix_trunchfdi2"
4095 [(set (match_operand:DI 0 "general_operand" "")
4096 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
4097 "TARGET_EITHER"
4098 "
4099 {
4100 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4101 expand_fix (operands[0], op1, 0);
4102 DONE;
4103 }"
4104 )
4105
4106 (define_expand "fix_truncsfsi2"
4107 [(set (match_operand:SI 0 "s_register_operand" "")
4108 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
4109 "TARGET_32BIT && TARGET_HARD_FLOAT"
4110 "
4111 if (TARGET_MAVERICK)
4112 {
4113 if (!cirrus_fp_register (operands[0], SImode))
4114 operands[0] = force_reg (SImode, operands[0]);
4115 if (!cirrus_fp_register (operands[1], SFmode))
4116 operands[1] = force_reg (SFmode, operands[1]);
4117 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
4118 DONE;
4119 }
4120 ")
4121
4122 (define_expand "fix_truncdfsi2"
4123 [(set (match_operand:SI 0 "s_register_operand" "")
4124 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
4125 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4126 "
4127 if (TARGET_MAVERICK)
4128 {
4129 if (!cirrus_fp_register (operands[1], DFmode))
4130 operands[1] = force_reg (DFmode, operands[1]);
4131 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
4132 DONE;
4133 }
4134 ")
4135
4136 ;; Truncation insns
4137
4138 (define_expand "truncdfsf2"
4139 [(set (match_operand:SF 0 "s_register_operand" "")
4140 (float_truncate:SF
4141 (match_operand:DF 1 "s_register_operand" "")))]
4142 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4143 ""
4144 )
4145
4146 /* DFmode -> HFmode conversions have to go through SFmode. */
4147 (define_expand "truncdfhf2"
4148 [(set (match_operand:HF 0 "general_operand" "")
4149 (float_truncate:HF
4150 (match_operand:DF 1 "general_operand" "")))]
4151 "TARGET_EITHER"
4152 "
4153 {
4154 rtx op1;
4155 op1 = convert_to_mode (SFmode, operands[1], 0);
4156 op1 = convert_to_mode (HFmode, op1, 0);
4157 emit_move_insn (operands[0], op1);
4158 DONE;
4159 }"
4160 )
4161 \f
4162 ;; Zero and sign extension instructions.
4163
4164 (define_insn "zero_extend<mode>di2"
4165 [(set (match_operand:DI 0 "s_register_operand" "=r")
4166 (zero_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4167 "<qhs_extenddi_cstr>")))]
4168 "TARGET_32BIT <qhs_zextenddi_cond>"
4169 "#"
4170 [(set_attr "length" "8")
4171 (set_attr "ce_count" "2")
4172 (set_attr "predicable" "yes")]
4173 )
4174
4175 (define_insn "extend<mode>di2"
4176 [(set (match_operand:DI 0 "s_register_operand" "=r")
4177 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4178 "<qhs_extenddi_cstr>")))]
4179 "TARGET_32BIT <qhs_sextenddi_cond>"
4180 "#"
4181 [(set_attr "length" "8")
4182 (set_attr "ce_count" "2")
4183 (set_attr "shift" "1")
4184 (set_attr "predicable" "yes")]
4185 )
4186
4187 ;; Splits for all extensions to DImode
4188 (define_split
4189 [(set (match_operand:DI 0 "s_register_operand" "")
4190 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4191 "TARGET_32BIT"
4192 [(set (match_dup 0) (match_dup 1))]
4193 {
4194 rtx lo_part = gen_lowpart (SImode, operands[0]);
4195 enum machine_mode src_mode = GET_MODE (operands[1]);
4196
4197 if (REG_P (operands[0])
4198 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4199 emit_clobber (operands[0]);
4200 if (!REG_P (lo_part) || src_mode != SImode
4201 || !rtx_equal_p (lo_part, operands[1]))
4202 {
4203 if (src_mode == SImode)
4204 emit_move_insn (lo_part, operands[1]);
4205 else
4206 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4207 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4208 operands[1] = lo_part;
4209 }
4210 operands[0] = gen_highpart (SImode, operands[0]);
4211 operands[1] = const0_rtx;
4212 })
4213
4214 (define_split
4215 [(set (match_operand:DI 0 "s_register_operand" "")
4216 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4217 "TARGET_32BIT"
4218 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4219 {
4220 rtx lo_part = gen_lowpart (SImode, operands[0]);
4221 enum machine_mode src_mode = GET_MODE (operands[1]);
4222
4223 if (REG_P (operands[0])
4224 && !reg_overlap_mentioned_p (operands[0], operands[1]))
4225 emit_clobber (operands[0]);
4226
4227 if (!REG_P (lo_part) || src_mode != SImode
4228 || !rtx_equal_p (lo_part, operands[1]))
4229 {
4230 if (src_mode == SImode)
4231 emit_move_insn (lo_part, operands[1]);
4232 else
4233 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4234 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4235 operands[1] = lo_part;
4236 }
4237 operands[0] = gen_highpart (SImode, operands[0]);
4238 })
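;; A minimal C sketch (editor's illustration only) of what the two
;; DImode extension splits above produce: the low word receives the
;; (possibly extended) source, and the high word is either zero or a
;; copy of the low word's sign bit.
;;
;;   #include <stdint.h>
;;
;;   static uint64_t zero_extend_to_di (uint32_t lo)
;;   {
;;     uint32_t hi = 0;                      /* (set highpart (const_int 0)) */
;;     return ((uint64_t) hi << 32) | lo;
;;   }
;;
;;   static uint64_t sign_extend_to_di (int32_t lo)
;;   {
;;     uint32_t hi = (uint32_t) (lo >> 31);  /* (ashiftrt lowpart 31) */
;;     return ((uint64_t) hi << 32) | (uint32_t) lo;
;;   }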
4239
4240 (define_expand "zero_extendhisi2"
4241 [(set (match_operand:SI 0 "s_register_operand" "")
4242 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4243 "TARGET_EITHER"
4244 {
4245 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4246 {
4247 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4248 DONE;
4249 }
4250 if (!arm_arch6 && !MEM_P (operands[1]))
4251 {
4252 rtx t = gen_lowpart (SImode, operands[1]);
4253 rtx tmp = gen_reg_rtx (SImode);
4254 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4255 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4256 DONE;
4257 }
4258 })
4259
4260 (define_split
4261 [(set (match_operand:SI 0 "s_register_operand" "")
4262 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4263 "!TARGET_THUMB2 && !arm_arch6"
4264 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4265 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4266 {
4267 operands[2] = gen_lowpart (SImode, operands[1]);
4268 })
4269
4270 (define_insn "*thumb1_zero_extendhisi2"
4271 [(set (match_operand:SI 0 "register_operand" "=l,l")
4272 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4273 "TARGET_THUMB1"
4274 {
4275 rtx mem;
4276
4277 if (which_alternative == 0 && arm_arch6)
4278 return "uxth\t%0, %1";
4279 if (which_alternative == 0)
4280 return "#";
4281
4282 mem = XEXP (operands[1], 0);
4283
4284 if (GET_CODE (mem) == CONST)
4285 mem = XEXP (mem, 0);
4286
4287 if (GET_CODE (mem) == PLUS)
4288 {
4289 rtx a = XEXP (mem, 0);
4290
4291 /* This can happen due to bugs in reload. */
4292 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4293 {
4294 rtx ops[2];
4295 ops[0] = operands[0];
4296 ops[1] = a;
4297
4298 output_asm_insn ("mov\t%0, %1", ops);
4299
4300 XEXP (mem, 0) = operands[0];
4301 }
4302 }
4303
4304 return "ldrh\t%0, %1";
4305 }
4306 [(set_attr_alternative "length"
4307 [(if_then_else (eq_attr "is_arch6" "yes")
4308 (const_int 2) (const_int 4))
4309 (const_int 4)])
4310 (set_attr "type" "alu_shift,load_byte")]
4311 )
4312
4313 (define_insn "*arm_zero_extendhisi2"
4314 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4315 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4316 "TARGET_ARM && arm_arch4 && !arm_arch6"
4317 "@
4318 #
4319 ldr%(h%)\\t%0, %1"
4320 [(set_attr "type" "alu_shift,load_byte")
4321 (set_attr "predicable" "yes")]
4322 )
4323
4324 (define_insn "*arm_zero_extendhisi2_v6"
4325 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4326 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4327 "TARGET_ARM && arm_arch6"
4328 "@
4329 uxth%?\\t%0, %1
4330 ldr%(h%)\\t%0, %1"
4331 [(set_attr "type" "alu_shift,load_byte")
4332 (set_attr "predicable" "yes")]
4333 )
4334
4335 (define_insn "*arm_zero_extendhisi2addsi"
4336 [(set (match_operand:SI 0 "s_register_operand" "=r")
4337 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4338 (match_operand:SI 2 "s_register_operand" "r")))]
4339 "TARGET_INT_SIMD"
4340 "uxtah%?\\t%0, %2, %1"
4341 [(set_attr "type" "alu_shift")
4342 (set_attr "predicable" "yes")]
4343 )
4344
4345 (define_expand "zero_extendqisi2"
4346 [(set (match_operand:SI 0 "s_register_operand" "")
4347 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4348 "TARGET_EITHER"
4349 {
4350 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4351 {
4352 emit_insn (gen_andsi3 (operands[0],
4353 gen_lowpart (SImode, operands[1]),
4354 GEN_INT (255)));
4355 DONE;
4356 }
4357 if (!arm_arch6 && !MEM_P (operands[1]))
4358 {
4359 rtx t = gen_lowpart (SImode, operands[1]);
4360 rtx tmp = gen_reg_rtx (SImode);
4361 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4362 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4363 DONE;
4364 }
4365 })
4366
4367 (define_split
4368 [(set (match_operand:SI 0 "s_register_operand" "")
4369 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4370 "!arm_arch6"
4371 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4372 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4373 {
4374 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4375 if (TARGET_ARM)
4376 {
4377 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4378 DONE;
4379 }
4380 })
4381
4382 (define_insn "*thumb1_zero_extendqisi2"
4383 [(set (match_operand:SI 0 "register_operand" "=l,l")
4384 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4385 "TARGET_THUMB1 && !arm_arch6"
4386 "@
4387 #
4388 ldrb\\t%0, %1"
4389 [(set_attr "length" "4,2")
4390 (set_attr "type" "alu_shift,load_byte")
4391 (set_attr "pool_range" "*,32")]
4392 )
4393
4394 (define_insn "*thumb1_zero_extendqisi2_v6"
4395 [(set (match_operand:SI 0 "register_operand" "=l,l")
4396 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4397 "TARGET_THUMB1 && arm_arch6"
4398 "@
4399 uxtb\\t%0, %1
4400 ldrb\\t%0, %1"
4401 [(set_attr "length" "2")
4402 (set_attr "type" "alu_shift,load_byte")]
4403 )
4404
4405 (define_insn "*arm_zero_extendqisi2"
4406 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4407 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4408 "TARGET_ARM && !arm_arch6"
4409 "@
4410 #
4411 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4412 [(set_attr "length" "8,4")
4413 (set_attr "type" "alu_shift,load_byte")
4414 (set_attr "predicable" "yes")]
4415 )
4416
4417 (define_insn "*arm_zero_extendqisi2_v6"
4418 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4419 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4420 "TARGET_ARM && arm_arch6"
4421 "@
4422 uxtb%(%)\\t%0, %1
4423 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4424 [(set_attr "type" "alu_shift,load_byte")
4425 (set_attr "predicable" "yes")]
4426 )
4427
4428 (define_insn "*arm_zero_extendqisi2addsi"
4429 [(set (match_operand:SI 0 "s_register_operand" "=r")
4430 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4431 (match_operand:SI 2 "s_register_operand" "r")))]
4432 "TARGET_INT_SIMD"
4433 "uxtab%?\\t%0, %2, %1"
4434 [(set_attr "predicable" "yes")
4435 (set_attr "insn" "xtab")
4436 (set_attr "type" "alu_shift")]
4437 )
4438
4439 (define_split
4440 [(set (match_operand:SI 0 "s_register_operand" "")
4441 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4442 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4443 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4444 [(set (match_dup 2) (match_dup 1))
4445 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4446 ""
4447 )
4448
4449 (define_split
4450 [(set (match_operand:SI 0 "s_register_operand" "")
4451 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4452 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4453 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4454 [(set (match_dup 2) (match_dup 1))
4455 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4456 ""
4457 )
4458
4459
4460 (define_split
4461 [(set (match_operand:SI 0 "s_register_operand" "")
4462 (ior_xor:SI (and:SI (ashift:SI
4463 (match_operand:SI 1 "s_register_operand" "")
4464 (match_operand:SI 2 "const_int_operand" ""))
4465 (match_operand:SI 3 "const_int_operand" ""))
4466 (zero_extend:SI
4467 (match_operator 5 "subreg_lowpart_operator"
4468 [(match_operand:SI 4 "s_register_operand" "")]))))]
4469 "TARGET_32BIT
4470 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4471 == (GET_MODE_MASK (GET_MODE (operands[5]))
4472 & (GET_MODE_MASK (GET_MODE (operands[5]))
4473 << (INTVAL (operands[2])))))"
4474 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4475 (match_dup 4)))
4476 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4477 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4478 )
4479
4480 (define_insn "*compareqi_eq0"
4481 [(set (reg:CC_Z CC_REGNUM)
4482 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4483 (const_int 0)))]
4484 "TARGET_32BIT"
4485 "tst\\t%0, #255"
4486 [(set_attr "conds" "set")]
4487 )
4488
4489 (define_expand "extendhisi2"
4490 [(set (match_operand:SI 0 "s_register_operand" "")
4491 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4492 "TARGET_EITHER"
4493 {
4494 if (TARGET_THUMB1)
4495 {
4496 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4497 DONE;
4498 }
4499 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4500 {
4501 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4502 DONE;
4503 }
4504
4505 if (!arm_arch6 && !MEM_P (operands[1]))
4506 {
4507 rtx t = gen_lowpart (SImode, operands[1]);
4508 rtx tmp = gen_reg_rtx (SImode);
4509 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4510 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4511 DONE;
4512 }
4513 })
4514
4515 (define_split
4516 [(parallel
4517 [(set (match_operand:SI 0 "register_operand" "")
4518 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4519 (clobber (match_scratch:SI 2 ""))])]
4520 "!arm_arch6"
4521 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4522 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4523 {
4524 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4525 })
4526
4527 ;; We used to have an early-clobber on the scratch register here.
4528 ;; However, there's a bug somewhere in reload which means that this
4529 ;; can be partially ignored during spill allocation if the memory
4530 ;; address also needs reloading; this causes us to die later on when
4531 ;; we try to verify the operands. Fortunately, we don't really need
4532 ;; the early-clobber: we can always use operand 0 if operand 2
4533 ;; overlaps the address.
4534 (define_insn "thumb1_extendhisi2"
4535 [(set (match_operand:SI 0 "register_operand" "=l,l")
4536 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4537 (clobber (match_scratch:SI 2 "=X,l"))]
4538 "TARGET_THUMB1"
4539 "*
4540 {
4541 rtx ops[4];
4542 rtx mem;
4543
4544 if (which_alternative == 0 && !arm_arch6)
4545 return \"#\";
4546 if (which_alternative == 0)
4547 return \"sxth\\t%0, %1\";
4548
4549 mem = XEXP (operands[1], 0);
4550
4551 /* This code used to try to use 'V', and fix the address only if it was
4552 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4553 range of QImode offsets, and offsettable_address_p does a QImode
4554 address check. */
4555
4556 if (GET_CODE (mem) == CONST)
4557 mem = XEXP (mem, 0);
4558
4559 if (GET_CODE (mem) == LABEL_REF)
4560 return \"ldr\\t%0, %1\";
4561
4562 if (GET_CODE (mem) == PLUS)
4563 {
4564 rtx a = XEXP (mem, 0);
4565 rtx b = XEXP (mem, 1);
4566
4567 if (GET_CODE (a) == LABEL_REF
4568 && GET_CODE (b) == CONST_INT)
4569 return \"ldr\\t%0, %1\";
4570
4571 if (GET_CODE (b) == REG)
4572 return \"ldrsh\\t%0, %1\";
4573
4574 ops[1] = a;
4575 ops[2] = b;
4576 }
4577 else
4578 {
4579 ops[1] = mem;
4580 ops[2] = const0_rtx;
4581 }
4582
4583 gcc_assert (GET_CODE (ops[1]) == REG);
4584
4585 ops[0] = operands[0];
4586 if (reg_mentioned_p (operands[2], ops[1]))
4587 ops[3] = ops[0];
4588 else
4589 ops[3] = operands[2];
4590 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4591 return \"\";
4592 }"
4593 [(set_attr_alternative "length"
4594 [(if_then_else (eq_attr "is_arch6" "yes")
4595 (const_int 2) (const_int 4))
4596 (const_int 4)])
4597 (set_attr "type" "alu_shift,load_byte")
4598 (set_attr "pool_range" "*,1020")]
4599 )
4600
4601 ;; This pattern will only be used when ldsh is not available
4602 (define_expand "extendhisi2_mem"
4603 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4604 (set (match_dup 3)
4605 (zero_extend:SI (match_dup 7)))
4606 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4607 (set (match_operand:SI 0 "" "")
4608 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4609 "TARGET_ARM"
4610 "
4611 {
4612 rtx mem1, mem2;
4613 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4614
4615 mem1 = change_address (operands[1], QImode, addr);
4616 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4617 operands[0] = gen_lowpart (SImode, operands[0]);
4618 operands[1] = mem1;
4619 operands[2] = gen_reg_rtx (SImode);
4620 operands[3] = gen_reg_rtx (SImode);
4621 operands[6] = gen_reg_rtx (SImode);
4622 operands[7] = mem2;
4623
4624 if (BYTES_BIG_ENDIAN)
4625 {
4626 operands[4] = operands[2];
4627 operands[5] = operands[3];
4628 }
4629 else
4630 {
4631 operands[4] = operands[3];
4632 operands[5] = operands[2];
4633 }
4634 }"
4635 )
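;; A minimal C sketch (editor's illustration only) of the expansion
;; above for targets without a sign-extending halfword load: the two
;; bytes are loaded separately, the byte carrying the sign is shifted to
;; the top and arithmetically back down by 16, and the other byte is
;; ORed in.  Wrap-around conversion and arithmetic right shift of signed
;; values are assumed.
;;
;;   #include <stdint.h>
;;
;;   static int32_t extendhisi_mem (const uint8_t *p, int big_endian)
;;   {
;;     uint32_t b0 = p[0], b1 = p[1];
;;     uint32_t msb = big_endian ? b0 : b1;    /* byte holding the sign */
;;     uint32_t lsb = big_endian ? b1 : b0;
;;     int32_t hi = (int32_t) (msb << 24);     /* byte now in bits 24..31 */
;;     hi >>= 16;                              /* sign-extended, low 8 bits clear */
;;     return hi | (int32_t) lsb;
;;   }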
4636
4637 (define_split
4638 [(set (match_operand:SI 0 "register_operand" "")
4639 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4640 "!arm_arch6"
4641 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4642 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4643 {
4644 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4645 })
4646
4647 (define_insn "*arm_extendhisi2"
4648 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4649 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4650 "TARGET_ARM && arm_arch4 && !arm_arch6"
4651 "@
4652 #
4653 ldr%(sh%)\\t%0, %1"
4654 [(set_attr "length" "8,4")
4655 (set_attr "type" "alu_shift,load_byte")
4656 (set_attr "predicable" "yes")
4657 (set_attr "pool_range" "*,256")
4658 (set_attr "neg_pool_range" "*,244")]
4659 )
4660
4661 ;; ??? Check Thumb-2 pool range
4662 (define_insn "*arm_extendhisi2_v6"
4663 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4664 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4665 "TARGET_32BIT && arm_arch6"
4666 "@
4667 sxth%?\\t%0, %1
4668 ldr%(sh%)\\t%0, %1"
4669 [(set_attr "type" "alu_shift,load_byte")
4670 (set_attr "predicable" "yes")
4671 (set_attr "pool_range" "*,256")
4672 (set_attr "neg_pool_range" "*,244")]
4673 )
4674
4675 (define_insn "*arm_extendhisi2addsi"
4676 [(set (match_operand:SI 0 "s_register_operand" "=r")
4677 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4678 (match_operand:SI 2 "s_register_operand" "r")))]
4679 "TARGET_INT_SIMD"
4680 "sxtah%?\\t%0, %2, %1"
4681 )
4682
4683 (define_expand "extendqihi2"
4684 [(set (match_dup 2)
4685 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4686 (const_int 24)))
4687 (set (match_operand:HI 0 "s_register_operand" "")
4688 (ashiftrt:SI (match_dup 2)
4689 (const_int 24)))]
4690 "TARGET_ARM"
4691 "
4692 {
4693 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4694 {
4695 emit_insn (gen_rtx_SET (VOIDmode,
4696 operands[0],
4697 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4698 DONE;
4699 }
4700 if (!s_register_operand (operands[1], QImode))
4701 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4702 operands[0] = gen_lowpart (SImode, operands[0]);
4703 operands[1] = gen_lowpart (SImode, operands[1]);
4704 operands[2] = gen_reg_rtx (SImode);
4705 }"
4706 )
4707
4708 (define_insn "*arm_extendqihi_insn"
4709 [(set (match_operand:HI 0 "s_register_operand" "=r")
4710 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4711 "TARGET_ARM && arm_arch4"
4712 "ldr%(sb%)\\t%0, %1"
4713 [(set_attr "type" "load_byte")
4714 (set_attr "predicable" "yes")
4715 (set_attr "pool_range" "256")
4716 (set_attr "neg_pool_range" "244")]
4717 )
4718
4719 (define_expand "extendqisi2"
4720 [(set (match_operand:SI 0 "s_register_operand" "")
4721 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4722 "TARGET_EITHER"
4723 {
4724 if (!arm_arch4 && MEM_P (operands[1]))
4725 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4726
4727 if (!arm_arch6 && !MEM_P (operands[1]))
4728 {
4729 rtx t = gen_lowpart (SImode, operands[1]);
4730 rtx tmp = gen_reg_rtx (SImode);
4731 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4732 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4733 DONE;
4734 }
4735 })
4736
4737 (define_split
4738 [(set (match_operand:SI 0 "register_operand" "")
4739 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4740 "!arm_arch6"
4741 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4742 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4743 {
4744 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4745 })
4746
4747 (define_insn "*arm_extendqisi"
4748 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4749 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4750 "TARGET_ARM && arm_arch4 && !arm_arch6"
4751 "@
4752 #
4753 ldr%(sb%)\\t%0, %1"
4754 [(set_attr "length" "8,4")
4755 (set_attr "type" "alu_shift,load_byte")
4756 (set_attr "predicable" "yes")
4757 (set_attr "pool_range" "*,256")
4758 (set_attr "neg_pool_range" "*,244")]
4759 )
4760
4761 (define_insn "*arm_extendqisi_v6"
4762 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4763 (sign_extend:SI
4764 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4765 "TARGET_ARM && arm_arch6"
4766 "@
4767 sxtb%?\\t%0, %1
4768 ldr%(sb%)\\t%0, %1"
4769 [(set_attr "type" "alu_shift,load_byte")
4770 (set_attr "predicable" "yes")
4771 (set_attr "pool_range" "*,256")
4772 (set_attr "neg_pool_range" "*,244")]
4773 )
4774
4775 (define_insn "*arm_extendqisi2addsi"
4776 [(set (match_operand:SI 0 "s_register_operand" "=r")
4777 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4778 (match_operand:SI 2 "s_register_operand" "r")))]
4779 "TARGET_INT_SIMD"
4780 "sxtab%?\\t%0, %2, %1"
4781 [(set_attr "type" "alu_shift")
4782 (set_attr "insn" "xtab")
4783 (set_attr "predicable" "yes")]
4784 )
4785
4786 (define_split
4787 [(set (match_operand:SI 0 "register_operand" "")
4788 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4789 "TARGET_THUMB1 && reload_completed"
4790 [(set (match_dup 0) (match_dup 2))
4791 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4792 {
4793 rtx addr = XEXP (operands[1], 0);
4794
4795 if (GET_CODE (addr) == CONST)
4796 addr = XEXP (addr, 0);
4797
4798 if (GET_CODE (addr) == PLUS
4799 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4800 /* No split necessary. */
4801 FAIL;
4802
4803 if (GET_CODE (addr) == PLUS
4804 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4805 FAIL;
4806
4807 if (reg_overlap_mentioned_p (operands[0], addr))
4808 {
4809 rtx t = gen_lowpart (QImode, operands[0]);
4810 emit_move_insn (t, operands[1]);
4811 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4812 DONE;
4813 }
4814
4815 if (REG_P (addr))
4816 {
4817 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4818 operands[2] = const0_rtx;
4819 }
4820 else if (GET_CODE (addr) != PLUS)
4821 FAIL;
4822 else if (REG_P (XEXP (addr, 0)))
4823 {
4824 operands[2] = XEXP (addr, 1);
4825 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4826 }
4827 else
4828 {
4829 operands[2] = XEXP (addr, 0);
4830 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4831 }
4832
4833 operands[3] = change_address (operands[1], QImode, addr);
4834 })
4835
4836 (define_peephole2
4837 [(set (match_operand:SI 0 "register_operand" "")
4838 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4839 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4840 (set (match_operand:SI 3 "register_operand" "")
4841 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4842 "TARGET_THUMB1
4843 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4844 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4845 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4846 && (peep2_reg_dead_p (3, operands[0])
4847 || rtx_equal_p (operands[0], operands[3]))
4848 && (peep2_reg_dead_p (3, operands[2])
4849 || rtx_equal_p (operands[2], operands[3]))"
4850 [(set (match_dup 2) (match_dup 1))
4851 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4852 {
4853 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4854 operands[4] = change_address (operands[4], QImode, addr);
4855 })
4856
4857 (define_insn "thumb1_extendqisi2"
4858 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4859 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4860 "TARGET_THUMB1"
4861 {
4862 rtx addr;
4863
4864 if (which_alternative == 0 && arm_arch6)
4865 return "sxtb\\t%0, %1";
4866 if (which_alternative == 0)
4867 return "#";
4868
4869 addr = XEXP (operands[1], 0);
4870 if (GET_CODE (addr) == PLUS
4871 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4872 return "ldrsb\\t%0, %1";
4873
4874 return "#";
4875 }
4876 [(set_attr_alternative "length"
4877 [(if_then_else (eq_attr "is_arch6" "yes")
4878 (const_int 2) (const_int 4))
4879 (const_int 2)
4880 (if_then_else (eq_attr "is_arch6" "yes")
4881 (const_int 4) (const_int 6))])
4882 (set_attr "type" "alu_shift,load_byte,load_byte")]
4883 )
4884
4885 (define_expand "extendsfdf2"
4886 [(set (match_operand:DF 0 "s_register_operand" "")
4887 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4888 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4889 ""
4890 )
4891
4892 /* HFmode -> DFmode conversions have to go through SFmode. */
4893 (define_expand "extendhfdf2"
4894 [(set (match_operand:DF 0 "general_operand" "")
4895 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4896 "TARGET_EITHER"
4897 "
4898 {
4899 rtx op1;
4900 op1 = convert_to_mode (SFmode, operands[1], 0);
4901 op1 = convert_to_mode (DFmode, op1, 0);
4902 emit_insn (gen_movdf (operands[0], op1));
4903 DONE;
4904 }"
4905 )
4906 \f
4907 ;; Move insns (including loads and stores)
4908
4909 ;; XXX Just some ideas about movti.
4910 ;; I don't think these are a good idea on the ARM; there just aren't enough
4911 ;; registers.
4912 ;;(define_expand "loadti"
4913 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4914 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4915 ;; "" "")
4916
4917 ;;(define_expand "storeti"
4918 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4919 ;; (match_operand:TI 1 "s_register_operand" ""))]
4920 ;; "" "")
4921
4922 ;;(define_expand "movti"
4923 ;; [(set (match_operand:TI 0 "general_operand" "")
4924 ;; (match_operand:TI 1 "general_operand" ""))]
4925 ;; ""
4926 ;; "
4927 ;;{
4928 ;; rtx insn;
4929 ;;
4930 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4931 ;; operands[1] = copy_to_reg (operands[1]);
4932 ;; if (GET_CODE (operands[0]) == MEM)
4933 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4934 ;; else if (GET_CODE (operands[1]) == MEM)
4935 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4936 ;; else
4937 ;; FAIL;
4938 ;;
4939 ;; emit_insn (insn);
4940 ;; DONE;
4941 ;;}")
4942
4943 ;; Recognize garbage generated above.
4944
4945 ;;(define_insn ""
4946 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4947 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4948 ;; ""
4949 ;; "*
4950 ;; {
4951 ;; register mem = (which_alternative < 3);
4952 ;; register const char *template;
4953 ;;
4954 ;; operands[mem] = XEXP (operands[mem], 0);
4955 ;; switch (which_alternative)
4956 ;; {
4957 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4958 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4959 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4960 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4961 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4962 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4963 ;; }
4964 ;; output_asm_insn (template, operands);
4965 ;; return \"\";
4966 ;; }")
4967
4968 (define_expand "movdi"
4969 [(set (match_operand:DI 0 "general_operand" "")
4970 (match_operand:DI 1 "general_operand" ""))]
4971 "TARGET_EITHER"
4972 "
4973 if (can_create_pseudo_p ())
4974 {
4975 if (GET_CODE (operands[0]) != REG)
4976 operands[1] = force_reg (DImode, operands[1]);
4977 }
4978 "
4979 )
4980
4981 (define_insn "*arm_movdi"
4982 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4983 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4984 "TARGET_32BIT
4985 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4986 && !TARGET_IWMMXT
4987 && ( register_operand (operands[0], DImode)
4988 || register_operand (operands[1], DImode))"
4989 "*
4990 switch (which_alternative)
4991 {
4992 case 0:
4993 case 1:
4994 case 2:
4995 return \"#\";
4996 default:
4997 return output_move_double (operands, true, NULL);
4998 }
4999 "
5000 [(set_attr "length" "8,12,16,8,8")
5001 (set_attr "type" "*,*,*,load2,store2")
5002 (set_attr "arm_pool_range" "*,*,*,1020,*")
5003 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
5004 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
5005 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5006 )
5007
5008 (define_split
5009 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5010 (match_operand:ANY64 1 "const_double_operand" ""))]
5011 "TARGET_32BIT
5012 && reload_completed
5013 && (arm_const_double_inline_cost (operands[1])
5014 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5015 [(const_int 0)]
5016 "
5017 arm_split_constant (SET, SImode, curr_insn,
5018 INTVAL (gen_lowpart (SImode, operands[1])),
5019 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5020 arm_split_constant (SET, SImode, curr_insn,
5021 INTVAL (gen_highpart_mode (SImode,
5022 GET_MODE (operands[0]),
5023 operands[1])),
5024 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5025 DONE;
5026 "
5027 )
5028
5029 ; If optimizing for size, or if we have load delay slots, then
5030 ; we want to split the constant into two separate operations.
5031 ; In both cases this may split a trivial part into a single data op
5032 ; leaving a single complex constant to load. We can also get longer
5033 ; offsets in a LDR which means we get better chances of sharing the pool
5034 ; entries. Finally, we can normally do a better job of scheduling
5035 ; LDR instructions than we can with LDM.
5036 ; This pattern will only match if the one above did not.
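;; For instance (a sketch, not actual compiler output): a DImode constant
;; such as 0x1234567800000001 can be handled as two SImode pieces,
;;         mov     rLO, #1                 @ trivial half as a single data op
;;         ldr     rHI, =0x12345678        @ complex half loaded from the pool
;; where rLO/rHI stand for the low and high destination registers.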
5037 (define_split
5038 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5039 (match_operand:ANY64 1 "const_double_operand" ""))]
5040 "TARGET_ARM && reload_completed
5041 && arm_const_double_by_parts (operands[1])"
5042 [(set (match_dup 0) (match_dup 1))
5043 (set (match_dup 2) (match_dup 3))]
5044 "
5045 operands[2] = gen_highpart (SImode, operands[0]);
5046 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5047 operands[1]);
5048 operands[0] = gen_lowpart (SImode, operands[0]);
5049 operands[1] = gen_lowpart (SImode, operands[1]);
5050 "
5051 )
5052
5053 (define_split
5054 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5055 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5056 "TARGET_EITHER && reload_completed"
5057 [(set (match_dup 0) (match_dup 1))
5058 (set (match_dup 2) (match_dup 3))]
5059 "
5060 operands[2] = gen_highpart (SImode, operands[0]);
5061 operands[3] = gen_highpart (SImode, operands[1]);
5062 operands[0] = gen_lowpart (SImode, operands[0]);
5063 operands[1] = gen_lowpart (SImode, operands[1]);
5064
5065 /* Handle a partial overlap. */
5066 if (rtx_equal_p (operands[0], operands[3]))
5067 {
5068 rtx tmp0 = operands[0];
5069 rtx tmp1 = operands[1];
5070
5071 operands[0] = operands[2];
5072 operands[1] = operands[3];
5073 operands[2] = tmp0;
5074 operands[3] = tmp1;
5075 }
5076 "
5077 )
5078
5079 ;; We can't actually do base+index doubleword loads if the index and
5080 ;; destination overlap. Split here so that we at least have chance to
5081 ;; schedule.
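;; For example (a sketch; register numbers invented), for r0:r1 = *(r0 + r1)
;; we emit
;;         add     r0, r0, r1
;;         ldrd    r0, r1, [r0]
;; instead of a single base+index LDRD whose destination overlaps both of its
;; address registers.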
5082 (define_split
5083 [(set (match_operand:DI 0 "s_register_operand" "")
5084 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5085 (match_operand:SI 2 "s_register_operand" ""))))]
5086 "TARGET_LDRD
5087 && reg_overlap_mentioned_p (operands[0], operands[1])
5088 && reg_overlap_mentioned_p (operands[0], operands[2])"
5089 [(set (match_dup 4)
5090 (plus:SI (match_dup 1)
5091 (match_dup 2)))
5092 (set (match_dup 0)
5093 (mem:DI (match_dup 4)))]
5094 "
5095 operands[4] = gen_rtx_REG (SImode, REGNO (operands[0]));
5096 "
5097 )
5098
5099 ;;; ??? This should have alternatives for constants.
5100 ;;; ??? This was originally identical to the movdf_insn pattern.
5101 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5102 ;;; thumb_reorg with a memory reference.
5103 (define_insn "*thumb1_movdi_insn"
5104 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5105 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
5106 "TARGET_THUMB1
5107 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5108 && ( register_operand (operands[0], DImode)
5109 || register_operand (operands[1], DImode))"
5110 "*
5111 {
5112 switch (which_alternative)
5113 {
5114 default:
5115 case 0:
5116 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5117 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5118 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5119 case 1:
5120 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5121 case 2:
5122 operands[1] = GEN_INT (- INTVAL (operands[1]));
5123 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5124 case 3:
5125 return \"ldmia\\t%1, {%0, %H0}\";
5126 case 4:
5127 return \"stmia\\t%0, {%1, %H1}\";
5128 case 5:
5129 return thumb_load_double_from_address (operands);
5130 case 6:
5131 operands[2] = gen_rtx_MEM (SImode,
5132 plus_constant (XEXP (operands[0], 0), 4));
5133 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5134 return \"\";
5135 case 7:
5136 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5137 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5138 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5139 }
5140 }"
5141 [(set_attr "length" "4,4,6,2,2,6,4,4")
5142 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5143 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5144 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5145 )
5146
5147 (define_expand "movsi"
5148 [(set (match_operand:SI 0 "general_operand" "")
5149 (match_operand:SI 1 "general_operand" ""))]
5150 "TARGET_EITHER"
5151 "
5152 {
5153 rtx base, offset, tmp;
5154
5155 if (TARGET_32BIT)
5156 {
5157 /* Everything except mem = const or mem = mem can be done easily. */
5158 if (GET_CODE (operands[0]) == MEM)
5159 operands[1] = force_reg (SImode, operands[1]);
5160 if (arm_general_register_operand (operands[0], SImode)
5161 && GET_CODE (operands[1]) == CONST_INT
5162 && !(const_ok_for_arm (INTVAL (operands[1]))
5163 || const_ok_for_arm (~INTVAL (operands[1]))))
5164 {
5165 arm_split_constant (SET, SImode, NULL_RTX,
5166 INTVAL (operands[1]), operands[0], NULL_RTX,
5167 optimize && can_create_pseudo_p ());
5168 DONE;
5169 }
5170
5171 if (TARGET_USE_MOVT && !target_word_relocations
5172 && GET_CODE (operands[1]) == SYMBOL_REF
5173 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5174 {
5175 arm_emit_movpair (operands[0], operands[1]);
5176 DONE;
5177 }
5178 }
5179 else /* TARGET_THUMB1... */
5180 {
5181 if (can_create_pseudo_p ())
5182 {
5183 if (GET_CODE (operands[0]) != REG)
5184 operands[1] = force_reg (SImode, operands[1]);
5185 }
5186 }
5187
5188 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5189 {
5190 split_const (operands[1], &base, &offset);
5191 if (GET_CODE (base) == SYMBOL_REF
5192 && !offset_within_block_p (base, INTVAL (offset)))
5193 {
5194 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5195 emit_move_insn (tmp, base);
5196 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5197 DONE;
5198 }
5199 }
5200
5201 /* Recognize the case where operand[1] is a reference to thread-local
5202 data and load its address to a register. */
5203 if (arm_tls_referenced_p (operands[1]))
5204 {
5205 rtx tmp = operands[1];
5206 rtx addend = NULL;
5207
5208 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5209 {
5210 addend = XEXP (XEXP (tmp, 0), 1);
5211 tmp = XEXP (XEXP (tmp, 0), 0);
5212 }
5213
5214 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5215 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5216
5217 tmp = legitimize_tls_address (tmp,
5218 !can_create_pseudo_p () ? operands[0] : 0);
5219 if (addend)
5220 {
5221 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5222 tmp = force_operand (tmp, operands[0]);
5223 }
5224 operands[1] = tmp;
5225 }
5226 else if (flag_pic
5227 && (CONSTANT_P (operands[1])
5228 || symbol_mentioned_p (operands[1])
5229 || label_mentioned_p (operands[1])))
5230 operands[1] = legitimize_pic_address (operands[1], SImode,
5231 (!can_create_pseudo_p ()
5232 ? operands[0]
5233 : 0));
5234 }
5235 "
5236 )
5237
5238 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5239 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5240 ;; so this does not matter.
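;; For reference (illustrative only), a full 32-bit constant or address is
;; built by a movw/movt pair:
;;         movw    r0, #:lower16:sym       @ writes bits [15:0], clears [31:16]
;;         movt    r0, #:upper16:sym       @ writes bits [31:16]  (this pattern)
;; so the RTL HIGH ends up as the movw and the LO_SUM as the movt.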
5241 (define_insn "*arm_movt"
5242 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5243 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5244 (match_operand:SI 2 "general_operand" "i")))]
5245 "arm_arch_thumb2"
5246 "movt%?\t%0, #:upper16:%c2"
5247 [(set_attr "predicable" "yes")
5248 (set_attr "length" "4")]
5249 )
5250
5251 (define_insn "*arm_movsi_insn"
5252 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5253 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5254 "TARGET_ARM && ! TARGET_IWMMXT
5255 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5256 && ( register_operand (operands[0], SImode)
5257 || register_operand (operands[1], SImode))"
5258 "@
5259 mov%?\\t%0, %1
5260 mov%?\\t%0, %1
5261 mvn%?\\t%0, #%B1
5262 movw%?\\t%0, %1
5263 ldr%?\\t%0, %1
5264 str%?\\t%1, %0"
5265 [(set_attr "type" "*,*,*,*,load1,store1")
5266 (set_attr "insn" "mov,mov,mvn,mov,*,*")
5267 (set_attr "predicable" "yes")
5268 (set_attr "pool_range" "*,*,*,*,4096,*")
5269 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5270 )
5271
5272 (define_split
5273 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5274 (match_operand:SI 1 "const_int_operand" ""))]
5275 "TARGET_32BIT
5276 && (!(const_ok_for_arm (INTVAL (operands[1]))
5277 || const_ok_for_arm (~INTVAL (operands[1]))))"
5278 [(clobber (const_int 0))]
5279 "
5280 arm_split_constant (SET, SImode, NULL_RTX,
5281 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5282 DONE;
5283 "
5284 )
5285
5286 (define_insn "*thumb1_movsi_insn"
5287 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5288 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
5289 "TARGET_THUMB1
5290 && ( register_operand (operands[0], SImode)
5291 || register_operand (operands[1], SImode))"
5292 "@
5293 mov %0, %1
5294 mov %0, %1
5295 #
5296 #
5297 ldmia\\t%1, {%0}
5298 stmia\\t%0, {%1}
5299 ldr\\t%0, %1
5300 str\\t%1, %0
5301 mov\\t%0, %1"
5302 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5303 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5304 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5305 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5306
5307 (define_split
5308 [(set (match_operand:SI 0 "register_operand" "")
5309 (match_operand:SI 1 "const_int_operand" ""))]
5310 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5311 [(set (match_dup 2) (match_dup 1))
5312 (set (match_dup 0) (neg:SI (match_dup 2)))]
5313 "
5314 {
5315 operands[1] = GEN_INT (- INTVAL (operands[1]));
5316 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5317 }"
5318 )
5319
5320 (define_split
5321 [(set (match_operand:SI 0 "register_operand" "")
5322 (match_operand:SI 1 "const_int_operand" ""))]
5323 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5324 [(set (match_dup 2) (match_dup 1))
5325 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5326 "
5327 {
5328 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5329 unsigned HOST_WIDE_INT mask = 0xff;
5330 int i;
5331
5332 for (i = 0; i < 25; i++)
5333 if ((val & (mask << i)) == val)
5334 break;
5335
5336 /* Don't split if the shift is zero. */
5337 if (i == 0)
5338 FAIL;
5339
5340 operands[1] = GEN_INT (val >> i);
5341 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5342 operands[3] = GEN_INT (i);
5343 }"
5344 )
5345
5346 ;; When generating pic, we need to load the symbol offset into a register.
5347 ;; So that the optimizer does not confuse this with a normal symbol load
5348 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5349 ;; since that is the only type of relocation we can use.
5350
5351 ;; Wrap calculation of the whole PIC address in a single pattern for the
5352 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5353 ;; a PIC address involves two loads from memory, so we want to CSE it
5354 ;; as often as possible.
5355 ;; This pattern will be split into one of the pic_load_addr_* patterns
5356 ;; and a move after GCSE optimizations.
5357 ;;
5358 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
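;; As a rough sketch (the exact code depends on the target and GOT layout),
;; the whole calculation comes out as two loads:
;;         ldr     r3, .Lpool              @ pic_load_addr_*: GOT offset of the symbol
;;         ldr     r0, [rPIC, r3]          @ the symbol's address, read from the GOT
;; where .Lpool and rPIC are invented names for the literal-pool entry and the
;; PIC base register; CSEing the combined pattern saves both loads at once.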
5359 (define_expand "calculate_pic_address"
5360 [(set (match_operand:SI 0 "register_operand" "")
5361 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5362 (unspec:SI [(match_operand:SI 2 "" "")]
5363 UNSPEC_PIC_SYM))))]
5364 "flag_pic"
5365 )
5366
5367 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5368 (define_split
5369 [(set (match_operand:SI 0 "register_operand" "")
5370 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5371 (unspec:SI [(match_operand:SI 2 "" "")]
5372 UNSPEC_PIC_SYM))))]
5373 "flag_pic"
5374 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5375 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5376 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5377 )
5378
5379 ;; The rather odd constraints on the following are to force reload to leave
5380 ;; the insn alone, and to force the minipool generation pass to then move
5381 ;; the GOT symbol to memory.
5382
5383 (define_insn "pic_load_addr_32bit"
5384 [(set (match_operand:SI 0 "s_register_operand" "=r")
5385 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5386 "TARGET_32BIT && flag_pic"
5387 "ldr%?\\t%0, %1"
5388 [(set_attr "type" "load1")
5389 (set_attr "pool_range" "4096")
5390 (set (attr "neg_pool_range")
5391 (if_then_else (eq_attr "is_thumb" "no")
5392 (const_int 4084)
5393 (const_int 0)))]
5394 )
5395
5396 (define_insn "pic_load_addr_thumb1"
5397 [(set (match_operand:SI 0 "s_register_operand" "=l")
5398 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5399 "TARGET_THUMB1 && flag_pic"
5400 "ldr\\t%0, %1"
5401 [(set_attr "type" "load1")
5402 (set (attr "pool_range") (const_int 1024))]
5403 )
5404
5405 (define_insn "pic_add_dot_plus_four"
5406 [(set (match_operand:SI 0 "register_operand" "=r")
5407 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5408 (const_int 4)
5409 (match_operand 2 "" "")]
5410 UNSPEC_PIC_BASE))]
5411 "TARGET_THUMB"
5412 "*
5413 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5414 INTVAL (operands[2]));
5415 return \"add\\t%0, %|pc\";
5416 "
5417 [(set_attr "length" "2")]
5418 )
5419
5420 (define_insn "pic_add_dot_plus_eight"
5421 [(set (match_operand:SI 0 "register_operand" "=r")
5422 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5423 (const_int 8)
5424 (match_operand 2 "" "")]
5425 UNSPEC_PIC_BASE))]
5426 "TARGET_ARM"
5427 "*
5428 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5429 INTVAL (operands[2]));
5430 return \"add%?\\t%0, %|pc, %1\";
5431 "
5432 [(set_attr "predicable" "yes")]
5433 )
5434
5435 (define_insn "tls_load_dot_plus_eight"
5436 [(set (match_operand:SI 0 "register_operand" "=r")
5437 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5438 (const_int 8)
5439 (match_operand 2 "" "")]
5440 UNSPEC_PIC_BASE)))]
5441 "TARGET_ARM"
5442 "*
5443 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5444 INTVAL (operands[2]));
5445 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5446 "
5447 [(set_attr "predicable" "yes")]
5448 )
5449
5450 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5451 ;; followed by a load. These sequences can be crunched down to
5452 ;; tls_load_dot_plus_eight by a peephole.
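;; Roughly (a sketch; register numbers invented):
;;         add     r2, pc, r3              @ pic_add_dot_plus_eight
;;         ldr     r0, [r2]
;; becomes, when r2 is dead after the load,
;;         ldr     r0, [pc, r3]            @ tls_load_dot_plus_eight
;; (both forms also emit the local LPIC label in front of the instruction).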
5453
5454 (define_peephole2
5455 [(set (match_operand:SI 0 "register_operand" "")
5456 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5457 (const_int 8)
5458 (match_operand 1 "" "")]
5459 UNSPEC_PIC_BASE))
5460 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5461 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5462 [(set (match_dup 2)
5463 (mem:SI (unspec:SI [(match_dup 3)
5464 (const_int 8)
5465 (match_dup 1)]
5466 UNSPEC_PIC_BASE)))]
5467 ""
5468 )
5469
5470 (define_insn "pic_offset_arm"
5471 [(set (match_operand:SI 0 "register_operand" "=r")
5472 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5473 (unspec:SI [(match_operand:SI 2 "" "X")]
5474 UNSPEC_PIC_OFFSET))))]
5475 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5476 "ldr%?\\t%0, [%1,%2]"
5477 [(set_attr "type" "load1")]
5478 )
5479
5480 (define_expand "builtin_setjmp_receiver"
5481 [(label_ref (match_operand 0 "" ""))]
5482 "flag_pic"
5483 "
5484 {
5485 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5486 register. */
5487 if (arm_pic_register != INVALID_REGNUM)
5488 arm_load_pic_register (1UL << 3);
5489 DONE;
5490 }")
5491
5492 ;; If copying one reg to another we can set the condition codes according to
5493 ;; its value. Such a move is common after a return from subroutine and the
5494 ;; result is being tested against zero.
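;; For example, instead of
;;         mov     r0, r1
;;         cmp     r0, #0
;; the flag-setting copy of alternative 1 below,
;;         subs    r0, r1, #0
;; does both jobs in one instruction (alternative 0 is the case where the
;; source and destination are the same register and a plain cmp suffices).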
5495
5496 (define_insn "*movsi_compare0"
5497 [(set (reg:CC CC_REGNUM)
5498 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5499 (const_int 0)))
5500 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5501 (match_dup 1))]
5502 "TARGET_32BIT"
5503 "@
5504 cmp%?\\t%0, #0
5505 sub%.\\t%0, %1, #0"
5506 [(set_attr "conds" "set")]
5507 )
5508
5509 ;; Subroutine to store a half word from a register into memory.
5510 ;; Operand 0 is the source register (HImode)
5511 ;; Operand 1 is the destination address in a register (SImode)
5512
5513 ;; In both this routine and the next, we must be careful not to spill
5514 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5515 ;; can generate unrecognizable rtl.
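;; In effect (an illustrative little-endian sketch; register numbers
;; invented), storing HImode r1 at [r0] without STRH becomes:
;;         strb    r1, [r0]                @ low byte at offset 0
;;         mov     r2, r1, asr #8          @ extract the high byte
;;         strb    r2, [r0, #1]            @ high byte at offset 1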
5516
5517 (define_expand "storehi"
5518 [;; store the low byte
5519 (set (match_operand 1 "" "") (match_dup 3))
5520 ;; extract the high byte
5521 (set (match_dup 2)
5522 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5523 ;; store the high byte
5524 (set (match_dup 4) (match_dup 5))]
5525 "TARGET_ARM"
5526 "
5527 {
5528 rtx op1 = operands[1];
5529 rtx addr = XEXP (op1, 0);
5530 enum rtx_code code = GET_CODE (addr);
5531
5532 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5533 || code == MINUS)
5534 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5535
5536 operands[4] = adjust_address (op1, QImode, 1);
5537 operands[1] = adjust_address (operands[1], QImode, 0);
5538 operands[3] = gen_lowpart (QImode, operands[0]);
5539 operands[0] = gen_lowpart (SImode, operands[0]);
5540 operands[2] = gen_reg_rtx (SImode);
5541 operands[5] = gen_lowpart (QImode, operands[2]);
5542 }"
5543 )
5544
5545 (define_expand "storehi_bigend"
5546 [(set (match_dup 4) (match_dup 3))
5547 (set (match_dup 2)
5548 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5549 (set (match_operand 1 "" "") (match_dup 5))]
5550 "TARGET_ARM"
5551 "
5552 {
5553 rtx op1 = operands[1];
5554 rtx addr = XEXP (op1, 0);
5555 enum rtx_code code = GET_CODE (addr);
5556
5557 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5558 || code == MINUS)
5559 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5560
5561 operands[4] = adjust_address (op1, QImode, 1);
5562 operands[1] = adjust_address (operands[1], QImode, 0);
5563 operands[3] = gen_lowpart (QImode, operands[0]);
5564 operands[0] = gen_lowpart (SImode, operands[0]);
5565 operands[2] = gen_reg_rtx (SImode);
5566 operands[5] = gen_lowpart (QImode, operands[2]);
5567 }"
5568 )
5569
5570 ;; Subroutine to store a half word integer constant into memory.
5571 (define_expand "storeinthi"
5572 [(set (match_operand 0 "" "")
5573 (match_operand 1 "" ""))
5574 (set (match_dup 3) (match_dup 2))]
5575 "TARGET_ARM"
5576 "
5577 {
5578 HOST_WIDE_INT value = INTVAL (operands[1]);
5579 rtx addr = XEXP (operands[0], 0);
5580 rtx op0 = operands[0];
5581 enum rtx_code code = GET_CODE (addr);
5582
5583 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5584 || code == MINUS)
5585 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5586
5587 operands[1] = gen_reg_rtx (SImode);
5588 if (BYTES_BIG_ENDIAN)
5589 {
5590 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5591 if ((value & 255) == ((value >> 8) & 255))
5592 operands[2] = operands[1];
5593 else
5594 {
5595 operands[2] = gen_reg_rtx (SImode);
5596 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5597 }
5598 }
5599 else
5600 {
5601 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5602 if ((value & 255) == ((value >> 8) & 255))
5603 operands[2] = operands[1];
5604 else
5605 {
5606 operands[2] = gen_reg_rtx (SImode);
5607 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5608 }
5609 }
5610
5611 operands[3] = adjust_address (op0, QImode, 1);
5612 operands[0] = adjust_address (operands[0], QImode, 0);
5613 operands[2] = gen_lowpart (QImode, operands[2]);
5614 operands[1] = gen_lowpart (QImode, operands[1]);
5615 }"
5616 )
5617
5618 (define_expand "storehi_single_op"
5619 [(set (match_operand:HI 0 "memory_operand" "")
5620 (match_operand:HI 1 "general_operand" ""))]
5621 "TARGET_32BIT && arm_arch4"
5622 "
5623 if (!s_register_operand (operands[1], HImode))
5624 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5625 "
5626 )
5627
5628 (define_expand "movhi"
5629 [(set (match_operand:HI 0 "general_operand" "")
5630 (match_operand:HI 1 "general_operand" ""))]
5631 "TARGET_EITHER"
5632 "
5633 if (TARGET_ARM)
5634 {
5635 if (can_create_pseudo_p ())
5636 {
5637 if (GET_CODE (operands[0]) == MEM)
5638 {
5639 if (arm_arch4)
5640 {
5641 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5642 DONE;
5643 }
5644 if (GET_CODE (operands[1]) == CONST_INT)
5645 emit_insn (gen_storeinthi (operands[0], operands[1]));
5646 else
5647 {
5648 if (GET_CODE (operands[1]) == MEM)
5649 operands[1] = force_reg (HImode, operands[1]);
5650 if (BYTES_BIG_ENDIAN)
5651 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5652 else
5653 emit_insn (gen_storehi (operands[1], operands[0]));
5654 }
5655 DONE;
5656 }
5657 /* Sign extend a constant, and keep it in an SImode reg. */
5658 else if (GET_CODE (operands[1]) == CONST_INT)
5659 {
5660 rtx reg = gen_reg_rtx (SImode);
5661 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5662
5663 /* If the constant is already valid, leave it alone. */
5664 if (!const_ok_for_arm (val))
5665 {
5666 /* If setting all the top bits will make the constant
5667 loadable in a single instruction, then set them.
5668 Otherwise, sign extend the number. */
5669
5670 if (const_ok_for_arm (~(val | ~0xffff)))
5671 val |= ~0xffff;
5672 else if (val & 0x8000)
5673 val |= ~0xffff;
5674 }
5675
5676 emit_insn (gen_movsi (reg, GEN_INT (val)));
5677 operands[1] = gen_lowpart (HImode, reg);
5678 }
5679 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5680 && GET_CODE (operands[1]) == MEM)
5681 {
5682 rtx reg = gen_reg_rtx (SImode);
5683
5684 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5685 operands[1] = gen_lowpart (HImode, reg);
5686 }
5687 else if (!arm_arch4)
5688 {
5689 if (GET_CODE (operands[1]) == MEM)
5690 {
5691 rtx base;
5692 rtx offset = const0_rtx;
5693 rtx reg = gen_reg_rtx (SImode);
5694
5695 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5696 || (GET_CODE (base) == PLUS
5697 && (GET_CODE (offset = XEXP (base, 1))
5698 == CONST_INT)
5699 && ((INTVAL(offset) & 1) != 1)
5700 && GET_CODE (base = XEXP (base, 0)) == REG))
5701 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5702 {
5703 rtx new_rtx;
5704
5705 new_rtx = widen_memory_access (operands[1], SImode,
5706 ((INTVAL (offset) & ~3)
5707 - INTVAL (offset)));
5708 emit_insn (gen_movsi (reg, new_rtx));
5709 if (((INTVAL (offset) & 2) != 0)
5710 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5711 {
5712 rtx reg2 = gen_reg_rtx (SImode);
5713
5714 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5715 reg = reg2;
5716 }
5717 }
5718 else
5719 emit_insn (gen_movhi_bytes (reg, operands[1]));
5720
5721 operands[1] = gen_lowpart (HImode, reg);
5722 }
5723 }
5724 }
5725 /* Handle loading a large integer during reload. */
5726 else if (GET_CODE (operands[1]) == CONST_INT
5727 && !const_ok_for_arm (INTVAL (operands[1]))
5728 && !const_ok_for_arm (~INTVAL (operands[1])))
5729 {
5730 /* Writing a constant to memory needs a scratch, which should
5731 be handled with SECONDARY_RELOADs. */
5732 gcc_assert (GET_CODE (operands[0]) == REG);
5733
5734 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5735 emit_insn (gen_movsi (operands[0], operands[1]));
5736 DONE;
5737 }
5738 }
5739 else if (TARGET_THUMB2)
5740 {
5741 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5742 if (can_create_pseudo_p ())
5743 {
5744 if (GET_CODE (operands[0]) != REG)
5745 operands[1] = force_reg (HImode, operands[1]);
5746 /* Zero extend a constant, and keep it in an SImode reg. */
5747 else if (GET_CODE (operands[1]) == CONST_INT)
5748 {
5749 rtx reg = gen_reg_rtx (SImode);
5750 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5751
5752 emit_insn (gen_movsi (reg, GEN_INT (val)));
5753 operands[1] = gen_lowpart (HImode, reg);
5754 }
5755 }
5756 }
5757 else /* TARGET_THUMB1 */
5758 {
5759 if (can_create_pseudo_p ())
5760 {
5761 if (GET_CODE (operands[1]) == CONST_INT)
5762 {
5763 rtx reg = gen_reg_rtx (SImode);
5764
5765 emit_insn (gen_movsi (reg, operands[1]));
5766 operands[1] = gen_lowpart (HImode, reg);
5767 }
5768
5769 /* ??? We shouldn't really get invalid addresses here, but this can
5770 happen if we are passed an SP (never OK for HImode/QImode) or
5771 virtual register (also rejected as illegitimate for HImode/QImode)
5772 relative address. */
5773 /* ??? This should perhaps be fixed elsewhere, for instance, in
5774 fixup_stack_1, by checking for other kinds of invalid addresses,
5775 e.g. a bare reference to a virtual register. This may confuse the
5776 alpha though, which must handle this case differently. */
5777 if (GET_CODE (operands[0]) == MEM
5778 && !memory_address_p (GET_MODE (operands[0]),
5779 XEXP (operands[0], 0)))
5780 operands[0]
5781 = replace_equiv_address (operands[0],
5782 copy_to_reg (XEXP (operands[0], 0)));
5783
5784 if (GET_CODE (operands[1]) == MEM
5785 && !memory_address_p (GET_MODE (operands[1]),
5786 XEXP (operands[1], 0)))
5787 operands[1]
5788 = replace_equiv_address (operands[1],
5789 copy_to_reg (XEXP (operands[1], 0)));
5790
5791 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5792 {
5793 rtx reg = gen_reg_rtx (SImode);
5794
5795 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5796 operands[1] = gen_lowpart (HImode, reg);
5797 }
5798
5799 if (GET_CODE (operands[0]) == MEM)
5800 operands[1] = force_reg (HImode, operands[1]);
5801 }
5802 else if (GET_CODE (operands[1]) == CONST_INT
5803 && !satisfies_constraint_I (operands[1]))
5804 {
5805 /* Handle loading a large integer during reload. */
5806
5807 /* Writing a constant to memory needs a scratch, which should
5808 be handled with SECONDARY_RELOADs. */
5809 gcc_assert (GET_CODE (operands[0]) == REG);
5810
5811 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5812 emit_insn (gen_movsi (operands[0], operands[1]));
5813 DONE;
5814 }
5815 }
5816 "
5817 )
5818
5819 (define_insn "*thumb1_movhi_insn"
5820 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5821 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5822 "TARGET_THUMB1
5823 && ( register_operand (operands[0], HImode)
5824 || register_operand (operands[1], HImode))"
5825 "*
5826 switch (which_alternative)
5827 {
5828 case 0: return \"add %0, %1, #0\";
5829 case 2: return \"strh %1, %0\";
5830 case 3: return \"mov %0, %1\";
5831 case 4: return \"mov %0, %1\";
5832 case 5: return \"mov %0, %1\";
5833 default: gcc_unreachable ();
5834 case 1:
5835 /* The stack pointer can end up being taken as an index register.
5836 Catch this case here and deal with it. */
5837 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5838 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5839 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5840 {
5841 rtx ops[2];
5842 ops[0] = operands[0];
5843 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5844
5845 output_asm_insn (\"mov %0, %1\", ops);
5846
5847 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5848
5849 }
5850 return \"ldrh %0, %1\";
5851 }"
5852 [(set_attr "length" "2,4,2,2,2,2")
5853 (set_attr "type" "*,load1,store1,*,*,*")
5854 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5855
5856
5857 (define_expand "movhi_bytes"
5858 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5859 (set (match_dup 3)
5860 (zero_extend:SI (match_dup 6)))
5861 (set (match_operand:SI 0 "" "")
5862 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5863 "TARGET_ARM"
5864 "
5865 {
5866 rtx mem1, mem2;
5867 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5868
5869 mem1 = change_address (operands[1], QImode, addr);
5870 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5871 operands[0] = gen_lowpart (SImode, operands[0]);
5872 operands[1] = mem1;
5873 operands[2] = gen_reg_rtx (SImode);
5874 operands[3] = gen_reg_rtx (SImode);
5875 operands[6] = mem2;
5876
5877 if (BYTES_BIG_ENDIAN)
5878 {
5879 operands[4] = operands[2];
5880 operands[5] = operands[3];
5881 }
5882 else
5883 {
5884 operands[4] = operands[3];
5885 operands[5] = operands[2];
5886 }
5887 }"
5888 )
5889
5890 (define_expand "movhi_bigend"
5891 [(set (match_dup 2)
5892 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5893 (const_int 16)))
5894 (set (match_dup 3)
5895 (ashiftrt:SI (match_dup 2) (const_int 16)))
5896 (set (match_operand:HI 0 "s_register_operand" "")
5897 (match_dup 4))]
5898 "TARGET_ARM"
5899 "
5900 operands[2] = gen_reg_rtx (SImode);
5901 operands[3] = gen_reg_rtx (SImode);
5902 operands[4] = gen_lowpart (HImode, operands[3]);
5903 "
5904 )
5905
5906 ;; Pattern to recognize insn generated default case above
5907 (define_insn "*movhi_insn_arch4"
5908 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5909 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
5910 "TARGET_ARM
5911 && arm_arch4
5912 && (register_operand (operands[0], HImode)
5913 || register_operand (operands[1], HImode))"
5914 "@
5915 mov%?\\t%0, %1\\t%@ movhi
5916 mvn%?\\t%0, #%B1\\t%@ movhi
5917 str%(h%)\\t%1, %0\\t%@ movhi
5918 ldr%(h%)\\t%0, %1\\t%@ movhi"
5919 [(set_attr "type" "*,*,store1,load1")
5920 (set_attr "predicable" "yes")
5921 (set_attr "insn" "mov,mvn,*,*")
5922 (set_attr "pool_range" "*,*,*,256")
5923 (set_attr "neg_pool_range" "*,*,*,244")]
5924 )
5925
5926 (define_insn "*movhi_bytes"
5927 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5928 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5929 "TARGET_ARM"
5930 "@
5931 mov%?\\t%0, %1\\t%@ movhi
5932 mvn%?\\t%0, #%B1\\t%@ movhi"
5933 [(set_attr "predicable" "yes")
5934 (set_attr "insn" "mov,mvn")]
5935 )
5936
5937 (define_expand "thumb_movhi_clobber"
5938 [(set (match_operand:HI 0 "memory_operand" "")
5939 (match_operand:HI 1 "register_operand" ""))
5940 (clobber (match_operand:DI 2 "register_operand" ""))]
5941 "TARGET_THUMB1"
5942 "
5943 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5944 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5945 {
5946 emit_insn (gen_movhi (operands[0], operands[1]));
5947 DONE;
5948 }
5949 /* XXX Fixme, need to handle other cases here as well. */
5950 gcc_unreachable ();
5951 "
5952 )
5953
5954 ;; We use a DImode scratch because we may occasionally need an additional
5955 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5956 ;; to take any notice of the "o" constraint on the reload memory operand.
5957 (define_expand "reload_outhi"
5958 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5959 (match_operand:HI 1 "s_register_operand" "r")
5960 (match_operand:DI 2 "s_register_operand" "=&l")])]
5961 "TARGET_EITHER"
5962 "if (TARGET_ARM)
5963 arm_reload_out_hi (operands);
5964 else
5965 thumb_reload_out_hi (operands);
5966 DONE;
5967 "
5968 )
5969
5970 (define_expand "reload_inhi"
5971 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5972 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5973 (match_operand:DI 2 "s_register_operand" "=&r")])]
5974 "TARGET_EITHER"
5975 "
5976 if (TARGET_ARM)
5977 arm_reload_in_hi (operands);
5978 else
5979 thumb_reload_out_hi (operands);
5980 DONE;
5981 ")
5982
5983 (define_expand "movqi"
5984 [(set (match_operand:QI 0 "general_operand" "")
5985 (match_operand:QI 1 "general_operand" ""))]
5986 "TARGET_EITHER"
5987 "
5988 /* Everything except mem = const or mem = mem can be done easily.  */
5989
5990 if (can_create_pseudo_p ())
5991 {
5992 if (GET_CODE (operands[1]) == CONST_INT)
5993 {
5994 rtx reg = gen_reg_rtx (SImode);
5995
5996 /* For thumb we want an unsigned immediate, then we are more likely
5997 to be able to use a movs insn. */
5998 if (TARGET_THUMB)
5999 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6000
6001 emit_insn (gen_movsi (reg, operands[1]));
6002 operands[1] = gen_lowpart (QImode, reg);
6003 }
6004
6005 if (TARGET_THUMB)
6006 {
6007 /* ??? We shouldn't really get invalid addresses here, but this can
6008 happen if we are passed an SP (never OK for HImode/QImode) or
6009 virtual register (also rejected as illegitimate for HImode/QImode)
6010 relative address. */
6011 /* ??? This should perhaps be fixed elsewhere, for instance, in
6012 fixup_stack_1, by checking for other kinds of invalid addresses,
6013 e.g. a bare reference to a virtual register. This may confuse the
6014 alpha though, which must handle this case differently. */
6015 if (GET_CODE (operands[0]) == MEM
6016 && !memory_address_p (GET_MODE (operands[0]),
6017 XEXP (operands[0], 0)))
6018 operands[0]
6019 = replace_equiv_address (operands[0],
6020 copy_to_reg (XEXP (operands[0], 0)));
6021 if (GET_CODE (operands[1]) == MEM
6022 && !memory_address_p (GET_MODE (operands[1]),
6023 XEXP (operands[1], 0)))
6024 operands[1]
6025 = replace_equiv_address (operands[1],
6026 copy_to_reg (XEXP (operands[1], 0)));
6027 }
6028
6029 if (GET_CODE (operands[1]) == MEM && optimize > 0)
6030 {
6031 rtx reg = gen_reg_rtx (SImode);
6032
6033 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6034 operands[1] = gen_lowpart (QImode, reg);
6035 }
6036
6037 if (GET_CODE (operands[0]) == MEM)
6038 operands[1] = force_reg (QImode, operands[1]);
6039 }
6040 else if (TARGET_THUMB
6041 && GET_CODE (operands[1]) == CONST_INT
6042 && !satisfies_constraint_I (operands[1]))
6043 {
6044 /* Handle loading a large integer during reload. */
6045
6046 /* Writing a constant to memory needs a scratch, which should
6047 be handled with SECONDARY_RELOADs. */
6048 gcc_assert (GET_CODE (operands[0]) == REG);
6049
6050 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6051 emit_insn (gen_movsi (operands[0], operands[1]));
6052 DONE;
6053 }
6054 "
6055 )
6056
6057
6058 (define_insn "*arm_movqi_insn"
6059 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
6060 (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
6061 "TARGET_32BIT
6062 && ( register_operand (operands[0], QImode)
6063 || register_operand (operands[1], QImode))"
6064 "@
6065 mov%?\\t%0, %1
6066 mvn%?\\t%0, #%B1
6067 ldr%(b%)\\t%0, %1
6068 str%(b%)\\t%1, %0
6069 ldr%(b%)\\t%0, %1
6070 str%(b%)\\t%1, %0"
6071 [(set_attr "type" "*,*,load1,store1,load1,store1")
6072 (set_attr "insn" "mov,mvn,*,*,*,*")
6073 (set_attr "predicable" "yes")
6074 (set_attr "arch" "any,any,t2,t2,any,any")
6075 (set_attr "length" "4,4,2,2,4,4")]
6076 )
6077
6078 (define_insn "*thumb1_movqi_insn"
6079 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6080 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
6081 "TARGET_THUMB1
6082 && ( register_operand (operands[0], QImode)
6083 || register_operand (operands[1], QImode))"
6084 "@
6085 add\\t%0, %1, #0
6086 ldrb\\t%0, %1
6087 strb\\t%1, %0
6088 mov\\t%0, %1
6089 mov\\t%0, %1
6090 mov\\t%0, %1"
6091 [(set_attr "length" "2")
6092 (set_attr "type" "*,load1,store1,*,*,*")
6093 (set_attr "insn" "*,*,*,mov,mov,mov")
6094 (set_attr "pool_range" "*,32,*,*,*,*")
6095 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6096
6097 ;; HFmode moves
6098 (define_expand "movhf"
6099 [(set (match_operand:HF 0 "general_operand" "")
6100 (match_operand:HF 1 "general_operand" ""))]
6101 "TARGET_EITHER"
6102 "
6103 if (TARGET_32BIT)
6104 {
6105 if (GET_CODE (operands[0]) == MEM)
6106 operands[1] = force_reg (HFmode, operands[1]);
6107 }
6108 else /* TARGET_THUMB1 */
6109 {
6110 if (can_create_pseudo_p ())
6111 {
6112 if (GET_CODE (operands[0]) != REG)
6113 operands[1] = force_reg (HFmode, operands[1]);
6114 }
6115 }
6116 "
6117 )
6118
6119 (define_insn "*arm32_movhf"
6120 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6121 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6122 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6123 && ( s_register_operand (operands[0], HFmode)
6124 || s_register_operand (operands[1], HFmode))"
6125 "*
6126 switch (which_alternative)
6127 {
6128 case 0: /* ARM register from memory */
6129 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6130 case 1: /* memory from ARM register */
6131 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6132 case 2: /* ARM register from ARM register */
6133 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6134 case 3: /* ARM register from constant */
6135 {
6136 REAL_VALUE_TYPE r;
6137 long bits;
6138 rtx ops[4];
6139
6140 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6141 bits = real_to_target (NULL, &r, HFmode);
6142 ops[0] = operands[0];
6143 ops[1] = GEN_INT (bits);
6144 ops[2] = GEN_INT (bits & 0xff00);
6145 ops[3] = GEN_INT (bits & 0x00ff);
6146
6147 if (arm_arch_thumb2)
6148 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6149 else
6150 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6151 return \"\";
6152 }
6153 default:
6154 gcc_unreachable ();
6155 }
6156 "
6157 [(set_attr "conds" "unconditional")
6158 (set_attr "type" "load1,store1,*,*")
6159 (set_attr "insn" "*,*,mov,mov")
6160 (set_attr "length" "4,4,4,8")
6161 (set_attr "predicable" "yes")]
6162 )
6163
6164 (define_insn "*thumb1_movhf"
6165 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6166 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6167 "TARGET_THUMB1
6168 && ( s_register_operand (operands[0], HFmode)
6169 || s_register_operand (operands[1], HFmode))"
6170 "*
6171 switch (which_alternative)
6172 {
6173 case 1:
6174 {
6175 rtx addr;
6176 gcc_assert (GET_CODE (operands[1]) == MEM);
6177 addr = XEXP (operands[1], 0);
6178 if (GET_CODE (addr) == LABEL_REF
6179 || (GET_CODE (addr) == CONST
6180 && GET_CODE (XEXP (addr, 0)) == PLUS
6181 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6182 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6183 {
6184 /* Constant pool entry. */
6185 return \"ldr\\t%0, %1\";
6186 }
6187 return \"ldrh\\t%0, %1\";
6188 }
6189 case 2: return \"strh\\t%1, %0\";
6190 default: return \"mov\\t%0, %1\";
6191 }
6192 "
6193 [(set_attr "length" "2")
6194 (set_attr "type" "*,load1,store1,*,*")
6195 (set_attr "insn" "mov,*,*,mov,mov")
6196 (set_attr "pool_range" "*,1020,*,*,*")
6197 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6198
6199 (define_expand "movsf"
6200 [(set (match_operand:SF 0 "general_operand" "")
6201 (match_operand:SF 1 "general_operand" ""))]
6202 "TARGET_EITHER"
6203 "
6204 if (TARGET_32BIT)
6205 {
6206 if (GET_CODE (operands[0]) == MEM)
6207 operands[1] = force_reg (SFmode, operands[1]);
6208 }
6209 else /* TARGET_THUMB1 */
6210 {
6211 if (can_create_pseudo_p ())
6212 {
6213 if (GET_CODE (operands[0]) != REG)
6214 operands[1] = force_reg (SFmode, operands[1]);
6215 }
6216 }
6217 "
6218 )
6219
6220 ;; Transform a floating-point move of a constant to a core register into
6221 ;; an SImode operation.
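;; For example (a sketch): an SFmode move of the constant 1.0 into a core
;; register becomes an SImode move of its bit pattern 0x3f800000, which the
;; ordinary movsi/constant machinery then handles.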
6222 (define_split
6223 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6224 (match_operand:SF 1 "immediate_operand" ""))]
6225 "TARGET_EITHER
6226 && reload_completed
6227 && GET_CODE (operands[1]) == CONST_DOUBLE"
6228 [(set (match_dup 2) (match_dup 3))]
6229 "
6230 operands[2] = gen_lowpart (SImode, operands[0]);
6231 operands[3] = gen_lowpart (SImode, operands[1]);
6232 if (operands[2] == 0 || operands[3] == 0)
6233 FAIL;
6234 "
6235 )
6236
6237 (define_insn "*arm_movsf_soft_insn"
6238 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6239 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6240 "TARGET_32BIT
6241 && TARGET_SOFT_FLOAT
6242 && (GET_CODE (operands[0]) != MEM
6243 || register_operand (operands[1], SFmode))"
6244 "@
6245 mov%?\\t%0, %1
6246 ldr%?\\t%0, %1\\t%@ float
6247 str%?\\t%1, %0\\t%@ float"
6248 [(set_attr "predicable" "yes")
6249 (set_attr "type" "*,load1,store1")
6250 (set_attr "insn" "mov,*,*")
6251 (set_attr "pool_range" "*,4096,*")
6252 (set_attr "arm_neg_pool_range" "*,4084,*")
6253 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6254 )
6255
6256 ;;; ??? This should have alternatives for constants.
6257 (define_insn "*thumb1_movsf_insn"
6258 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6259 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6260 "TARGET_THUMB1
6261 && ( register_operand (operands[0], SFmode)
6262 || register_operand (operands[1], SFmode))"
6263 "@
6264 add\\t%0, %1, #0
6265 ldmia\\t%1, {%0}
6266 stmia\\t%0, {%1}
6267 ldr\\t%0, %1
6268 str\\t%1, %0
6269 mov\\t%0, %1
6270 mov\\t%0, %1"
6271 [(set_attr "length" "2")
6272 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6273 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6274 (set_attr "insn" "*,*,*,*,*,mov,mov")
6275 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6276 )
6277
6278 (define_expand "movdf"
6279 [(set (match_operand:DF 0 "general_operand" "")
6280 (match_operand:DF 1 "general_operand" ""))]
6281 "TARGET_EITHER"
6282 "
6283 if (TARGET_32BIT)
6284 {
6285 if (GET_CODE (operands[0]) == MEM)
6286 operands[1] = force_reg (DFmode, operands[1]);
6287 }
6288 else /* TARGET_THUMB */
6289 {
6290 if (can_create_pseudo_p ())
6291 {
6292 if (GET_CODE (operands[0]) != REG)
6293 operands[1] = force_reg (DFmode, operands[1]);
6294 }
6295 }
6296 "
6297 )
6298
6299 ;; Reloading a df mode value stored in integer regs to memory can require a
6300 ;; scratch reg.
6301 (define_expand "reload_outdf"
6302 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6303 (match_operand:DF 1 "s_register_operand" "r")
6304 (match_operand:SI 2 "s_register_operand" "=&r")]
6305 "TARGET_THUMB2"
6306 "
6307 {
6308 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6309
6310 if (code == REG)
6311 operands[2] = XEXP (operands[0], 0);
6312 else if (code == POST_INC || code == PRE_DEC)
6313 {
6314 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6315 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6316 emit_insn (gen_movdi (operands[0], operands[1]));
6317 DONE;
6318 }
6319 else if (code == PRE_INC)
6320 {
6321 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6322
6323 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6324 operands[2] = reg;
6325 }
6326 else if (code == POST_DEC)
6327 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6328 else
6329 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6330 XEXP (XEXP (operands[0], 0), 1)));
6331
6332 emit_insn (gen_rtx_SET (VOIDmode,
6333 replace_equiv_address (operands[0], operands[2]),
6334 operands[1]));
6335
6336 if (code == POST_DEC)
6337 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6338
6339 DONE;
6340 }"
6341 )
6342
6343 (define_insn "*movdf_soft_insn"
6344 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6345 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6346 "TARGET_32BIT && TARGET_SOFT_FLOAT
6347 && ( register_operand (operands[0], DFmode)
6348 || register_operand (operands[1], DFmode))"
6349 "*
6350 switch (which_alternative)
6351 {
6352 case 0:
6353 case 1:
6354 case 2:
6355 return \"#\";
6356 default:
6357 return output_move_double (operands, true, NULL);
6358 }
6359 "
6360 [(set_attr "length" "8,12,16,8,8")
6361 (set_attr "type" "*,*,*,load2,store2")
6362 (set_attr "pool_range" "*,*,*,1020,*")
6363 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6364 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6365 )
6366
6367 ;;; ??? This should have alternatives for constants.
6368 ;;; ??? This was originally identical to the movdi_insn pattern.
6369 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6370 ;;; thumb_reorg with a memory reference.
6371 (define_insn "*thumb_movdf_insn"
6372 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6373 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6374 "TARGET_THUMB1
6375 && ( register_operand (operands[0], DFmode)
6376 || register_operand (operands[1], DFmode))"
6377 "*
6378 switch (which_alternative)
6379 {
6380 default:
6381 case 0:
6382 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6383 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6384 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6385 case 1:
6386 return \"ldmia\\t%1, {%0, %H0}\";
6387 case 2:
6388 return \"stmia\\t%0, {%1, %H1}\";
6389 case 3:
6390 return thumb_load_double_from_address (operands);
6391 case 4:
6392 operands[2] = gen_rtx_MEM (SImode,
6393 plus_constant (XEXP (operands[0], 0), 4));
6394 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6395 return \"\";
6396 case 5:
6397 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6398 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6399 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6400 }
6401 "
6402 [(set_attr "length" "4,2,2,6,4,4")
6403 (set_attr "type" "*,load2,store2,load2,store2,*")
6404 (set_attr "insn" "*,*,*,*,*,mov")
6405 (set_attr "pool_range" "*,*,*,1020,*,*")]
6406 )
6407
6408 (define_expand "movxf"
6409 [(set (match_operand:XF 0 "general_operand" "")
6410 (match_operand:XF 1 "general_operand" ""))]
6411 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6412 "
6413 if (GET_CODE (operands[0]) == MEM)
6414 operands[1] = force_reg (XFmode, operands[1]);
6415 "
6416 )
6417
6418 \f
6419
6420 ;; load- and store-multiple insns
6421 ;; The arm can load/store any set of registers, provided that they are in
6422 ;; ascending order, but these expanders assume a contiguous set.
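;; For example, a contiguous four-register block starting at r4 is loaded
;; with a single
;;         ldmia   r0, {r4, r5, r6, r7}
;; (illustrative; the expanders below build the equivalent PARALLEL via
;; arm_gen_load_multiple/arm_gen_store_multiple).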
6423
6424 (define_expand "load_multiple"
6425 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6426 (match_operand:SI 1 "" ""))
6427 (use (match_operand:SI 2 "" ""))])]
6428 "TARGET_32BIT"
6429 {
6430 HOST_WIDE_INT offset = 0;
6431
6432 /* Support only fixed-point (core) registers.  */
6433 if (GET_CODE (operands[2]) != CONST_INT
6434 || INTVAL (operands[2]) > 14
6435 || INTVAL (operands[2]) < 2
6436 || GET_CODE (operands[1]) != MEM
6437 || GET_CODE (operands[0]) != REG
6438 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6439 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6440 FAIL;
6441
6442 operands[3]
6443 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6444 INTVAL (operands[2]),
6445 force_reg (SImode, XEXP (operands[1], 0)),
6446 FALSE, operands[1], &offset);
6447 })
6448
6449 (define_expand "store_multiple"
6450 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6451 (match_operand:SI 1 "" ""))
6452 (use (match_operand:SI 2 "" ""))])]
6453 "TARGET_32BIT"
6454 {
6455 HOST_WIDE_INT offset = 0;
6456
6457 /* Support only fixed-point (core) registers.  */
6458 if (GET_CODE (operands[2]) != CONST_INT
6459 || INTVAL (operands[2]) > 14
6460 || INTVAL (operands[2]) < 2
6461 || GET_CODE (operands[1]) != REG
6462 || GET_CODE (operands[0]) != MEM
6463 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6464 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6465 FAIL;
6466
6467 operands[3]
6468 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6469 INTVAL (operands[2]),
6470 force_reg (SImode, XEXP (operands[0], 0)),
6471 FALSE, operands[0], &offset);
6472 })
6473
6474
6475 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6476 ;; We could let this apply for blocks of less than this, but it clobbers so
6477 ;; many registers that there is then probably a better way.
6478
6479 (define_expand "movmemqi"
6480 [(match_operand:BLK 0 "general_operand" "")
6481 (match_operand:BLK 1 "general_operand" "")
6482 (match_operand:SI 2 "const_int_operand" "")
6483 (match_operand:SI 3 "const_int_operand" "")]
6484 "TARGET_EITHER"
6485 "
6486 if (TARGET_32BIT)
6487 {
6488 if (arm_gen_movmemqi (operands))
6489 DONE;
6490 FAIL;
6491 }
6492 else /* TARGET_THUMB1 */
6493 {
6494 if ( INTVAL (operands[3]) != 4
6495 || INTVAL (operands[2]) > 48)
6496 FAIL;
6497
6498 thumb_expand_movmemqi (operands);
6499 DONE;
6500 }
6501 "
6502 )
6503
6504 ;; Thumb block-move insns
6505
6506 (define_insn "movmem12b"
6507 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6508 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6509 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6510 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6511 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6512 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6513 (set (match_operand:SI 0 "register_operand" "=l")
6514 (plus:SI (match_dup 2) (const_int 12)))
6515 (set (match_operand:SI 1 "register_operand" "=l")
6516 (plus:SI (match_dup 3) (const_int 12)))
6517 (clobber (match_scratch:SI 4 "=&l"))
6518 (clobber (match_scratch:SI 5 "=&l"))
6519 (clobber (match_scratch:SI 6 "=&l"))]
6520 "TARGET_THUMB1"
6521 "* return thumb_output_move_mem_multiple (3, operands);"
6522 [(set_attr "length" "4")
6523 ; This isn't entirely accurate... It loads as well, but in terms of
6524 ; scheduling the following insn it is better to consider it as a store
6525 (set_attr "type" "store3")]
6526 )
6527
6528 (define_insn "movmem8b"
6529 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6530 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6531 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6532 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6533 (set (match_operand:SI 0 "register_operand" "=l")
6534 (plus:SI (match_dup 2) (const_int 8)))
6535 (set (match_operand:SI 1 "register_operand" "=l")
6536 (plus:SI (match_dup 3) (const_int 8)))
6537 (clobber (match_scratch:SI 4 "=&l"))
6538 (clobber (match_scratch:SI 5 "=&l"))]
6539 "TARGET_THUMB1"
6540 "* return thumb_output_move_mem_multiple (2, operands);"
6541 [(set_attr "length" "4")
6542 ; This isn't entirely accurate... It loads as well, but in terms of
6543 ; scheduling the following insn it is better to consider it as a store
6544 (set_attr "type" "store2")]
6545 )
6546
6547 \f
6548
6549 ;; Compare & branch insns
6550 ;; The range calculations work as follows:
6551 ;; For forward branches, the address calculation returns the address of
6552 ;; the next instruction. This is 2 beyond the branch instruction.
6553 ;; For backward branches, the address calculation returns the address of
6554 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6555 ;; instruction for the shortest sequence, and 4 before the branch instruction
6556 ;; if we have to jump around an unconditional branch.
6557 ;; To the basic branch range the PC offset must be added (this is +4).
6558 ;; So for forward branches we have
6559 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6560 ;; And for backward branches we have
6561 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6562 ;;
6563 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6564 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
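;; Worked through for the bounds actually used in the length attributes below:
;;   'b<cond>' forward:   254 - 2 + 4 = 256
;;   'b<cond>' backward: -256 - (-2) + 4 = -250
;;   'b' forward:        2046 - 2 + 4 = 2048
;;   'b' backward:      -2048 - (-4) + 4 = -2040
;; matching the (-250 .. 256) and (-2040 .. 2048) tests on the branch
;; distance in the cbranch patterns.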
6565
6566 (define_expand "cbranchsi4"
6567 [(set (pc) (if_then_else
6568 (match_operator 0 "arm_comparison_operator"
6569 [(match_operand:SI 1 "s_register_operand" "")
6570 (match_operand:SI 2 "nonmemory_operand" "")])
6571 (label_ref (match_operand 3 "" ""))
6572 (pc)))]
6573 "TARGET_THUMB1 || TARGET_32BIT"
6574 "
6575 if (!TARGET_THUMB1)
6576 {
6577 if (!arm_add_operand (operands[2], SImode))
6578 operands[2] = force_reg (SImode, operands[2]);
6579 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6580 operands[3]));
6581 DONE;
6582 }
6583 if (thumb1_cmpneg_operand (operands[2], SImode))
6584 {
6585 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6586 operands[3], operands[0]));
6587 DONE;
6588 }
6589 if (!thumb1_cmp_operand (operands[2], SImode))
6590 operands[2] = force_reg (SImode, operands[2]);
6591 ")
6592
6593 ;; A pattern to recognize a special situation and optimize for it.
6594 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6595 ;; due to the available addressing modes. Hence, convert a signed comparison
6596 ;; with zero into an unsigned comparison with 127 if possible.
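;; For example: for an 8-bit value x loaded with LDRB (zero-extended),
;;   (signed char) x >= 0   <=>   (unsigned char) x <= 127
;;   (signed char) x <  0   <=>   (unsigned char) x >  127
;; so the expander below rewrites GE/LT against zero as LEU/GTU against 127.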
6597 (define_expand "cbranchqi4"
6598 [(set (pc) (if_then_else
6599 (match_operator 0 "lt_ge_comparison_operator"
6600 [(match_operand:QI 1 "memory_operand" "")
6601 (match_operand:QI 2 "const0_operand" "")])
6602 (label_ref (match_operand 3 "" ""))
6603 (pc)))]
6604 "TARGET_THUMB1"
6605 {
6606 rtx xops[4];
6607 xops[1] = gen_reg_rtx (SImode);
6608 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6609 xops[2] = GEN_INT (127);
6610 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6611 VOIDmode, xops[1], xops[2]);
6612 xops[3] = operands[3];
6613 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6614 DONE;
6615 })
6616
6617 (define_expand "cbranchsf4"
6618 [(set (pc) (if_then_else
6619 (match_operator 0 "arm_comparison_operator"
6620 [(match_operand:SF 1 "s_register_operand" "")
6621 (match_operand:SF 2 "arm_float_compare_operand" "")])
6622 (label_ref (match_operand 3 "" ""))
6623 (pc)))]
6624 "TARGET_32BIT && TARGET_HARD_FLOAT"
6625 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6626 operands[3])); DONE;"
6627 )
6628
6629 (define_expand "cbranchdf4"
6630 [(set (pc) (if_then_else
6631 (match_operator 0 "arm_comparison_operator"
6632 [(match_operand:DF 1 "s_register_operand" "")
6633 (match_operand:DF 2 "arm_float_compare_operand" "")])
6634 (label_ref (match_operand 3 "" ""))
6635 (pc)))]
6636 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6637 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6638 operands[3])); DONE;"
6639 )
6640
6641 (define_expand "cbranchdi4"
6642 [(set (pc) (if_then_else
6643 (match_operator 0 "arm_comparison_operator"
6644 [(match_operand:DI 1 "cmpdi_operand" "")
6645 (match_operand:DI 2 "cmpdi_operand" "")])
6646 (label_ref (match_operand 3 "" ""))
6647 (pc)))]
6648 "TARGET_32BIT"
6649 "{
6650 rtx swap = NULL_RTX;
6651 enum rtx_code code = GET_CODE (operands[0]);
6652
6653 /* We should not have two constants. */
6654 gcc_assert (GET_MODE (operands[1]) == DImode
6655 || GET_MODE (operands[2]) == DImode);
6656
6657 /* Flip unimplemented DImode comparisons to a form that
6658 arm_gen_compare_reg can handle. */
6659 switch (code)
6660 {
6661 case GT:
6662 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6663 case LE:
6664 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6665 case GTU:
6666 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6667 case LEU:
6668 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6669 default:
6670 break;
6671 }
6672 if (swap)
6673 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6674 operands[3]));
6675 else
6676 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6677 operands[3]));
6678 DONE;
6679 }"
6680 )
6681
6682 (define_insn "cbranchsi4_insn"
6683 [(set (pc) (if_then_else
6684 (match_operator 0 "arm_comparison_operator"
6685 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6686 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6687 (label_ref (match_operand 3 "" ""))
6688 (pc)))]
6689 "TARGET_THUMB1"
6690 {
6691 rtx t = cfun->machine->thumb1_cc_insn;
6692 if (t != NULL_RTX)
6693 {
6694 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6695 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6696 t = NULL_RTX;
6697 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6698 {
6699 if (!noov_comparison_operator (operands[0], VOIDmode))
6700 t = NULL_RTX;
6701 }
6702 else if (cfun->machine->thumb1_cc_mode != CCmode)
6703 t = NULL_RTX;
6704 }
6705 if (t == NULL_RTX)
6706 {
6707 output_asm_insn ("cmp\t%1, %2", operands);
6708 cfun->machine->thumb1_cc_insn = insn;
6709 cfun->machine->thumb1_cc_op0 = operands[1];
6710 cfun->machine->thumb1_cc_op1 = operands[2];
6711 cfun->machine->thumb1_cc_mode = CCmode;
6712 }
6713 else
6714 /* Ensure we emit the right type of condition code on the jump. */
6715 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6716 CC_REGNUM);
6717
6718 switch (get_attr_length (insn))
6719 {
6720 case 4: return \"b%d0\\t%l3\";
6721 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6722 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6723 }
6724 }
6725 [(set (attr "far_jump")
6726 (if_then_else
6727 (eq_attr "length" "8")
6728 (const_string "yes")
6729 (const_string "no")))
6730 (set (attr "length")
6731 (if_then_else
6732 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6733 (le (minus (match_dup 3) (pc)) (const_int 256)))
6734 (const_int 4)
6735 (if_then_else
6736 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6737 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6738 (const_int 6)
6739 (const_int 8))))]
6740 )
6741
6742 (define_insn "cbranchsi4_scratch"
6743 [(set (pc) (if_then_else
6744 (match_operator 4 "arm_comparison_operator"
6745 [(match_operand:SI 1 "s_register_operand" "l,0")
6746 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6747 (label_ref (match_operand 3 "" ""))
6748 (pc)))
6749 (clobber (match_scratch:SI 0 "=l,l"))]
6750 "TARGET_THUMB1"
6751 "*
6752 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6753
6754 switch (get_attr_length (insn))
6755 {
6756 case 4: return \"b%d4\\t%l3\";
6757 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6758 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6759 }
6760 "
6761 [(set (attr "far_jump")
6762 (if_then_else
6763 (eq_attr "length" "8")
6764 (const_string "yes")
6765 (const_string "no")))
6766 (set (attr "length")
6767 (if_then_else
6768 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6769 (le (minus (match_dup 3) (pc)) (const_int 256)))
6770 (const_int 4)
6771 (if_then_else
6772 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6773 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6774 (const_int 6)
6775 (const_int 8))))]
6776 )
6777
6778 ;; Two peepholes to generate subtract of 0 instead of a move if the
6779 ;; condition codes will be useful.
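;; For example (hypothetical registers), the intent is that
;;     mov  r0, r1
;;     cmp  r1, #0
;;     beq  .L1
;; can instead be emitted as
;;     sub  r0, r1, #0     @ copies r1 and sets the flags
;;     beq  .L1
;; so the flag-setting subtract makes the separate compare unnecessary.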
6780 (define_peephole2
6781 [(set (match_operand:SI 0 "low_register_operand" "")
6782 (match_operand:SI 1 "low_register_operand" ""))
6783 (set (pc)
6784 (if_then_else (match_operator 2 "arm_comparison_operator"
6785 [(match_dup 1) (const_int 0)])
6786 (label_ref (match_operand 3 "" ""))
6787 (pc)))]
6788 "TARGET_THUMB1"
6789 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6790 (set (pc)
6791 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6792 (label_ref (match_dup 3))
6793 (pc)))]
6794 "")
6795
6796 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6797 ;; merge cases like this because operand 1 is a hard register in a class
6798 ;; that arm_class_likely_spilled_p reports as likely to be spilled.
6799 (define_peephole2
6800 [(set (match_operand:SI 0 "low_register_operand" "")
6801 (match_operand:SI 1 "low_register_operand" ""))
6802 (set (pc)
6803 (if_then_else (match_operator 2 "arm_comparison_operator"
6804 [(match_dup 0) (const_int 0)])
6805 (label_ref (match_operand 3 "" ""))
6806 (pc)))]
6807 "TARGET_THUMB1"
6808 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6809 (set (pc)
6810 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6811 (label_ref (match_dup 3))
6812 (pc)))]
6813 "")
6814
6815 (define_insn "*negated_cbranchsi4"
6816 [(set (pc)
6817 (if_then_else
6818 (match_operator 0 "equality_operator"
6819 [(match_operand:SI 1 "s_register_operand" "l")
6820 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6821 (label_ref (match_operand 3 "" ""))
6822 (pc)))]
6823 "TARGET_THUMB1"
6824 "*
6825 output_asm_insn (\"cmn\\t%1, %2\", operands);
6826 switch (get_attr_length (insn))
6827 {
6828 case 4: return \"b%d0\\t%l3\";
6829 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6830 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6831 }
6832 "
6833 [(set (attr "far_jump")
6834 (if_then_else
6835 (eq_attr "length" "8")
6836 (const_string "yes")
6837 (const_string "no")))
6838 (set (attr "length")
6839 (if_then_else
6840 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6841 (le (minus (match_dup 3) (pc)) (const_int 256)))
6842 (const_int 4)
6843 (if_then_else
6844 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6845 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6846 (const_int 6)
6847 (const_int 8))))]
6848 )
6849
6850 (define_insn "*tbit_cbranch"
6851 [(set (pc)
6852 (if_then_else
6853 (match_operator 0 "equality_operator"
6854 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6855 (const_int 1)
6856 (match_operand:SI 2 "const_int_operand" "i"))
6857 (const_int 0)])
6858 (label_ref (match_operand 3 "" ""))
6859 (pc)))
6860 (clobber (match_scratch:SI 4 "=l"))]
6861 "TARGET_THUMB1"
6862 "*
6863 {
6864 rtx op[3];
6865 op[0] = operands[4];
6866 op[1] = operands[1];
6867 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6868
6869 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6870 switch (get_attr_length (insn))
6871 {
6872 case 4: return \"b%d0\\t%l3\";
6873 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6874 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6875 }
6876 }"
6877 [(set (attr "far_jump")
6878 (if_then_else
6879 (eq_attr "length" "8")
6880 (const_string "yes")
6881 (const_string "no")))
6882 (set (attr "length")
6883 (if_then_else
6884 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6885 (le (minus (match_dup 3) (pc)) (const_int 256)))
6886 (const_int 4)
6887 (if_then_else
6888 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6889 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6890 (const_int 6)
6891 (const_int 8))))]
6892 )
6893
6894 (define_insn "*tlobits_cbranch"
6895 [(set (pc)
6896 (if_then_else
6897 (match_operator 0 "equality_operator"
6898 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6899 (match_operand:SI 2 "const_int_operand" "i")
6900 (const_int 0))
6901 (const_int 0)])
6902 (label_ref (match_operand 3 "" ""))
6903 (pc)))
6904 (clobber (match_scratch:SI 4 "=l"))]
6905 "TARGET_THUMB1"
6906 "*
6907 {
6908 rtx op[3];
6909 op[0] = operands[4];
6910 op[1] = operands[1];
6911 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6912
6913 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6914 switch (get_attr_length (insn))
6915 {
6916 case 4: return \"b%d0\\t%l3\";
6917 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6918 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6919 }
6920 }"
6921 [(set (attr "far_jump")
6922 (if_then_else
6923 (eq_attr "length" "8")
6924 (const_string "yes")
6925 (const_string "no")))
6926 (set (attr "length")
6927 (if_then_else
6928 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6929 (le (minus (match_dup 3) (pc)) (const_int 256)))
6930 (const_int 4)
6931 (if_then_else
6932 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6933 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6934 (const_int 6)
6935 (const_int 8))))]
6936 )
6937
6938 (define_insn "*tstsi3_cbranch"
6939 [(set (pc)
6940 (if_then_else
6941 (match_operator 3 "equality_operator"
6942 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6943 (match_operand:SI 1 "s_register_operand" "l"))
6944 (const_int 0)])
6945 (label_ref (match_operand 2 "" ""))
6946 (pc)))]
6947 "TARGET_THUMB1"
6948 "*
6949 {
6950 output_asm_insn (\"tst\\t%0, %1\", operands);
6951 switch (get_attr_length (insn))
6952 {
6953 case 4: return \"b%d3\\t%l2\";
6954 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6955 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6956 }
6957 }"
6958 [(set (attr "far_jump")
6959 (if_then_else
6960 (eq_attr "length" "8")
6961 (const_string "yes")
6962 (const_string "no")))
6963 (set (attr "length")
6964 (if_then_else
6965 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6966 (le (minus (match_dup 2) (pc)) (const_int 256)))
6967 (const_int 4)
6968 (if_then_else
6969 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6970 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6971 (const_int 6)
6972 (const_int 8))))]
6973 )
6974
6975 (define_insn "*cbranchne_decr1"
6976 [(set (pc)
6977 (if_then_else (match_operator 3 "equality_operator"
6978 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6979 (const_int 0)])
6980 (label_ref (match_operand 4 "" ""))
6981 (pc)))
6982 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6983 (plus:SI (match_dup 2) (const_int -1)))
6984 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6985 "TARGET_THUMB1"
6986 "*
6987 {
6988 rtx cond[2];
6989 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6990 ? GEU : LTU),
6991 VOIDmode, operands[2], const1_rtx);
6992 cond[1] = operands[4];
6993
6994 if (which_alternative == 0)
6995 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6996 else if (which_alternative == 1)
6997 {
6998 /* We must provide an alternative for a hi reg because reload
6999 cannot handle output reloads on a jump instruction, but we
7000 can't subtract into that. Fortunately a mov from lo to hi
7001 does not clobber the condition codes. */
7002 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7003 output_asm_insn (\"mov\\t%0, %1\", operands);
7004 }
7005 else
7006 {
7007 /* Similarly, but the target is memory. */
7008 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7009 output_asm_insn (\"str\\t%1, %0\", operands);
7010 }
7011
7012 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7013 {
7014 case 4:
7015 output_asm_insn (\"b%d0\\t%l1\", cond);
7016 return \"\";
7017 case 6:
7018 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7019 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7020 default:
7021 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7022 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7023 }
7024 }
7025 "
7026 [(set (attr "far_jump")
7027 (if_then_else
7028 (ior (and (eq (symbol_ref ("which_alternative"))
7029 (const_int 0))
7030 (eq_attr "length" "8"))
7031 (eq_attr "length" "10"))
7032 (const_string "yes")
7033 (const_string "no")))
7034 (set_attr_alternative "length"
7035 [
7036 ;; Alternative 0
7037 (if_then_else
7038 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7039 (le (minus (match_dup 4) (pc)) (const_int 256)))
7040 (const_int 4)
7041 (if_then_else
7042 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7043 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7044 (const_int 6)
7045 (const_int 8)))
7046 ;; Alternative 1
7047 (if_then_else
7048 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7049 (le (minus (match_dup 4) (pc)) (const_int 256)))
7050 (const_int 6)
7051 (if_then_else
7052 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7053 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7054 (const_int 8)
7055 (const_int 10)))
7056 ;; Alternative 2
7057 (if_then_else
7058 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7059 (le (minus (match_dup 4) (pc)) (const_int 256)))
7060 (const_int 6)
7061 (if_then_else
7062 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7063 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7064 (const_int 8)
7065 (const_int 10)))
7066 ;; Alternative 3
7067 (if_then_else
7068 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7069 (le (minus (match_dup 4) (pc)) (const_int 256)))
7070 (const_int 6)
7071 (if_then_else
7072 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7073 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7074 (const_int 8)
7075 (const_int 10)))])]
7076 )
7077
7078 (define_insn "*addsi3_cbranch"
7079 [(set (pc)
7080 (if_then_else
7081 (match_operator 4 "arm_comparison_operator"
7082 [(plus:SI
7083 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7084 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7085 (const_int 0)])
7086 (label_ref (match_operand 5 "" ""))
7087 (pc)))
7088 (set
7089 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7090 (plus:SI (match_dup 2) (match_dup 3)))
7091 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7092 "TARGET_THUMB1
7093 && (GET_CODE (operands[4]) == EQ
7094 || GET_CODE (operands[4]) == NE
7095 || GET_CODE (operands[4]) == GE
7096 || GET_CODE (operands[4]) == LT)"
7097 "*
7098 {
7099 rtx cond[3];
7100
7101 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7102 cond[1] = operands[2];
7103 cond[2] = operands[3];
7104
7105 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7106 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7107 else
7108 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7109
7110 if (which_alternative >= 2
7111 && which_alternative < 4)
7112 output_asm_insn (\"mov\\t%0, %1\", operands);
7113 else if (which_alternative >= 4)
7114 output_asm_insn (\"str\\t%1, %0\", operands);
7115
7116 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7117 {
7118 case 4:
7119 return \"b%d4\\t%l5\";
7120 case 6:
7121 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7122 default:
7123 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7124 }
7125 }
7126 "
7127 [(set (attr "far_jump")
7128 (if_then_else
7129 (ior (and (lt (symbol_ref ("which_alternative"))
7130 (const_int 2))
7131 (eq_attr "length" "8"))
7132 (eq_attr "length" "10"))
7133 (const_string "yes")
7134 (const_string "no")))
7135 (set (attr "length")
7136 (if_then_else
7137 (lt (symbol_ref ("which_alternative"))
7138 (const_int 2))
7139 (if_then_else
7140 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7141 (le (minus (match_dup 5) (pc)) (const_int 256)))
7142 (const_int 4)
7143 (if_then_else
7144 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7145 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7146 (const_int 6)
7147 (const_int 8)))
7148 (if_then_else
7149 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7150 (le (minus (match_dup 5) (pc)) (const_int 256)))
7151 (const_int 6)
7152 (if_then_else
7153 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7154 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7155 (const_int 8)
7156 (const_int 10)))))]
7157 )
7158
7159 (define_insn "*addsi3_cbranch_scratch"
7160 [(set (pc)
7161 (if_then_else
7162 (match_operator 3 "arm_comparison_operator"
7163 [(plus:SI
7164 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7165 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7166 (const_int 0)])
7167 (label_ref (match_operand 4 "" ""))
7168 (pc)))
7169 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7170 "TARGET_THUMB1
7171 && (GET_CODE (operands[3]) == EQ
7172 || GET_CODE (operands[3]) == NE
7173 || GET_CODE (operands[3]) == GE
7174 || GET_CODE (operands[3]) == LT)"
7175 "*
7176 {
7177 switch (which_alternative)
7178 {
7179 case 0:
7180 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7181 break;
7182 case 1:
7183 output_asm_insn (\"cmn\t%1, %2\", operands);
7184 break;
7185 case 2:
7186 if (INTVAL (operands[2]) < 0)
7187 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7188 else
7189 output_asm_insn (\"add\t%0, %1, %2\", operands);
7190 break;
7191 case 3:
7192 if (INTVAL (operands[2]) < 0)
7193 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7194 else
7195 output_asm_insn (\"add\t%0, %0, %2\", operands);
7196 break;
7197 }
7198
7199 switch (get_attr_length (insn))
7200 {
7201 case 4:
7202 return \"b%d3\\t%l4\";
7203 case 6:
7204 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7205 default:
7206 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7207 }
7208 }
7209 "
7210 [(set (attr "far_jump")
7211 (if_then_else
7212 (eq_attr "length" "8")
7213 (const_string "yes")
7214 (const_string "no")))
7215 (set (attr "length")
7216 (if_then_else
7217 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7218 (le (minus (match_dup 4) (pc)) (const_int 256)))
7219 (const_int 4)
7220 (if_then_else
7221 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7222 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7223 (const_int 6)
7224 (const_int 8))))]
7225 )
7226
7227
7228 ;; Comparison and test insns
7229
7230 (define_insn "*arm_cmpsi_insn"
7231 [(set (reg:CC CC_REGNUM)
7232 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7233 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
7234 "TARGET_32BIT"
7235 "@
7236 cmp%?\\t%0, %1
7237 cmp%?\\t%0, %1
7238 cmp%?\\t%0, %1
7239 cmn%?\\t%0, #%n1"
7240 [(set_attr "conds" "set")
7241 (set_attr "arch" "t2,t2,any,any")
7242 (set_attr "length" "2,2,4,4")]
7243 )
7244
7245 (define_insn "*cmpsi_shiftsi"
7246 [(set (reg:CC CC_REGNUM)
7247 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7248 (match_operator:SI 3 "shift_operator"
7249 [(match_operand:SI 1 "s_register_operand" "r,r")
7250 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7251 "TARGET_32BIT"
7252 "cmp%?\\t%0, %1%S3"
7253 [(set_attr "conds" "set")
7254 (set_attr "shift" "1")
7255 (set_attr "arch" "32,a")
7256 (set_attr "type" "alu_shift,alu_shift_reg")])
7257
7258 (define_insn "*cmpsi_shiftsi_swp"
7259 [(set (reg:CC_SWP CC_REGNUM)
7260 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7261 [(match_operand:SI 1 "s_register_operand" "r,r")
7262 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7263 (match_operand:SI 0 "s_register_operand" "r,r")))]
7264 "TARGET_32BIT"
7265 "cmp%?\\t%0, %1%S3"
7266 [(set_attr "conds" "set")
7267 (set_attr "shift" "1")
7268 (set_attr "arch" "32,a")
7269 (set_attr "type" "alu_shift,alu_shift_reg")])
7270
7271 (define_insn "*arm_cmpsi_negshiftsi_si"
7272 [(set (reg:CC_Z CC_REGNUM)
7273 (compare:CC_Z
7274 (neg:SI (match_operator:SI 1 "shift_operator"
7275 [(match_operand:SI 2 "s_register_operand" "r")
7276 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7277 (match_operand:SI 0 "s_register_operand" "r")))]
7278 "TARGET_ARM"
7279 "cmn%?\\t%0, %2%S1"
7280 [(set_attr "conds" "set")
7281 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7282 (const_string "alu_shift")
7283 (const_string "alu_shift_reg")))]
7284 )
7285
7286 ;; DImode comparisons. The generic code generates branches that
7287 ;; if-conversion cannot reduce to a conditional compare, so we do
7288 ;; that directly.
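;; For instance, a signed 64-bit comparison of r1:r0 against r3:r2 (a sketch
;; with a hypothetical register assignment) comes out as
;;     cmp   r0, r2        @ compare the low words
;;     sbcs  ip, r1, r3    @ subtract the high words with borrow, set flags
;; after which N, C and V describe the full 64-bit ordering (CC_NCV), so a
;; single conditional branch can consume the result.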
7289
7290 (define_insn "*arm_cmpdi_insn"
7291 [(set (reg:CC_NCV CC_REGNUM)
7292 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7293 (match_operand:DI 1 "arm_di_operand" "rDi")))
7294 (clobber (match_scratch:SI 2 "=r"))]
7295 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7296 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7297 [(set_attr "conds" "set")
7298 (set_attr "length" "8")]
7299 )
7300
7301 (define_insn "*arm_cmpdi_unsigned"
7302 [(set (reg:CC_CZ CC_REGNUM)
7303 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7304 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7305 "TARGET_ARM"
7306 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7307 [(set_attr "conds" "set")
7308 (set_attr "length" "8")]
7309 )
7310
7311 (define_insn "*arm_cmpdi_zero"
7312 [(set (reg:CC_Z CC_REGNUM)
7313 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7314 (const_int 0)))
7315 (clobber (match_scratch:SI 1 "=r"))]
7316 "TARGET_32BIT"
7317 "orr%.\\t%1, %Q0, %R0"
7318 [(set_attr "conds" "set")]
7319 )
7320
7321 (define_insn "*thumb_cmpdi_zero"
7322 [(set (reg:CC_Z CC_REGNUM)
7323 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7324 (const_int 0)))
7325 (clobber (match_scratch:SI 1 "=l"))]
7326 "TARGET_THUMB1"
7327 "orr\\t%1, %Q0, %R0"
7328 [(set_attr "conds" "set")
7329 (set_attr "length" "2")]
7330 )
7331
7332 ;; Cirrus SF compare instruction
7333 (define_insn "*cirrus_cmpsf"
7334 [(set (reg:CCFP CC_REGNUM)
7335 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7336 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7337 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7338 "cfcmps%?\\tr15, %V0, %V1"
7339 [(set_attr "type" "mav_farith")
7340 (set_attr "cirrus" "compare")]
7341 )
7342
7343 ;; Cirrus DF compare instruction
7344 (define_insn "*cirrus_cmpdf"
7345 [(set (reg:CCFP CC_REGNUM)
7346 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7347 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7348 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7349 "cfcmpd%?\\tr15, %V0, %V1"
7350 [(set_attr "type" "mav_farith")
7351 (set_attr "cirrus" "compare")]
7352 )
7353
7354 (define_insn "*cirrus_cmpdi"
7355 [(set (reg:CC CC_REGNUM)
7356 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7357 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7358 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7359 "cfcmp64%?\\tr15, %V0, %V1"
7360 [(set_attr "type" "mav_farith")
7361 (set_attr "cirrus" "compare")]
7362 )
7363
7364 ; This insn allows redundant compares to be removed by cse; nothing should
7365 ; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7366 ; is deleted later on. The match_dup will match the mode here, so that
7367 ; mode changes of the condition codes aren't lost by this even though we don't
7368 ; specify what they are.
7369
7370 (define_insn "*deleted_compare"
7371 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7372 "TARGET_32BIT"
7373 "\\t%@ deleted compare"
7374 [(set_attr "conds" "set")
7375 (set_attr "length" "0")]
7376 )
7377
7378 \f
7379 ;; Conditional branch insns
7380
7381 (define_expand "cbranch_cc"
7382 [(set (pc)
7383 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7384 (match_operand 2 "" "")])
7385 (label_ref (match_operand 3 "" ""))
7386 (pc)))]
7387 "TARGET_32BIT"
7388 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7389 operands[1], operands[2]);
7390 operands[2] = const0_rtx;"
7391 )
7392
7393 ;;
7394 ;; Patterns to match conditional branch insns.
7395 ;;
7396
7397 (define_insn "*arm_cond_branch"
7398 [(set (pc)
7399 (if_then_else (match_operator 1 "arm_comparison_operator"
7400 [(match_operand 2 "cc_register" "") (const_int 0)])
7401 (label_ref (match_operand 0 "" ""))
7402 (pc)))]
7403 "TARGET_32BIT"
7404 "*
7405 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7406 {
7407 arm_ccfsm_state += 2;
7408 return \"\";
7409 }
7410 return \"b%d1\\t%l0\";
7411 "
7412 [(set_attr "conds" "use")
7413 (set_attr "type" "branch")
7414 (set (attr "length")
7415 (if_then_else
7416 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7417 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7418 (le (minus (match_dup 0) (pc)) (const_int 256))))
7419 (const_int 2)
7420 (const_int 4)))]
7421 )
7422
7423 (define_insn "*arm_cond_branch_reversed"
7424 [(set (pc)
7425 (if_then_else (match_operator 1 "arm_comparison_operator"
7426 [(match_operand 2 "cc_register" "") (const_int 0)])
7427 (pc)
7428 (label_ref (match_operand 0 "" ""))))]
7429 "TARGET_32BIT"
7430 "*
7431 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7432 {
7433 arm_ccfsm_state += 2;
7434 return \"\";
7435 }
7436 return \"b%D1\\t%l0\";
7437 "
7438 [(set_attr "conds" "use")
7439 (set_attr "type" "branch")
7440 (set (attr "length")
7441 (if_then_else
7442 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7443 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7444 (le (minus (match_dup 0) (pc)) (const_int 256))))
7445 (const_int 2)
7446 (const_int 4)))]
7447 )
7448
7449 \f
7450
7451 ; scc insns
7452
7453 (define_expand "cstore_cc"
7454 [(set (match_operand:SI 0 "s_register_operand" "")
7455 (match_operator:SI 1 "" [(match_operand 2 "" "")
7456 (match_operand 3 "" "")]))]
7457 "TARGET_32BIT"
7458 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7459 operands[2], operands[3]);
7460 operands[3] = const0_rtx;"
7461 )
7462
7463 (define_insn "*mov_scc"
7464 [(set (match_operand:SI 0 "s_register_operand" "=r")
7465 (match_operator:SI 1 "arm_comparison_operator"
7466 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7467 "TARGET_ARM"
7468 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7469 [(set_attr "conds" "use")
7470 (set_attr "insn" "mov")
7471 (set_attr "length" "8")]
7472 )
7473
7474 (define_insn "*mov_negscc"
7475 [(set (match_operand:SI 0 "s_register_operand" "=r")
7476 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7477 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7478 "TARGET_ARM"
7479 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7480 [(set_attr "conds" "use")
7481 (set_attr "insn" "mov")
7482 (set_attr "length" "8")]
7483 )
7484
7485 (define_insn "*mov_notscc"
7486 [(set (match_operand:SI 0 "s_register_operand" "=r")
7487 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7488 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7489 "TARGET_ARM"
7490 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7491 [(set_attr "conds" "use")
7492 (set_attr "insn" "mov")
7493 (set_attr "length" "8")]
7494 )
7495
7496 (define_expand "cstoresi4"
7497 [(set (match_operand:SI 0 "s_register_operand" "")
7498 (match_operator:SI 1 "arm_comparison_operator"
7499 [(match_operand:SI 2 "s_register_operand" "")
7500 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7501 "TARGET_32BIT || TARGET_THUMB1"
7502 "{
7503 rtx op3, scratch, scratch2;
7504
7505 if (!TARGET_THUMB1)
7506 {
7507 if (!arm_add_operand (operands[3], SImode))
7508 operands[3] = force_reg (SImode, operands[3]);
7509 emit_insn (gen_cstore_cc (operands[0], operands[1],
7510 operands[2], operands[3]));
7511 DONE;
7512 }
7513
7514 if (operands[3] == const0_rtx)
7515 {
7516 switch (GET_CODE (operands[1]))
7517 {
7518 case EQ:
7519 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7520 break;
7521
7522 case NE:
7523 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7524 break;
7525
7526 case LE:
7527 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7528 NULL_RTX, 0, OPTAB_WIDEN);
7529 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7530 NULL_RTX, 0, OPTAB_WIDEN);
7531 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7532 operands[0], 1, OPTAB_WIDEN);
7533 break;
7534
7535 case GE:
7536 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7537 NULL_RTX, 1);
7538 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7539 NULL_RTX, 1, OPTAB_WIDEN);
7540 break;
7541
7542 case GT:
7543 scratch = expand_binop (SImode, ashr_optab, operands[2],
7544 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7545 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7546 NULL_RTX, 0, OPTAB_WIDEN);
7547 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7548 0, OPTAB_WIDEN);
7549 break;
7550
7551 /* LT is handled by generic code. No need for unsigned with 0. */
7552 default:
7553 FAIL;
7554 }
7555 DONE;
7556 }
7557
7558 switch (GET_CODE (operands[1]))
7559 {
7560 case EQ:
7561 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7562 NULL_RTX, 0, OPTAB_WIDEN);
7563 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7564 break;
7565
7566 case NE:
7567 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7568 NULL_RTX, 0, OPTAB_WIDEN);
7569 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7570 break;
7571
7572 case LE:
7573 op3 = force_reg (SImode, operands[3]);
7574
7575 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7576 NULL_RTX, 1, OPTAB_WIDEN);
7577 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7578 NULL_RTX, 0, OPTAB_WIDEN);
7579 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7580 op3, operands[2]));
7581 break;
7582
7583 case GE:
7584 op3 = operands[3];
7585 if (!thumb1_cmp_operand (op3, SImode))
7586 op3 = force_reg (SImode, op3);
7587 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7588 NULL_RTX, 0, OPTAB_WIDEN);
7589 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7590 NULL_RTX, 1, OPTAB_WIDEN);
7591 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7592 operands[2], op3));
7593 break;
7594
7595 case LEU:
7596 op3 = force_reg (SImode, operands[3]);
7597 scratch = force_reg (SImode, const0_rtx);
7598 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7599 op3, operands[2]));
7600 break;
7601
7602 case GEU:
7603 op3 = operands[3];
7604 if (!thumb1_cmp_operand (op3, SImode))
7605 op3 = force_reg (SImode, op3);
7606 scratch = force_reg (SImode, const0_rtx);
7607 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7608 operands[2], op3));
7609 break;
7610
7611 case LTU:
7612 op3 = operands[3];
7613 if (!thumb1_cmp_operand (op3, SImode))
7614 op3 = force_reg (SImode, op3);
7615 scratch = gen_reg_rtx (SImode);
7616 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7617 break;
7618
7619 case GTU:
7620 op3 = force_reg (SImode, operands[3]);
7621 scratch = gen_reg_rtx (SImode);
7622 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7623 break;
7624
7625 /* No good sequences for GT, LT. */
7626 default:
7627 FAIL;
7628 }
7629 DONE;
7630 }")
7631
7632 (define_expand "cstoresf4"
7633 [(set (match_operand:SI 0 "s_register_operand" "")
7634 (match_operator:SI 1 "arm_comparison_operator"
7635 [(match_operand:SF 2 "s_register_operand" "")
7636 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7637 "TARGET_32BIT && TARGET_HARD_FLOAT"
7638 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7639 operands[2], operands[3])); DONE;"
7640 )
7641
7642 (define_expand "cstoredf4"
7643 [(set (match_operand:SI 0 "s_register_operand" "")
7644 (match_operator:SI 1 "arm_comparison_operator"
7645 [(match_operand:DF 2 "s_register_operand" "")
7646 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7647 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7648 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7649 operands[2], operands[3])); DONE;"
7650 )
7651
7652 (define_expand "cstoredi4"
7653 [(set (match_operand:SI 0 "s_register_operand" "")
7654 (match_operator:SI 1 "arm_comparison_operator"
7655 [(match_operand:DI 2 "cmpdi_operand" "")
7656 (match_operand:DI 3 "cmpdi_operand" "")]))]
7657 "TARGET_32BIT"
7658 "{
7659 rtx swap = NULL_RTX;
7660 enum rtx_code code = GET_CODE (operands[1]);
7661
7662 /* We should not have two constants. */
7663 gcc_assert (GET_MODE (operands[2]) == DImode
7664 || GET_MODE (operands[3]) == DImode);
7665
7666 /* Flip unimplemented DImode comparisons to a form that
7667 arm_gen_compare_reg can handle. */
7668 switch (code)
7669 {
7670 case GT:
7671 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7672 case LE:
7673 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7674 case GTU:
7675 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7676 case LEU:
7677 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7678 default:
7679 break;
7680 }
7681 if (swap)
7682 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7683 operands[2]));
7684 else
7685 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7686 operands[3]));
7687 DONE;
7688 }"
7689 )
7690
7691 (define_expand "cstoresi_eq0_thumb1"
7692 [(parallel
7693 [(set (match_operand:SI 0 "s_register_operand" "")
7694 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7695 (const_int 0)))
7696 (clobber (match_dup:SI 2))])]
7697 "TARGET_THUMB1"
7698 "operands[2] = gen_reg_rtx (SImode);"
7699 )
7700
7701 (define_expand "cstoresi_ne0_thumb1"
7702 [(parallel
7703 [(set (match_operand:SI 0 "s_register_operand" "")
7704 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7705 (const_int 0)))
7706 (clobber (match_dup:SI 2))])]
7707 "TARGET_THUMB1"
7708 "operands[2] = gen_reg_rtx (SImode);"
7709 )
7710
7711 (define_insn "*cstoresi_eq0_thumb1_insn"
7712 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7713 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7714 (const_int 0)))
7715 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7716 "TARGET_THUMB1"
7717 "@
7718 neg\\t%0, %1\;adc\\t%0, %0, %1
7719 neg\\t%2, %1\;adc\\t%0, %1, %2"
7720 [(set_attr "length" "4")]
7721 )
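
;; A sketch of the first alternative above, computing "r0 = (r1 == 0)"
;; (hypothetical registers; the second alternative uses the scratch instead):
;;     neg  r0, r1         @ r0 = -r1; carry is set only when r1 == 0
;;     adc  r0, r0, r1     @ r0 = -r1 + r1 + carry = 0 or 1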
7722
7723 (define_insn "*cstoresi_ne0_thumb1_insn"
7724 [(set (match_operand:SI 0 "s_register_operand" "=l")
7725 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7726 (const_int 0)))
7727 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7728 "TARGET_THUMB1"
7729 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7730 [(set_attr "length" "4")]
7731 )
7732
7733 ;; Used as part of the expansion of thumb ltu and gtu sequences
7734 (define_insn "cstoresi_nltu_thumb1"
7735 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7736 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7737 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7738 "TARGET_THUMB1"
7739 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7740 [(set_attr "length" "4")]
7741 )
7742
7743 (define_insn_and_split "cstoresi_ltu_thumb1"
7744 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7745 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7746 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7747 "TARGET_THUMB1"
7748 "#"
7749 "TARGET_THUMB1"
7750 [(set (match_dup 3)
7751 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7752 (set (match_dup 0) (neg:SI (match_dup 3)))]
7753 "operands[3] = gen_reg_rtx (SImode);"
7754 [(set_attr "length" "4")]
7755 )
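
;; Taken together, "r0 = (r1 < r2)" for unsigned operands expands to
;; something like (hypothetical registers):
;;     cmp  r1, r2         @ carry is clear exactly when r1 < r2
;;     sbc  r0, r0, r0     @ r0 = r0 - r0 - !carry, i.e. -1 or 0
;;     neg  r0, r0         @ normalise to 1 or 0
;; where the first two instructions come from cstoresi_nltu_thumb1 and the
;; final negation from the cstoresi_ltu_thumb1 split above.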
7756
7757 ;; Used as part of the expansion of thumb les sequence.
7758 (define_insn "thumb1_addsi3_addgeu"
7759 [(set (match_operand:SI 0 "s_register_operand" "=l")
7760 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7761 (match_operand:SI 2 "s_register_operand" "l"))
7762 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7763 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7764 "TARGET_THUMB1"
7765 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7766 [(set_attr "length" "4")]
7767 )
7768
7769 \f
7770 ;; Conditional move insns
7771
7772 (define_expand "movsicc"
7773 [(set (match_operand:SI 0 "s_register_operand" "")
7774 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7775 (match_operand:SI 2 "arm_not_operand" "")
7776 (match_operand:SI 3 "arm_not_operand" "")))]
7777 "TARGET_32BIT"
7778 "
7779 {
7780 enum rtx_code code = GET_CODE (operands[1]);
7781 rtx ccreg;
7782
7783 if (code == UNEQ || code == LTGT)
7784 FAIL;
7785
7786 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7787 XEXP (operands[1], 1));
7788 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7789 }"
7790 )
7791
7792 (define_expand "movsfcc"
7793 [(set (match_operand:SF 0 "s_register_operand" "")
7794 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7795 (match_operand:SF 2 "s_register_operand" "")
7796 (match_operand:SF 3 "nonmemory_operand" "")))]
7797 "TARGET_32BIT && TARGET_HARD_FLOAT"
7798 "
7799 {
7800 enum rtx_code code = GET_CODE (operands[1]);
7801 rtx ccreg;
7802
7803 if (code == UNEQ || code == LTGT)
7804 FAIL;
7805
7806 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7807      Otherwise, ensure it is a valid FP add operand.  */
7808 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7809 || (!arm_float_add_operand (operands[3], SFmode)))
7810 operands[3] = force_reg (SFmode, operands[3]);
7811
7812 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7813 XEXP (operands[1], 1));
7814 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7815 }"
7816 )
7817
7818 (define_expand "movdfcc"
7819 [(set (match_operand:DF 0 "s_register_operand" "")
7820 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7821 (match_operand:DF 2 "s_register_operand" "")
7822 (match_operand:DF 3 "arm_float_add_operand" "")))]
7823 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7824 "
7825 {
7826 enum rtx_code code = GET_CODE (operands[1]);
7827 rtx ccreg;
7828
7829 if (code == UNEQ || code == LTGT)
7830 FAIL;
7831
7832 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7833 XEXP (operands[1], 1));
7834 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7835 }"
7836 )
7837
7838 (define_insn "*movsicc_insn"
7839 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7840 (if_then_else:SI
7841 (match_operator 3 "arm_comparison_operator"
7842 [(match_operand 4 "cc_register" "") (const_int 0)])
7843 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7844 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7845 "TARGET_ARM"
7846 "@
7847 mov%D3\\t%0, %2
7848 mvn%D3\\t%0, #%B2
7849 mov%d3\\t%0, %1
7850 mvn%d3\\t%0, #%B1
7851 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7852 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7853 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7854 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7855 [(set_attr "length" "4,4,4,4,8,8,8,8")
7856 (set_attr "conds" "use")
7857 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")]
7858 )
7859
7860 (define_insn "*movsfcc_soft_insn"
7861 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7862 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7863 [(match_operand 4 "cc_register" "") (const_int 0)])
7864 (match_operand:SF 1 "s_register_operand" "0,r")
7865 (match_operand:SF 2 "s_register_operand" "r,0")))]
7866 "TARGET_ARM && TARGET_SOFT_FLOAT"
7867 "@
7868 mov%D3\\t%0, %2
7869 mov%d3\\t%0, %1"
7870 [(set_attr "conds" "use")
7871 (set_attr "insn" "mov")]
7872 )
7873
7874 \f
7875 ;; Jump and linkage insns
7876
7877 (define_expand "jump"
7878 [(set (pc)
7879 (label_ref (match_operand 0 "" "")))]
7880 "TARGET_EITHER"
7881 ""
7882 )
7883
7884 (define_insn "*arm_jump"
7885 [(set (pc)
7886 (label_ref (match_operand 0 "" "")))]
7887 "TARGET_32BIT"
7888 "*
7889 {
7890 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7891 {
7892 arm_ccfsm_state += 2;
7893 return \"\";
7894 }
7895 return \"b%?\\t%l0\";
7896 }
7897 "
7898 [(set_attr "predicable" "yes")
7899 (set (attr "length")
7900 (if_then_else
7901 (and (ne (symbol_ref "TARGET_THUMB2") (const_int 0))
7902 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7903 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7904 (const_int 2)
7905 (const_int 4)))]
7906 )
7907
7908 (define_insn "*thumb_jump"
7909 [(set (pc)
7910 (label_ref (match_operand 0 "" "")))]
7911 "TARGET_THUMB1"
7912 "*
7913 if (get_attr_length (insn) == 2)
7914 return \"b\\t%l0\";
7915 return \"bl\\t%l0\\t%@ far jump\";
7916 "
7917 [(set (attr "far_jump")
7918 (if_then_else
7919 (eq_attr "length" "4")
7920 (const_string "yes")
7921 (const_string "no")))
7922 (set (attr "length")
7923 (if_then_else
7924 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7925 (le (minus (match_dup 0) (pc)) (const_int 2048)))
7926 (const_int 2)
7927 (const_int 4)))]
7928 )
7929
7930 (define_expand "call"
7931 [(parallel [(call (match_operand 0 "memory_operand" "")
7932 (match_operand 1 "general_operand" ""))
7933 (use (match_operand 2 "" ""))
7934 (clobber (reg:SI LR_REGNUM))])]
7935 "TARGET_EITHER"
7936 "
7937 {
7938 rtx callee, pat;
7939
7940 /* In an untyped call, we can get NULL for operand 2. */
7941 if (operands[2] == NULL_RTX)
7942 operands[2] = const0_rtx;
7943
7944 /* Decide if we should generate indirect calls by loading the
7945 32-bit address of the callee into a register before performing the
7946 branch and link. */
7947 callee = XEXP (operands[0], 0);
7948 if (GET_CODE (callee) == SYMBOL_REF
7949 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7950 : !REG_P (callee))
7951 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7952
7953 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7954 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7955 DONE;
7956 }"
7957 )
7958
7959 (define_expand "call_internal"
7960 [(parallel [(call (match_operand 0 "memory_operand" "")
7961 (match_operand 1 "general_operand" ""))
7962 (use (match_operand 2 "" ""))
7963 (clobber (reg:SI LR_REGNUM))])])
7964
7965 (define_insn "*call_reg_armv5"
7966 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7967 (match_operand 1 "" ""))
7968 (use (match_operand 2 "" ""))
7969 (clobber (reg:SI LR_REGNUM))]
7970 "TARGET_ARM && arm_arch5"
7971 "blx%?\\t%0"
7972 [(set_attr "type" "call")]
7973 )
7974
7975 (define_insn "*call_reg_arm"
7976 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7977 (match_operand 1 "" ""))
7978 (use (match_operand 2 "" ""))
7979 (clobber (reg:SI LR_REGNUM))]
7980 "TARGET_ARM && !arm_arch5"
7981 "*
7982 return output_call (operands);
7983 "
7984 ;; The length is the worst case; normally only two instructions are needed.
7985 [(set_attr "length" "12")
7986 (set_attr "type" "call")]
7987 )
7988
7989
7990 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7991 ;; considered a function call by the branch predictor of some cores (PR40887).
7992 ;; Falls back to blx rN (*call_reg_armv5).
7993
7994 (define_insn "*call_mem"
7995 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7996 (match_operand 1 "" ""))
7997 (use (match_operand 2 "" ""))
7998 (clobber (reg:SI LR_REGNUM))]
7999 "TARGET_ARM && !arm_arch5"
8000 "*
8001 return output_call_mem (operands);
8002 "
8003 [(set_attr "length" "12")
8004 (set_attr "type" "call")]
8005 )
8006
8007 (define_insn "*call_reg_thumb1_v5"
8008 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8009 (match_operand 1 "" ""))
8010 (use (match_operand 2 "" ""))
8011 (clobber (reg:SI LR_REGNUM))]
8012 "TARGET_THUMB1 && arm_arch5"
8013 "blx\\t%0"
8014 [(set_attr "length" "2")
8015 (set_attr "type" "call")]
8016 )
8017
8018 (define_insn "*call_reg_thumb1"
8019 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8020 (match_operand 1 "" ""))
8021 (use (match_operand 2 "" ""))
8022 (clobber (reg:SI LR_REGNUM))]
8023 "TARGET_THUMB1 && !arm_arch5"
8024 "*
8025 {
8026 if (!TARGET_CALLER_INTERWORKING)
8027 return thumb_call_via_reg (operands[0]);
8028 else if (operands[1] == const0_rtx)
8029 return \"bl\\t%__interwork_call_via_%0\";
8030 else if (frame_pointer_needed)
8031 return \"bl\\t%__interwork_r7_call_via_%0\";
8032 else
8033 return \"bl\\t%__interwork_r11_call_via_%0\";
8034 }"
8035 [(set_attr "type" "call")]
8036 )
8037
8038 (define_expand "call_value"
8039 [(parallel [(set (match_operand 0 "" "")
8040 (call (match_operand 1 "memory_operand" "")
8041 (match_operand 2 "general_operand" "")))
8042 (use (match_operand 3 "" ""))
8043 (clobber (reg:SI LR_REGNUM))])]
8044 "TARGET_EITHER"
8045 "
8046 {
8047 rtx pat, callee;
8048
8049   /* In an untyped call, we can get NULL for operand 3.  */
8050 if (operands[3] == 0)
8051 operands[3] = const0_rtx;
8052
8053 /* Decide if we should generate indirect calls by loading the
8054 32-bit address of the callee into a register before performing the
8055 branch and link. */
8056 callee = XEXP (operands[1], 0);
8057 if (GET_CODE (callee) == SYMBOL_REF
8058 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8059 : !REG_P (callee))
8060 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8061
8062 pat = gen_call_value_internal (operands[0], operands[1],
8063 operands[2], operands[3]);
8064 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8065 DONE;
8066 }"
8067 )
8068
8069 (define_expand "call_value_internal"
8070 [(parallel [(set (match_operand 0 "" "")
8071 (call (match_operand 1 "memory_operand" "")
8072 (match_operand 2 "general_operand" "")))
8073 (use (match_operand 3 "" ""))
8074 (clobber (reg:SI LR_REGNUM))])])
8075
8076 (define_insn "*call_value_reg_armv5"
8077 [(set (match_operand 0 "" "")
8078 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8079 (match_operand 2 "" "")))
8080 (use (match_operand 3 "" ""))
8081 (clobber (reg:SI LR_REGNUM))]
8082 "TARGET_ARM && arm_arch5"
8083 "blx%?\\t%1"
8084 [(set_attr "type" "call")]
8085 )
8086
8087 (define_insn "*call_value_reg_arm"
8088 [(set (match_operand 0 "" "")
8089 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8090 (match_operand 2 "" "")))
8091 (use (match_operand 3 "" ""))
8092 (clobber (reg:SI LR_REGNUM))]
8093 "TARGET_ARM && !arm_arch5"
8094 "*
8095 return output_call (&operands[1]);
8096 "
8097 [(set_attr "length" "12")
8098 (set_attr "type" "call")]
8099 )
8100
8101 ;; Note: see *call_mem
8102
8103 (define_insn "*call_value_mem"
8104 [(set (match_operand 0 "" "")
8105 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8106 (match_operand 2 "" "")))
8107 (use (match_operand 3 "" ""))
8108 (clobber (reg:SI LR_REGNUM))]
8109 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8110 "*
8111 return output_call_mem (&operands[1]);
8112 "
8113 [(set_attr "length" "12")
8114 (set_attr "type" "call")]
8115 )
8116
8117 (define_insn "*call_value_reg_thumb1_v5"
8118 [(set (match_operand 0 "" "")
8119 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8120 (match_operand 2 "" "")))
8121 (use (match_operand 3 "" ""))
8122 (clobber (reg:SI LR_REGNUM))]
8123 "TARGET_THUMB1 && arm_arch5"
8124 "blx\\t%1"
8125 [(set_attr "length" "2")
8126 (set_attr "type" "call")]
8127 )
8128
8129 (define_insn "*call_value_reg_thumb1"
8130 [(set (match_operand 0 "" "")
8131 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8132 (match_operand 2 "" "")))
8133 (use (match_operand 3 "" ""))
8134 (clobber (reg:SI LR_REGNUM))]
8135 "TARGET_THUMB1 && !arm_arch5"
8136 "*
8137 {
8138 if (!TARGET_CALLER_INTERWORKING)
8139 return thumb_call_via_reg (operands[1]);
8140 else if (operands[2] == const0_rtx)
8141 return \"bl\\t%__interwork_call_via_%1\";
8142 else if (frame_pointer_needed)
8143 return \"bl\\t%__interwork_r7_call_via_%1\";
8144 else
8145 return \"bl\\t%__interwork_r11_call_via_%1\";
8146 }"
8147 [(set_attr "type" "call")]
8148 )
8149
8150 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8151 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8152
8153 (define_insn "*call_symbol"
8154 [(call (mem:SI (match_operand:SI 0 "" ""))
8155 (match_operand 1 "" ""))
8156 (use (match_operand 2 "" ""))
8157 (clobber (reg:SI LR_REGNUM))]
8158 "TARGET_32BIT
8159 && (GET_CODE (operands[0]) == SYMBOL_REF)
8160 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8161 "*
8162 {
8163 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8164 }"
8165 [(set_attr "type" "call")]
8166 )
8167
8168 (define_insn "*call_value_symbol"
8169 [(set (match_operand 0 "" "")
8170 (call (mem:SI (match_operand:SI 1 "" ""))
8171 (match_operand:SI 2 "" "")))
8172 (use (match_operand 3 "" ""))
8173 (clobber (reg:SI LR_REGNUM))]
8174 "TARGET_32BIT
8175 && (GET_CODE (operands[1]) == SYMBOL_REF)
8176 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8177 "*
8178 {
8179 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8180 }"
8181 [(set_attr "type" "call")]
8182 )
8183
8184 (define_insn "*call_insn"
8185 [(call (mem:SI (match_operand:SI 0 "" ""))
8186 (match_operand:SI 1 "" ""))
8187 (use (match_operand 2 "" ""))
8188 (clobber (reg:SI LR_REGNUM))]
8189 "TARGET_THUMB1
8190 && GET_CODE (operands[0]) == SYMBOL_REF
8191 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8192 "bl\\t%a0"
8193 [(set_attr "length" "4")
8194 (set_attr "type" "call")]
8195 )
8196
8197 (define_insn "*call_value_insn"
8198 [(set (match_operand 0 "" "")
8199 (call (mem:SI (match_operand 1 "" ""))
8200 (match_operand 2 "" "")))
8201 (use (match_operand 3 "" ""))
8202 (clobber (reg:SI LR_REGNUM))]
8203 "TARGET_THUMB1
8204 && GET_CODE (operands[1]) == SYMBOL_REF
8205 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8206 "bl\\t%a1"
8207 [(set_attr "length" "4")
8208 (set_attr "type" "call")]
8209 )
8210
8211 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8212 (define_expand "sibcall"
8213 [(parallel [(call (match_operand 0 "memory_operand" "")
8214 (match_operand 1 "general_operand" ""))
8215 (return)
8216 (use (match_operand 2 "" ""))])]
8217 "TARGET_32BIT"
8218 "
8219 {
8220 if (operands[2] == NULL_RTX)
8221 operands[2] = const0_rtx;
8222 }"
8223 )
8224
8225 (define_expand "sibcall_value"
8226 [(parallel [(set (match_operand 0 "" "")
8227 (call (match_operand 1 "memory_operand" "")
8228 (match_operand 2 "general_operand" "")))
8229 (return)
8230 (use (match_operand 3 "" ""))])]
8231 "TARGET_32BIT"
8232 "
8233 {
8234 if (operands[3] == NULL_RTX)
8235 operands[3] = const0_rtx;
8236 }"
8237 )
8238
8239 (define_insn "*sibcall_insn"
8240 [(call (mem:SI (match_operand:SI 0 "" "X"))
8241 (match_operand 1 "" ""))
8242 (return)
8243 (use (match_operand 2 "" ""))]
8244 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8245 "*
8246 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8247 "
8248 [(set_attr "type" "call")]
8249 )
8250
8251 (define_insn "*sibcall_value_insn"
8252 [(set (match_operand 0 "" "")
8253 (call (mem:SI (match_operand:SI 1 "" "X"))
8254 (match_operand 2 "" "")))
8255 (return)
8256 (use (match_operand 3 "" ""))]
8257 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8258 "*
8259 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8260 "
8261 [(set_attr "type" "call")]
8262 )
8263
8264 (define_expand "return"
8265 [(return)]
8266 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8267 "")
8268
8269 ;; Often the return insn will be the same as loading from memory, so set the type attribute to load1.
8270 (define_insn "*arm_return"
8271 [(return)]
8272 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8273 "*
8274 {
8275 if (arm_ccfsm_state == 2)
8276 {
8277 arm_ccfsm_state += 2;
8278 return \"\";
8279 }
8280 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8281 }"
8282 [(set_attr "type" "load1")
8283 (set_attr "length" "12")
8284 (set_attr "predicable" "yes")]
8285 )
8286
8287 (define_insn "*cond_return"
8288 [(set (pc)
8289 (if_then_else (match_operator 0 "arm_comparison_operator"
8290 [(match_operand 1 "cc_register" "") (const_int 0)])
8291 (return)
8292 (pc)))]
8293 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8294 "*
8295 {
8296 if (arm_ccfsm_state == 2)
8297 {
8298 arm_ccfsm_state += 2;
8299 return \"\";
8300 }
8301 return output_return_instruction (operands[0], TRUE, FALSE);
8302 }"
8303 [(set_attr "conds" "use")
8304 (set_attr "length" "12")
8305 (set_attr "type" "load1")]
8306 )
8307
8308 (define_insn "*cond_return_inverted"
8309 [(set (pc)
8310 (if_then_else (match_operator 0 "arm_comparison_operator"
8311 [(match_operand 1 "cc_register" "") (const_int 0)])
8312 (pc)
8313 (return)))]
8314 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8315 "*
8316 {
8317 if (arm_ccfsm_state == 2)
8318 {
8319 arm_ccfsm_state += 2;
8320 return \"\";
8321 }
8322 return output_return_instruction (operands[0], TRUE, TRUE);
8323 }"
8324 [(set_attr "conds" "use")
8325 (set_attr "length" "12")
8326 (set_attr "type" "load1")]
8327 )
8328
8329 ;; Generate a sequence of instructions to determine if the processor is
8330 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8331 ;; mask.
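;; A sketch of how the *check_arch2 test below works (assuming the usual
;; 26-bit behaviour, where a read of r15 as an operand can include the PSR
;; flag bits):
;;     teq  r0, r0         @ sets Z, so the PSR is known to be nonzero
;;     teq  pc, pc         @ 32-bit mode: both reads are the plain PC -> EQ
;;                         @ 26-bit mode: one read includes the PSR -> NE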
8332
8333 (define_expand "return_addr_mask"
8334 [(set (match_dup 1)
8335 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8336 (const_int 0)))
8337 (set (match_operand:SI 0 "s_register_operand" "")
8338 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8339 (const_int -1)
8340 (const_int 67108860)))] ; 0x03fffffc
8341 "TARGET_ARM"
8342 "
8343 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8344 ")
8345
8346 (define_insn "*check_arch2"
8347 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8348 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8349 (const_int 0)))]
8350 "TARGET_ARM"
8351 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8352 [(set_attr "length" "8")
8353 (set_attr "conds" "set")]
8354 )
8355
8356 ;; Call subroutine returning any type.
8357
8358 (define_expand "untyped_call"
8359 [(parallel [(call (match_operand 0 "" "")
8360 (const_int 0))
8361 (match_operand 1 "" "")
8362 (match_operand 2 "" "")])]
8363 "TARGET_EITHER"
8364 "
8365 {
8366 int i;
8367 rtx par = gen_rtx_PARALLEL (VOIDmode,
8368 rtvec_alloc (XVECLEN (operands[2], 0)));
8369 rtx addr = gen_reg_rtx (Pmode);
8370 rtx mem;
8371 int size = 0;
8372
8373 emit_move_insn (addr, XEXP (operands[1], 0));
8374 mem = change_address (operands[1], BLKmode, addr);
8375
8376 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8377 {
8378 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8379
8380 /* Default code only uses r0 as a return value, but we could
8381 be using anything up to 4 registers. */
8382 if (REGNO (src) == R0_REGNUM)
8383 src = gen_rtx_REG (TImode, R0_REGNUM);
8384
8385 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8386 GEN_INT (size));
8387 size += GET_MODE_SIZE (GET_MODE (src));
8388 }
8389
8390 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8391 const0_rtx));
8392
8393 size = 0;
8394
8395 for (i = 0; i < XVECLEN (par, 0); i++)
8396 {
8397 HOST_WIDE_INT offset = 0;
8398 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8399
8400 if (size != 0)
8401 emit_move_insn (addr, plus_constant (addr, size));
8402
8403 mem = change_address (mem, GET_MODE (reg), NULL);
8404 if (REGNO (reg) == R0_REGNUM)
8405 {
8406 /* On thumb we have to use a write-back instruction. */
8407 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8408 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8409 size = TARGET_ARM ? 16 : 0;
8410 }
8411 else
8412 {
8413 emit_move_insn (mem, reg);
8414 size = GET_MODE_SIZE (GET_MODE (reg));
8415 }
8416 }
8417
8418 /* The optimizer does not know that the call sets the function value
8419 registers we stored in the result block. We avoid problems by
8420 claiming that all hard registers are used and clobbered at this
8421 point. */
8422 emit_insn (gen_blockage ());
8423
8424 DONE;
8425 }"
8426 )
8427
8428 (define_expand "untyped_return"
8429 [(match_operand:BLK 0 "memory_operand" "")
8430 (match_operand 1 "" "")]
8431 "TARGET_EITHER"
8432 "
8433 {
8434 int i;
8435 rtx addr = gen_reg_rtx (Pmode);
8436 rtx mem;
8437 int size = 0;
8438
8439 emit_move_insn (addr, XEXP (operands[0], 0));
8440 mem = change_address (operands[0], BLKmode, addr);
8441
8442 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8443 {
8444 HOST_WIDE_INT offset = 0;
8445 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8446
8447 if (size != 0)
8448 emit_move_insn (addr, plus_constant (addr, size));
8449
8450 mem = change_address (mem, GET_MODE (reg), NULL);
8451 if (REGNO (reg) == R0_REGNUM)
8452 {
8453 /* On thumb we have to use a write-back instruction. */
8454 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8455 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8456 size = TARGET_ARM ? 16 : 0;
8457 }
8458 else
8459 {
8460 emit_move_insn (reg, mem);
8461 size = GET_MODE_SIZE (GET_MODE (reg));
8462 }
8463 }
8464
8465 /* Emit USE insns before the return. */
8466 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8467 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8468
8469 /* Construct the return. */
8470 expand_naked_return ();
8471
8472 DONE;
8473 }"
8474 )
8475
8476 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8477 ;; all of memory. This blocks insns from being moved across this point.
8478
8479 (define_insn "blockage"
8480 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8481 "TARGET_EITHER"
8482 ""
8483 [(set_attr "length" "0")
8484 (set_attr "type" "block")]
8485 )
8486
8487 (define_expand "casesi"
8488 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8489 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8490 (match_operand:SI 2 "const_int_operand" "") ; total range
8491 (match_operand:SI 3 "" "") ; table label
8492 (match_operand:SI 4 "" "")] ; Out of range label
8493 "TARGET_32BIT || optimize_size || flag_pic"
8494 "
8495 {
8496 enum insn_code code;
8497 if (operands[1] != const0_rtx)
8498 {
8499 rtx reg = gen_reg_rtx (SImode);
8500
8501 emit_insn (gen_addsi3 (reg, operands[0],
8502 gen_int_mode (-INTVAL (operands[1]),
8503 SImode)));
8504 operands[0] = reg;
8505 }
8506
8507 if (TARGET_ARM)
8508 code = CODE_FOR_arm_casesi_internal;
8509 else if (TARGET_THUMB1)
8510 code = CODE_FOR_thumb1_casesi_internal_pic;
8511 else if (flag_pic)
8512 code = CODE_FOR_thumb2_casesi_internal_pic;
8513 else
8514 code = CODE_FOR_thumb2_casesi_internal;
8515
8516 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8517 operands[2] = force_reg (SImode, operands[2]);
8518
8519 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8520 operands[3], operands[4]));
8521 DONE;
8522 }"
8523 )
8524
8525 ;; The USE in this pattern is needed to tell flow analysis that this is
8526 ;; a CASESI insn. It has no other purpose.
8527 (define_insn "arm_casesi_internal"
8528 [(parallel [(set (pc)
8529 (if_then_else
8530 (leu (match_operand:SI 0 "s_register_operand" "r")
8531 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8532 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8533 (label_ref (match_operand 2 "" ""))))
8534 (label_ref (match_operand 3 "" ""))))
8535 (clobber (reg:CC CC_REGNUM))
8536 (use (label_ref (match_dup 2)))])]
8537 "TARGET_ARM"
8538 "*
8539 if (flag_pic)
8540 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8541 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8542 "
8543 [(set_attr "conds" "clob")
8544 (set_attr "length" "12")]
8545 )
8546
8547 (define_expand "thumb1_casesi_internal_pic"
8548 [(match_operand:SI 0 "s_register_operand" "")
8549 (match_operand:SI 1 "thumb1_cmp_operand" "")
8550 (match_operand 2 "" "")
8551 (match_operand 3 "" "")]
8552 "TARGET_THUMB1"
8553 {
8554 rtx reg0;
8555 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8556 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8557 operands[3]));
8558 reg0 = gen_rtx_REG (SImode, 0);
8559 emit_move_insn (reg0, operands[0]);
8560 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8561 DONE;
8562 }
8563 )
8564
8565 (define_insn "thumb1_casesi_dispatch"
8566 [(parallel [(set (pc) (unspec [(reg:SI 0)
8567 (label_ref (match_operand 0 "" ""))
8568 ;; (label_ref (match_operand 1 "" ""))
8569 ]
8570 UNSPEC_THUMB1_CASESI))
8571 (clobber (reg:SI IP_REGNUM))
8572 (clobber (reg:SI LR_REGNUM))])]
8573 "TARGET_THUMB1"
8574 "* return thumb1_output_casesi(operands);"
8575 [(set_attr "length" "4")]
8576 )
8577
8578 (define_expand "indirect_jump"
8579 [(set (pc)
8580 (match_operand:SI 0 "s_register_operand" ""))]
8581 "TARGET_EITHER"
8582 "
8583 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8584 address and use bx. */
8585 if (TARGET_THUMB2)
8586 {
8587 rtx tmp;
8588 tmp = gen_reg_rtx (SImode);
8589 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8590 operands[0] = tmp;
8591 }
8592 "
8593 )
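;; Sketch of the Thumb-2 path above (r1 standing in for the temporary
;; created by gen_reg_rtx; the actual jump instruction comes from the
;; Thumb-2 indirect-jump pattern defined elsewhere):
;;
;;	orr	r1, r0, #1	@ force the low (Thumb state) bit
;;	bx	r1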
8594
8595 ;; NB Never uses BX.
8596 (define_insn "*arm_indirect_jump"
8597 [(set (pc)
8598 (match_operand:SI 0 "s_register_operand" "r"))]
8599 "TARGET_ARM"
8600 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8601 [(set_attr "predicable" "yes")]
8602 )
8603
8604 (define_insn "*load_indirect_jump"
8605 [(set (pc)
8606 (match_operand:SI 0 "memory_operand" "m"))]
8607 "TARGET_ARM"
8608 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8609 [(set_attr "type" "load1")
8610 (set_attr "pool_range" "4096")
8611 (set_attr "neg_pool_range" "4084")
8612 (set_attr "predicable" "yes")]
8613 )
8614
8615 ;; NB Never uses BX.
8616 (define_insn "*thumb1_indirect_jump"
8617 [(set (pc)
8618 (match_operand:SI 0 "register_operand" "l*r"))]
8619 "TARGET_THUMB1"
8620 "mov\\tpc, %0"
8621 [(set_attr "conds" "clob")
8622 (set_attr "length" "2")]
8623 )
8624
8625 \f
8626 ;; Misc insns
8627
8628 (define_insn "nop"
8629 [(const_int 0)]
8630 "TARGET_EITHER"
8631 "*
8632 if (TARGET_UNIFIED_ASM)
8633 return \"nop\";
8634 if (TARGET_ARM)
8635 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8636 return \"mov\\tr8, r8\";
8637 "
8638 [(set (attr "length")
8639 (if_then_else (eq_attr "is_thumb" "yes")
8640 (const_int 2)
8641 (const_int 4)))]
8642 )
8643
8644 \f
8645 ;; Patterns to allow combination of arithmetic, cond code and shifts
8646
8647 (define_insn "*arith_shiftsi"
8648 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8649 (match_operator:SI 1 "shiftable_operator"
8650 [(match_operator:SI 3 "shift_operator"
8651 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8652 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8653 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8654 "TARGET_32BIT"
8655 "%i1%?\\t%0, %2, %4%S3"
8656 [(set_attr "predicable" "yes")
8657 (set_attr "shift" "4")
8658 (set_attr "arch" "a,t2,t2,a")
8659    ;; Thumb-2 doesn't allow the stack pointer to be used as
8660    ;; operand 1 for any operation other than add and sub.  In that case
8661    ;; the minus operation is a candidate for an RSB and hence needs
8662    ;; to be disabled.
8663 ;; We have to make sure to disable the fourth alternative if
8664 ;; the shift_operator is MULT, since otherwise the insn will
8665 ;; also match a multiply_accumulate pattern and validate_change
8666 ;; will allow a replacement of the constant with a register
8667 ;; despite the checks done in shift_operator.
8668 (set_attr_alternative "insn_enabled"
8669 [(const_string "yes")
8670 (if_then_else
8671 (match_operand:SI 1 "add_operator" "")
8672 (const_string "yes") (const_string "no"))
8673 (const_string "yes")
8674 (if_then_else
8675 (match_operand:SI 3 "mult_operator" "")
8676 (const_string "no") (const_string "yes"))])
8677 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
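;; Illustrative example: (set r0 (plus:SI (ashift:SI r1 (const_int 2)) r2))
;; matches the pattern above and is output as the single instruction
;;
;;	add	r0, r2, r1, lsl #2
;;
;; rather than a separate shift followed by an add (register numbers are
;; for illustration only).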
8678
8679 (define_split
8680 [(set (match_operand:SI 0 "s_register_operand" "")
8681 (match_operator:SI 1 "shiftable_operator"
8682 [(match_operator:SI 2 "shiftable_operator"
8683 [(match_operator:SI 3 "shift_operator"
8684 [(match_operand:SI 4 "s_register_operand" "")
8685 (match_operand:SI 5 "reg_or_int_operand" "")])
8686 (match_operand:SI 6 "s_register_operand" "")])
8687 (match_operand:SI 7 "arm_rhs_operand" "")]))
8688 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8689 "TARGET_32BIT"
8690 [(set (match_dup 8)
8691 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8692 (match_dup 6)]))
8693 (set (match_dup 0)
8694 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8695 "")
8696
8697 (define_insn "*arith_shiftsi_compare0"
8698 [(set (reg:CC_NOOV CC_REGNUM)
8699 (compare:CC_NOOV
8700 (match_operator:SI 1 "shiftable_operator"
8701 [(match_operator:SI 3 "shift_operator"
8702 [(match_operand:SI 4 "s_register_operand" "r,r")
8703 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8704 (match_operand:SI 2 "s_register_operand" "r,r")])
8705 (const_int 0)))
8706 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8707 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8708 (match_dup 2)]))]
8709 "TARGET_32BIT"
8710 "%i1%.\\t%0, %2, %4%S3"
8711 [(set_attr "conds" "set")
8712 (set_attr "shift" "4")
8713 (set_attr "arch" "32,a")
8714 (set_attr "type" "alu_shift,alu_shift_reg")])
8715
8716 (define_insn "*arith_shiftsi_compare0_scratch"
8717 [(set (reg:CC_NOOV CC_REGNUM)
8718 (compare:CC_NOOV
8719 (match_operator:SI 1 "shiftable_operator"
8720 [(match_operator:SI 3 "shift_operator"
8721 [(match_operand:SI 4 "s_register_operand" "r,r")
8722 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8723 (match_operand:SI 2 "s_register_operand" "r,r")])
8724 (const_int 0)))
8725 (clobber (match_scratch:SI 0 "=r,r"))]
8726 "TARGET_32BIT"
8727 "%i1%.\\t%0, %2, %4%S3"
8728 [(set_attr "conds" "set")
8729 (set_attr "shift" "4")
8730 (set_attr "arch" "32,a")
8731 (set_attr "type" "alu_shift,alu_shift_reg")])
8732
8733 (define_insn "*sub_shiftsi"
8734 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8735 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8736 (match_operator:SI 2 "shift_operator"
8737 [(match_operand:SI 3 "s_register_operand" "r,r")
8738 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8739 "TARGET_32BIT"
8740 "sub%?\\t%0, %1, %3%S2"
8741 [(set_attr "predicable" "yes")
8742 (set_attr "shift" "3")
8743 (set_attr "arch" "32,a")
8744 (set_attr "type" "alu_shift,alu_shift_reg")])
8745
8746 (define_insn "*sub_shiftsi_compare0"
8747 [(set (reg:CC_NOOV CC_REGNUM)
8748 (compare:CC_NOOV
8749 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8750 (match_operator:SI 2 "shift_operator"
8751 [(match_operand:SI 3 "s_register_operand" "r,r")
8752 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8753 (const_int 0)))
8754 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8755 (minus:SI (match_dup 1)
8756 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8757 "TARGET_32BIT"
8758 "sub%.\\t%0, %1, %3%S2"
8759 [(set_attr "conds" "set")
8760 (set_attr "shift" "3")
8761 (set_attr "arch" "32,a")
8762 (set_attr "type" "alu_shift,alu_shift_reg")])
8763
8764 (define_insn "*sub_shiftsi_compare0_scratch"
8765 [(set (reg:CC_NOOV CC_REGNUM)
8766 (compare:CC_NOOV
8767 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8768 (match_operator:SI 2 "shift_operator"
8769 [(match_operand:SI 3 "s_register_operand" "r,r")
8770 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8771 (const_int 0)))
8772 (clobber (match_scratch:SI 0 "=r,r"))]
8773 "TARGET_32BIT"
8774 "sub%.\\t%0, %1, %3%S2"
8775 [(set_attr "conds" "set")
8776 (set_attr "shift" "3")
8777 (set_attr "arch" "32,a")
8778 (set_attr "type" "alu_shift,alu_shift_reg")])
8779 \f
8780
8781 (define_insn "*and_scc"
8782 [(set (match_operand:SI 0 "s_register_operand" "=r")
8783 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8784 [(match_operand 3 "cc_register" "") (const_int 0)])
8785 (match_operand:SI 2 "s_register_operand" "r")))]
8786 "TARGET_ARM"
8787 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8788 [(set_attr "conds" "use")
8789 (set_attr "insn" "mov")
8790 (set_attr "length" "8")]
8791 )
8792
8793 (define_insn "*ior_scc"
8794 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8795 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8796 [(match_operand 3 "cc_register" "") (const_int 0)])
8797 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8798 "TARGET_ARM"
8799 "@
8800 orr%d2\\t%0, %1, #1
8801 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8802 [(set_attr "conds" "use")
8803 (set_attr "length" "4,8")]
8804 )
8805
8806 ; A series of splitters for the compare_scc pattern below. Note that
8807 ; order is important.
8808 (define_split
8809 [(set (match_operand:SI 0 "s_register_operand" "")
8810 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8811 (const_int 0)))
8812 (clobber (reg:CC CC_REGNUM))]
8813 "TARGET_32BIT && reload_completed"
8814 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8815
8816 (define_split
8817 [(set (match_operand:SI 0 "s_register_operand" "")
8818 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8819 (const_int 0)))
8820 (clobber (reg:CC CC_REGNUM))]
8821 "TARGET_32BIT && reload_completed"
8822 [(set (match_dup 0) (not:SI (match_dup 1)))
8823 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
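;; Illustrative output for the two splitters above (register numbers are
;; arbitrary):
;;
;;	r0 = (r1 < 0)	->	mov	r0, r1, lsr #31
;;	r0 = (r1 >= 0)	->	mvn	r0, r1
;;				mov	r0, r0, lsr #31
;;
;; i.e. the sign bit is extracted (inverted first for GE) without touching
;; the condition codes.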
8824
8825 (define_split
8826 [(set (match_operand:SI 0 "s_register_operand" "")
8827 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8828 (const_int 0)))
8829 (clobber (reg:CC CC_REGNUM))]
8830 "TARGET_32BIT && reload_completed"
8831 [(parallel
8832 [(set (reg:CC CC_REGNUM)
8833 (compare:CC (const_int 1) (match_dup 1)))
8834 (set (match_dup 0)
8835 (minus:SI (const_int 1) (match_dup 1)))])
8836 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8837 (set (match_dup 0) (const_int 0)))])
8838
8839 (define_split
8840 [(set (match_operand:SI 0 "s_register_operand" "")
8841 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8842 (match_operand:SI 2 "const_int_operand" "")))
8843 (clobber (reg:CC CC_REGNUM))]
8844 "TARGET_32BIT && reload_completed"
8845 [(parallel
8846 [(set (reg:CC CC_REGNUM)
8847 (compare:CC (match_dup 1) (match_dup 2)))
8848 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8849 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8850 (set (match_dup 0) (const_int 1)))]
8851 {
8852 operands[3] = GEN_INT (-INTVAL (operands[2]));
8853 })
8854
8855 (define_split
8856 [(set (match_operand:SI 0 "s_register_operand" "")
8857 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8858 (match_operand:SI 2 "arm_add_operand" "")))
8859 (clobber (reg:CC CC_REGNUM))]
8860 "TARGET_32BIT && reload_completed"
8861 [(parallel
8862 [(set (reg:CC_NOOV CC_REGNUM)
8863 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8864 (const_int 0)))
8865 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8866 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8867 (set (match_dup 0) (const_int 1)))])
8868
8869 (define_insn_and_split "*compare_scc"
8870 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8871 (match_operator:SI 1 "arm_comparison_operator"
8872 [(match_operand:SI 2 "s_register_operand" "r,r")
8873 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8874 (clobber (reg:CC CC_REGNUM))]
8875 "TARGET_32BIT"
8876 "#"
8877 "&& reload_completed"
8878 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8879 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8880 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8881 {
8882 rtx tmp1;
8883 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8884 operands[2], operands[3]);
8885 enum rtx_code rc = GET_CODE (operands[1]);
8886
8887 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8888
8889 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8890 if (mode == CCFPmode || mode == CCFPEmode)
8891 rc = reverse_condition_maybe_unordered (rc);
8892 else
8893 rc = reverse_condition (rc);
8894 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8895 })
8896
8897 ;; Attempt to improve the sequence generated by the compare_scc splitters
8898 ;; so that it does not use conditional execution.
8899 (define_peephole2
8900 [(set (reg:CC CC_REGNUM)
8901 (compare:CC (match_operand:SI 1 "register_operand" "")
8902 (match_operand:SI 2 "arm_rhs_operand" "")))
8903 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8904 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8905 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8906 (set (match_dup 0) (const_int 1)))
8907 (match_scratch:SI 3 "r")]
8908 "TARGET_32BIT"
8909 [(parallel
8910 [(set (reg:CC CC_REGNUM)
8911 (compare:CC (match_dup 1) (match_dup 2)))
8912 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
8913 (parallel
8914 [(set (reg:CC CC_REGNUM)
8915 (compare:CC (const_int 0) (match_dup 3)))
8916 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8917 (parallel
8918 [(set (match_dup 0)
8919 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8920 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
8921 (clobber (reg:CC CC_REGNUM))])])
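;; A sketch of the peephole's replacement sequence (r1/r2 the compared
;; values, r3 the scratch; mnemonics are approximate):
;;
;;	subs	r3, r1, r2
;;	rsbs	r0, r3, #0
;;	adc	r0, r0, r3
;;
;; which leaves r0 = 1 when r1 == r2 and r0 = 0 otherwise, with no IT block
;; or conditional moves required.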
8922
8923 (define_insn "*cond_move"
8924 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8925 (if_then_else:SI (match_operator 3 "equality_operator"
8926 [(match_operator 4 "arm_comparison_operator"
8927 [(match_operand 5 "cc_register" "") (const_int 0)])
8928 (const_int 0)])
8929 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8930 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8931 "TARGET_ARM"
8932 "*
8933 if (GET_CODE (operands[3]) == NE)
8934 {
8935 if (which_alternative != 1)
8936 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8937 if (which_alternative != 0)
8938 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8939 return \"\";
8940 }
8941 if (which_alternative != 0)
8942 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8943 if (which_alternative != 1)
8944 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8945 return \"\";
8946 "
8947 [(set_attr "conds" "use")
8948 (set_attr "insn" "mov")
8949 (set_attr "length" "4,4,8")]
8950 )
8951
8952 (define_insn "*cond_arith"
8953 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8954 (match_operator:SI 5 "shiftable_operator"
8955 [(match_operator:SI 4 "arm_comparison_operator"
8956 [(match_operand:SI 2 "s_register_operand" "r,r")
8957 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8958 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8959 (clobber (reg:CC CC_REGNUM))]
8960 "TARGET_ARM"
8961 "*
8962 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8963 return \"%i5\\t%0, %1, %2, lsr #31\";
8964
8965 output_asm_insn (\"cmp\\t%2, %3\", operands);
8966 if (GET_CODE (operands[5]) == AND)
8967 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8968 else if (GET_CODE (operands[5]) == MINUS)
8969 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8970 else if (which_alternative != 0)
8971 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8972 return \"%i5%d4\\t%0, %1, #1\";
8973 "
8974 [(set_attr "conds" "clob")
8975 (set_attr "length" "12")]
8976 )
8977
8978 (define_insn "*cond_sub"
8979 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8980 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8981 (match_operator:SI 4 "arm_comparison_operator"
8982 [(match_operand:SI 2 "s_register_operand" "r,r")
8983 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8984 (clobber (reg:CC CC_REGNUM))]
8985 "TARGET_ARM"
8986 "*
8987 output_asm_insn (\"cmp\\t%2, %3\", operands);
8988 if (which_alternative != 0)
8989 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8990 return \"sub%d4\\t%0, %1, #1\";
8991 "
8992 [(set_attr "conds" "clob")
8993 (set_attr "length" "8,12")]
8994 )
8995
8996 (define_insn "*cmp_ite0"
8997 [(set (match_operand 6 "dominant_cc_register" "")
8998 (compare
8999 (if_then_else:SI
9000 (match_operator 4 "arm_comparison_operator"
9001 [(match_operand:SI 0 "s_register_operand"
9002 "l,l,l,r,r,r,r,r,r")
9003 (match_operand:SI 1 "arm_add_operand"
9004 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9005 (match_operator:SI 5 "arm_comparison_operator"
9006 [(match_operand:SI 2 "s_register_operand"
9007 "l,r,r,l,l,r,r,r,r")
9008 (match_operand:SI 3 "arm_add_operand"
9009 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9010 (const_int 0))
9011 (const_int 0)))]
9012 "TARGET_32BIT"
9013 "*
9014 {
9015 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9016 {
9017 {\"cmp%d5\\t%0, %1\",
9018 \"cmp%d4\\t%2, %3\"},
9019 {\"cmn%d5\\t%0, #%n1\",
9020 \"cmp%d4\\t%2, %3\"},
9021 {\"cmp%d5\\t%0, %1\",
9022 \"cmn%d4\\t%2, #%n3\"},
9023 {\"cmn%d5\\t%0, #%n1\",
9024 \"cmn%d4\\t%2, #%n3\"}
9025 };
9026 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9027 {
9028 {\"cmp\\t%2, %3\",
9029 \"cmp\\t%0, %1\"},
9030 {\"cmp\\t%2, %3\",
9031 \"cmn\\t%0, #%n1\"},
9032 {\"cmn\\t%2, #%n3\",
9033 \"cmp\\t%0, %1\"},
9034 {\"cmn\\t%2, #%n3\",
9035 \"cmn\\t%0, #%n1\"}
9036 };
9037 static const char * const ite[2] =
9038 {
9039 \"it\\t%d5\",
9040 \"it\\t%d4\"
9041 };
9042 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9043 CMP_CMP, CMN_CMP, CMP_CMP,
9044 CMN_CMP, CMP_CMN, CMN_CMN};
9045 int swap =
9046 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9047
9048 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9049 if (TARGET_THUMB2) {
9050 output_asm_insn (ite[swap], operands);
9051 }
9052 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9053 return \"\";
9054 }"
9055 [(set_attr "conds" "set")
9056 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9057 (set_attr_alternative "length"
9058 [(const_int 6)
9059 (const_int 8)
9060 (const_int 8)
9061 (const_int 8)
9062 (const_int 8)
9063 (if_then_else (eq_attr "is_thumb" "no")
9064 (const_int 8)
9065 (const_int 10))
9066 (if_then_else (eq_attr "is_thumb" "no")
9067 (const_int 8)
9068 (const_int 10))
9069 (if_then_else (eq_attr "is_thumb" "no")
9070 (const_int 8)
9071 (const_int 10))
9072 (if_then_else (eq_attr "is_thumb" "no")
9073 (const_int 8)
9074 (const_int 10))])]
9075 )
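;; Illustrative output of the pattern above for the simple cmp/cmp
;; alternatives with no operand swap: the second comparison is performed
;; unconditionally and the first one only when it still matters, i.e.
;;
;;	cmp	%2, %3
;;	cmp%d5	%0, %1
;;
;; with an "it %d5" inserted before the conditional compare on Thumb-2, as
;; the cmp1/cmp2/ite tables in the output code show.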
9076
9077 (define_insn "*cmp_ite1"
9078 [(set (match_operand 6 "dominant_cc_register" "")
9079 (compare
9080 (if_then_else:SI
9081 (match_operator 4 "arm_comparison_operator"
9082 [(match_operand:SI 0 "s_register_operand"
9083 "l,l,l,r,r,r,r,r,r")
9084 (match_operand:SI 1 "arm_add_operand"
9085 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9086 (match_operator:SI 5 "arm_comparison_operator"
9087 [(match_operand:SI 2 "s_register_operand"
9088 "l,r,r,l,l,r,r,r,r")
9089 (match_operand:SI 3 "arm_add_operand"
9090 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9091 (const_int 1))
9092 (const_int 0)))]
9093 "TARGET_32BIT"
9094 "*
9095 {
9096 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9097 {
9098 {\"cmp\\t%0, %1\",
9099 \"cmp\\t%2, %3\"},
9100 {\"cmn\\t%0, #%n1\",
9101 \"cmp\\t%2, %3\"},
9102 {\"cmp\\t%0, %1\",
9103 \"cmn\\t%2, #%n3\"},
9104 {\"cmn\\t%0, #%n1\",
9105 \"cmn\\t%2, #%n3\"}
9106 };
9107 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9108 {
9109 {\"cmp%d4\\t%2, %3\",
9110 \"cmp%D5\\t%0, %1\"},
9111 {\"cmp%d4\\t%2, %3\",
9112 \"cmn%D5\\t%0, #%n1\"},
9113 {\"cmn%d4\\t%2, #%n3\",
9114 \"cmp%D5\\t%0, %1\"},
9115 {\"cmn%d4\\t%2, #%n3\",
9116 \"cmn%D5\\t%0, #%n1\"}
9117 };
9118 static const char * const ite[2] =
9119 {
9120 \"it\\t%d4\",
9121 \"it\\t%D5\"
9122 };
9123 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9124 CMP_CMP, CMN_CMP, CMP_CMP,
9125 CMN_CMP, CMP_CMN, CMN_CMN};
9126 int swap =
9127 comparison_dominates_p (GET_CODE (operands[5]),
9128 reverse_condition (GET_CODE (operands[4])));
9129
9130 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9131 if (TARGET_THUMB2) {
9132 output_asm_insn (ite[swap], operands);
9133 }
9134 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9135 return \"\";
9136 }"
9137 [(set_attr "conds" "set")
9138 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9139 (set_attr_alternative "length"
9140 [(const_int 6)
9141 (const_int 8)
9142 (const_int 8)
9143 (const_int 8)
9144 (const_int 8)
9145 (if_then_else (eq_attr "is_thumb" "no")
9146 (const_int 8)
9147 (const_int 10))
9148 (if_then_else (eq_attr "is_thumb" "no")
9149 (const_int 8)
9150 (const_int 10))
9151 (if_then_else (eq_attr "is_thumb" "no")
9152 (const_int 8)
9153 (const_int 10))
9154 (if_then_else (eq_attr "is_thumb" "no")
9155 (const_int 8)
9156 (const_int 10))])]
9157 )
9158
9159 (define_insn "*cmp_and"
9160 [(set (match_operand 6 "dominant_cc_register" "")
9161 (compare
9162 (and:SI
9163 (match_operator 4 "arm_comparison_operator"
9164 [(match_operand:SI 0 "s_register_operand"
9165 "l,l,l,r,r,r,r,r,r")
9166 (match_operand:SI 1 "arm_add_operand"
9167 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9168 (match_operator:SI 5 "arm_comparison_operator"
9169 [(match_operand:SI 2 "s_register_operand"
9170 "l,r,r,l,l,r,r,r,r")
9171 (match_operand:SI 3 "arm_add_operand"
9172 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9173 (const_int 0)))]
9174 "TARGET_32BIT"
9175 "*
9176 {
9177 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9178 {
9179 {\"cmp%d5\\t%0, %1\",
9180 \"cmp%d4\\t%2, %3\"},
9181 {\"cmn%d5\\t%0, #%n1\",
9182 \"cmp%d4\\t%2, %3\"},
9183 {\"cmp%d5\\t%0, %1\",
9184 \"cmn%d4\\t%2, #%n3\"},
9185 {\"cmn%d5\\t%0, #%n1\",
9186 \"cmn%d4\\t%2, #%n3\"}
9187 };
9188 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9189 {
9190 {\"cmp\\t%2, %3\",
9191 \"cmp\\t%0, %1\"},
9192 {\"cmp\\t%2, %3\",
9193 \"cmn\\t%0, #%n1\"},
9194 {\"cmn\\t%2, #%n3\",
9195 \"cmp\\t%0, %1\"},
9196 {\"cmn\\t%2, #%n3\",
9197 \"cmn\\t%0, #%n1\"}
9198 };
9199 static const char *const ite[2] =
9200 {
9201 \"it\\t%d5\",
9202 \"it\\t%d4\"
9203 };
9204 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9205 CMP_CMP, CMN_CMP, CMP_CMP,
9206 CMN_CMP, CMP_CMN, CMN_CMN};
9207 int swap =
9208 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9209
9210 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9211 if (TARGET_THUMB2) {
9212 output_asm_insn (ite[swap], operands);
9213 }
9214 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9215 return \"\";
9216 }"
9217 [(set_attr "conds" "set")
9218 (set_attr "predicable" "no")
9219 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9220 (set_attr_alternative "length"
9221 [(const_int 6)
9222 (const_int 8)
9223 (const_int 8)
9224 (const_int 8)
9225 (const_int 8)
9226 (if_then_else (eq_attr "is_thumb" "no")
9227 (const_int 8)
9228 (const_int 10))
9229 (if_then_else (eq_attr "is_thumb" "no")
9230 (const_int 8)
9231 (const_int 10))
9232 (if_then_else (eq_attr "is_thumb" "no")
9233 (const_int 8)
9234 (const_int 10))
9235 (if_then_else (eq_attr "is_thumb" "no")
9236 (const_int 8)
9237 (const_int 10))])]
9238 )
9239
9240 (define_insn "*cmp_ior"
9241 [(set (match_operand 6 "dominant_cc_register" "")
9242 (compare
9243 (ior:SI
9244 (match_operator 4 "arm_comparison_operator"
9245 [(match_operand:SI 0 "s_register_operand"
9246 "l,l,l,r,r,r,r,r,r")
9247 (match_operand:SI 1 "arm_add_operand"
9248 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9249 (match_operator:SI 5 "arm_comparison_operator"
9250 [(match_operand:SI 2 "s_register_operand"
9251 "l,r,r,l,l,r,r,r,r")
9252 (match_operand:SI 3 "arm_add_operand"
9253 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9254 (const_int 0)))]
9255 "TARGET_32BIT"
9256 "*
9257 {
9258 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9259 {
9260 {\"cmp\\t%0, %1\",
9261 \"cmp\\t%2, %3\"},
9262 {\"cmn\\t%0, #%n1\",
9263 \"cmp\\t%2, %3\"},
9264 {\"cmp\\t%0, %1\",
9265 \"cmn\\t%2, #%n3\"},
9266 {\"cmn\\t%0, #%n1\",
9267 \"cmn\\t%2, #%n3\"}
9268 };
9269 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9270 {
9271 {\"cmp%D4\\t%2, %3\",
9272 \"cmp%D5\\t%0, %1\"},
9273 {\"cmp%D4\\t%2, %3\",
9274 \"cmn%D5\\t%0, #%n1\"},
9275 {\"cmn%D4\\t%2, #%n3\",
9276 \"cmp%D5\\t%0, %1\"},
9277 {\"cmn%D4\\t%2, #%n3\",
9278 \"cmn%D5\\t%0, #%n1\"}
9279 };
9280 static const char *const ite[2] =
9281 {
9282 \"it\\t%D4\",
9283 \"it\\t%D5\"
9284 };
9285 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9286 CMP_CMP, CMN_CMP, CMP_CMP,
9287 CMN_CMP, CMP_CMN, CMN_CMN};
9288 int swap =
9289 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9290
9291 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9292 if (TARGET_THUMB2) {
9293 output_asm_insn (ite[swap], operands);
9294 }
9295 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9296 return \"\";
9297 }
9298 "
9299 [(set_attr "conds" "set")
9300 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9301 (set_attr_alternative "length"
9302 [(const_int 6)
9303 (const_int 8)
9304 (const_int 8)
9305 (const_int 8)
9306 (const_int 8)
9307 (if_then_else (eq_attr "is_thumb" "no")
9308 (const_int 8)
9309 (const_int 10))
9310 (if_then_else (eq_attr "is_thumb" "no")
9311 (const_int 8)
9312 (const_int 10))
9313 (if_then_else (eq_attr "is_thumb" "no")
9314 (const_int 8)
9315 (const_int 10))
9316 (if_then_else (eq_attr "is_thumb" "no")
9317 (const_int 8)
9318 (const_int 10))])]
9319 )
9320
9321 (define_insn_and_split "*ior_scc_scc"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r")
9323 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9324 [(match_operand:SI 1 "s_register_operand" "r")
9325 (match_operand:SI 2 "arm_add_operand" "rIL")])
9326 (match_operator:SI 6 "arm_comparison_operator"
9327 [(match_operand:SI 4 "s_register_operand" "r")
9328 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9329 (clobber (reg:CC CC_REGNUM))]
9330 "TARGET_32BIT
9331 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9332 != CCmode)"
9333 "#"
9334 "TARGET_32BIT && reload_completed"
9335 [(set (match_dup 7)
9336 (compare
9337 (ior:SI
9338 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9339 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9340 (const_int 0)))
9341 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9342 "operands[7]
9343 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9344 DOM_CC_X_OR_Y),
9345 CC_REGNUM);"
9346 [(set_attr "conds" "clob")
9347 (set_attr "length" "16")])
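;; A sketch of the code the split above leads to: the combined comparison is
;; matched by the *cmp_ior pattern earlier in this file, and the trailing
;; (ne (match_dup 7) (const_int 0)) by an SCC pattern, so the whole OR of
;; two comparisons becomes two compares (the second one conditional or
;; negated as needed) followed by a 0/1 result set up with conditional
;; moves; four instructions in total, matching the "length" of 16 above.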
9348
9349 ; If the above pattern is followed by a CMP insn, then the compare is
9350 ; redundant, since we can rework the conditional instruction that follows.
9351 (define_insn_and_split "*ior_scc_scc_cmp"
9352 [(set (match_operand 0 "dominant_cc_register" "")
9353 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9354 [(match_operand:SI 1 "s_register_operand" "r")
9355 (match_operand:SI 2 "arm_add_operand" "rIL")])
9356 (match_operator:SI 6 "arm_comparison_operator"
9357 [(match_operand:SI 4 "s_register_operand" "r")
9358 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9359 (const_int 0)))
9360 (set (match_operand:SI 7 "s_register_operand" "=r")
9361 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9362 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9363 "TARGET_32BIT"
9364 "#"
9365 "TARGET_32BIT && reload_completed"
9366 [(set (match_dup 0)
9367 (compare
9368 (ior:SI
9369 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9370 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9371 (const_int 0)))
9372 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9373 ""
9374 [(set_attr "conds" "set")
9375 (set_attr "length" "16")])
9376
9377 (define_insn_and_split "*and_scc_scc"
9378 [(set (match_operand:SI 0 "s_register_operand" "=r")
9379 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9380 [(match_operand:SI 1 "s_register_operand" "r")
9381 (match_operand:SI 2 "arm_add_operand" "rIL")])
9382 (match_operator:SI 6 "arm_comparison_operator"
9383 [(match_operand:SI 4 "s_register_operand" "r")
9384 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9385 (clobber (reg:CC CC_REGNUM))]
9386 "TARGET_32BIT
9387 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9388 != CCmode)"
9389 "#"
9390 "TARGET_32BIT && reload_completed
9391 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9392 != CCmode)"
9393 [(set (match_dup 7)
9394 (compare
9395 (and:SI
9396 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9397 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9398 (const_int 0)))
9399 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9400 "operands[7]
9401 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9402 DOM_CC_X_AND_Y),
9403 CC_REGNUM);"
9404 [(set_attr "conds" "clob")
9405 (set_attr "length" "16")])
9406
9407 ; If the above pattern is followed by a CMP insn, then the compare is
9408 ; redundant, since we can rework the conditional instruction that follows.
9409 (define_insn_and_split "*and_scc_scc_cmp"
9410 [(set (match_operand 0 "dominant_cc_register" "")
9411 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9412 [(match_operand:SI 1 "s_register_operand" "r")
9413 (match_operand:SI 2 "arm_add_operand" "rIL")])
9414 (match_operator:SI 6 "arm_comparison_operator"
9415 [(match_operand:SI 4 "s_register_operand" "r")
9416 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9417 (const_int 0)))
9418 (set (match_operand:SI 7 "s_register_operand" "=r")
9419 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9420 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9421 "TARGET_32BIT"
9422 "#"
9423 "TARGET_32BIT && reload_completed"
9424 [(set (match_dup 0)
9425 (compare
9426 (and:SI
9427 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9428 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9429 (const_int 0)))
9430 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9431 ""
9432 [(set_attr "conds" "set")
9433 (set_attr "length" "16")])
9434
9435 ;; If there is no dominance in the comparison, then we can still save an
9436 ;; instruction in the AND case, since we know that the second compare
9437 ;; need only zero the value if it is false (if true, then the value is
9438 ;; already correct).
9439 (define_insn_and_split "*and_scc_scc_nodom"
9440 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9441 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9442 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9443 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9444 (match_operator:SI 6 "arm_comparison_operator"
9445 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9446 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9447 (clobber (reg:CC CC_REGNUM))]
9448 "TARGET_32BIT
9449 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9450 == CCmode)"
9451 "#"
9452 "TARGET_32BIT && reload_completed"
9453 [(parallel [(set (match_dup 0)
9454 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9455 (clobber (reg:CC CC_REGNUM))])
9456 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9457 (set (match_dup 0)
9458 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9459 (match_dup 0)
9460 (const_int 0)))]
9461 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9462 operands[4], operands[5]),
9463 CC_REGNUM);
9464 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9465 operands[5]);"
9466 [(set_attr "conds" "clob")
9467 (set_attr "length" "20")])
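;; Worked example for the split above (illustrative register numbers):
;; r0 = (r1 < r2) && (r4 == r5) first materialises the LT comparison as a
;; normal SCC into r0, and the second comparison then only has to clear r0
;; when it fails, roughly:
;;
;;	cmp	r1, r2
;;	movge	r0, #0
;;	movlt	r0, #1
;;	cmp	r4, r5
;;	movne	r0, #0
;;
;; one instruction fewer than computing both SCC values and ANDing them
;; (consistent with the "length" of 20 above).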
9468
9469 (define_split
9470 [(set (reg:CC_NOOV CC_REGNUM)
9471 (compare:CC_NOOV (ior:SI
9472 (and:SI (match_operand:SI 0 "s_register_operand" "")
9473 (const_int 1))
9474 (match_operator:SI 1 "arm_comparison_operator"
9475 [(match_operand:SI 2 "s_register_operand" "")
9476 (match_operand:SI 3 "arm_add_operand" "")]))
9477 (const_int 0)))
9478 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9479 "TARGET_ARM"
9480 [(set (match_dup 4)
9481 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9482 (match_dup 0)))
9483 (set (reg:CC_NOOV CC_REGNUM)
9484 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9485 (const_int 0)))]
9486 "")
9487
9488 (define_split
9489 [(set (reg:CC_NOOV CC_REGNUM)
9490 (compare:CC_NOOV (ior:SI
9491 (match_operator:SI 1 "arm_comparison_operator"
9492 [(match_operand:SI 2 "s_register_operand" "")
9493 (match_operand:SI 3 "arm_add_operand" "")])
9494 (and:SI (match_operand:SI 0 "s_register_operand" "")
9495 (const_int 1)))
9496 (const_int 0)))
9497 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9498 "TARGET_ARM"
9499 [(set (match_dup 4)
9500 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9501 (match_dup 0)))
9502 (set (reg:CC_NOOV CC_REGNUM)
9503 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9504 (const_int 0)))]
9505 "")
9506 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9507
9508 (define_insn "*negscc"
9509 [(set (match_operand:SI 0 "s_register_operand" "=r")
9510 (neg:SI (match_operator 3 "arm_comparison_operator"
9511 [(match_operand:SI 1 "s_register_operand" "r")
9512 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9513 (clobber (reg:CC CC_REGNUM))]
9514 "TARGET_ARM"
9515 "*
9516 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9517 return \"mov\\t%0, %1, asr #31\";
9518
9519 if (GET_CODE (operands[3]) == NE)
9520 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9521
9522 output_asm_insn (\"cmp\\t%1, %2\", operands);
9523 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9524 return \"mvn%d3\\t%0, #0\";
9525 "
9526 [(set_attr "conds" "clob")
9527 (set_attr "length" "12")]
9528 )
9529
9530 (define_insn "movcond"
9531 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9532 (if_then_else:SI
9533 (match_operator 5 "arm_comparison_operator"
9534 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9535 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9536 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9537 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9538 (clobber (reg:CC CC_REGNUM))]
9539 "TARGET_ARM"
9540 "*
9541 if (GET_CODE (operands[5]) == LT
9542 && (operands[4] == const0_rtx))
9543 {
9544 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9545 {
9546 if (operands[2] == const0_rtx)
9547 return \"and\\t%0, %1, %3, asr #31\";
9548 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9549 }
9550 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9551 {
9552 if (operands[1] == const0_rtx)
9553 return \"bic\\t%0, %2, %3, asr #31\";
9554 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9555 }
9556 /* The only case that falls through to here is when both ops 1 & 2
9557 are constants. */
9558 }
9559
9560 if (GET_CODE (operands[5]) == GE
9561 && (operands[4] == const0_rtx))
9562 {
9563 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9564 {
9565 if (operands[2] == const0_rtx)
9566 return \"bic\\t%0, %1, %3, asr #31\";
9567 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9568 }
9569 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9570 {
9571 if (operands[1] == const0_rtx)
9572 return \"and\\t%0, %2, %3, asr #31\";
9573 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9574 }
9575 /* The only case that falls through to here is when both ops 1 & 2
9576 are constants. */
9577 }
9578 if (GET_CODE (operands[4]) == CONST_INT
9579 && !const_ok_for_arm (INTVAL (operands[4])))
9580 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9581 else
9582 output_asm_insn (\"cmp\\t%3, %4\", operands);
9583 if (which_alternative != 0)
9584 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9585 if (which_alternative != 1)
9586 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9587 return \"\";
9588 "
9589 [(set_attr "conds" "clob")
9590 (set_attr "length" "8,8,12")]
9591 )
9592
9593 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9594
9595 (define_insn "*ifcompare_plus_move"
9596 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9597 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9598 [(match_operand:SI 4 "s_register_operand" "r,r")
9599 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9600 (plus:SI
9601 (match_operand:SI 2 "s_register_operand" "r,r")
9602 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9603 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9604 (clobber (reg:CC CC_REGNUM))]
9605 "TARGET_ARM"
9606 "#"
9607 [(set_attr "conds" "clob")
9608 (set_attr "length" "8,12")]
9609 )
9610
9611 (define_insn "*if_plus_move"
9612 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9613 (if_then_else:SI
9614 (match_operator 4 "arm_comparison_operator"
9615 [(match_operand 5 "cc_register" "") (const_int 0)])
9616 (plus:SI
9617 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9618 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9619 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9620 "TARGET_ARM"
9621 "@
9622 add%d4\\t%0, %2, %3
9623 sub%d4\\t%0, %2, #%n3
9624 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9625 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9626 [(set_attr "conds" "use")
9627 (set_attr "length" "4,4,8,8")
9628 (set_attr "type" "*,*,*,*")]
9629 )
9630
9631 (define_insn "*ifcompare_move_plus"
9632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9633 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9634 [(match_operand:SI 4 "s_register_operand" "r,r")
9635 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9636 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9637 (plus:SI
9638 (match_operand:SI 2 "s_register_operand" "r,r")
9639 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9640 (clobber (reg:CC CC_REGNUM))]
9641 "TARGET_ARM"
9642 "#"
9643 [(set_attr "conds" "clob")
9644 (set_attr "length" "8,12")]
9645 )
9646
9647 (define_insn "*if_move_plus"
9648 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9649 (if_then_else:SI
9650 (match_operator 4 "arm_comparison_operator"
9651 [(match_operand 5 "cc_register" "") (const_int 0)])
9652 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9653 (plus:SI
9654 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9655 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9656 "TARGET_ARM"
9657 "@
9658 add%D4\\t%0, %2, %3
9659 sub%D4\\t%0, %2, #%n3
9660 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9661 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9662 [(set_attr "conds" "use")
9663 (set_attr "length" "4,4,8,8")
9664 (set_attr "type" "*,*,*,*")]
9665 )
9666
9667 (define_insn "*ifcompare_arith_arith"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r")
9669 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9670 [(match_operand:SI 5 "s_register_operand" "r")
9671 (match_operand:SI 6 "arm_add_operand" "rIL")])
9672 (match_operator:SI 8 "shiftable_operator"
9673 [(match_operand:SI 1 "s_register_operand" "r")
9674 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9675 (match_operator:SI 7 "shiftable_operator"
9676 [(match_operand:SI 3 "s_register_operand" "r")
9677 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9678 (clobber (reg:CC CC_REGNUM))]
9679 "TARGET_ARM"
9680 "#"
9681 [(set_attr "conds" "clob")
9682 (set_attr "length" "12")]
9683 )
9684
9685 (define_insn "*if_arith_arith"
9686 [(set (match_operand:SI 0 "s_register_operand" "=r")
9687 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9688 [(match_operand 8 "cc_register" "") (const_int 0)])
9689 (match_operator:SI 6 "shiftable_operator"
9690 [(match_operand:SI 1 "s_register_operand" "r")
9691 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9692 (match_operator:SI 7 "shiftable_operator"
9693 [(match_operand:SI 3 "s_register_operand" "r")
9694 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9695 "TARGET_ARM"
9696 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9697 [(set_attr "conds" "use")
9698 (set_attr "length" "8")]
9699 )
9700
9701 (define_insn "*ifcompare_arith_move"
9702 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9703 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9704 [(match_operand:SI 2 "s_register_operand" "r,r")
9705 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9706 (match_operator:SI 7 "shiftable_operator"
9707 [(match_operand:SI 4 "s_register_operand" "r,r")
9708 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9709 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9710 (clobber (reg:CC CC_REGNUM))]
9711 "TARGET_ARM"
9712 "*
9713   /* If (op x 0) is the identity operation, the conditional operator is LT or
9714      GE, we are comparing against zero, and everything is in registers, then
9715      we can do this in two instructions; see the example after this pattern. */
9716 if (operands[3] == const0_rtx
9717 && GET_CODE (operands[7]) != AND
9718 && GET_CODE (operands[5]) == REG
9719 && GET_CODE (operands[1]) == REG
9720 && REGNO (operands[1]) == REGNO (operands[4])
9721 && REGNO (operands[4]) != REGNO (operands[0]))
9722 {
9723 if (GET_CODE (operands[6]) == LT)
9724 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9725 else if (GET_CODE (operands[6]) == GE)
9726 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9727 }
9728 if (GET_CODE (operands[3]) == CONST_INT
9729 && !const_ok_for_arm (INTVAL (operands[3])))
9730 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9731 else
9732 output_asm_insn (\"cmp\\t%2, %3\", operands);
9733 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9734 if (which_alternative != 0)
9735 return \"mov%D6\\t%0, %1\";
9736 return \"\";
9737 "
9738 [(set_attr "conds" "clob")
9739 (set_attr "length" "8,12")]
9740 )
9741
9742 (define_insn "*if_arith_move"
9743 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9744 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9745 [(match_operand 6 "cc_register" "") (const_int 0)])
9746 (match_operator:SI 5 "shiftable_operator"
9747 [(match_operand:SI 2 "s_register_operand" "r,r")
9748 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9749 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9750 "TARGET_ARM"
9751 "@
9752 %I5%d4\\t%0, %2, %3
9753 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9754 [(set_attr "conds" "use")
9755 (set_attr "length" "4,8")
9756 (set_attr "type" "*,*")]
9757 )
9758
9759 (define_insn "*ifcompare_move_arith"
9760 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9761 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9762 [(match_operand:SI 4 "s_register_operand" "r,r")
9763 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9764 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9765 (match_operator:SI 7 "shiftable_operator"
9766 [(match_operand:SI 2 "s_register_operand" "r,r")
9767 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9768 (clobber (reg:CC CC_REGNUM))]
9769 "TARGET_ARM"
9770 "*
9771   /* If (op x 0) is the identity operation, the conditional operator is LT or
9772      GE, we are comparing against zero, and everything is in registers, then
9773      we can do this in two instructions.  */
9774 if (operands[5] == const0_rtx
9775 && GET_CODE (operands[7]) != AND
9776 && GET_CODE (operands[3]) == REG
9777 && GET_CODE (operands[1]) == REG
9778 && REGNO (operands[1]) == REGNO (operands[2])
9779 && REGNO (operands[2]) != REGNO (operands[0]))
9780 {
9781 if (GET_CODE (operands[6]) == GE)
9782 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9783 else if (GET_CODE (operands[6]) == LT)
9784 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9785 }
9786
9787 if (GET_CODE (operands[5]) == CONST_INT
9788 && !const_ok_for_arm (INTVAL (operands[5])))
9789 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9790 else
9791 output_asm_insn (\"cmp\\t%4, %5\", operands);
9792
9793 if (which_alternative != 0)
9794 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9795 return \"%I7%D6\\t%0, %2, %3\";
9796 "
9797 [(set_attr "conds" "clob")
9798 (set_attr "length" "8,12")]
9799 )
9800
9801 (define_insn "*if_move_arith"
9802 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9803 (if_then_else:SI
9804 (match_operator 4 "arm_comparison_operator"
9805 [(match_operand 6 "cc_register" "") (const_int 0)])
9806 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9807 (match_operator:SI 5 "shiftable_operator"
9808 [(match_operand:SI 2 "s_register_operand" "r,r")
9809 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9810 "TARGET_ARM"
9811 "@
9812 %I5%D4\\t%0, %2, %3
9813 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9814 [(set_attr "conds" "use")
9815 (set_attr "length" "4,8")
9816 (set_attr "type" "*,*")]
9817 )
9818
9819 (define_insn "*ifcompare_move_not"
9820 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9821 (if_then_else:SI
9822 (match_operator 5 "arm_comparison_operator"
9823 [(match_operand:SI 3 "s_register_operand" "r,r")
9824 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9825 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9826 (not:SI
9827 (match_operand:SI 2 "s_register_operand" "r,r"))))
9828 (clobber (reg:CC CC_REGNUM))]
9829 "TARGET_ARM"
9830 "#"
9831 [(set_attr "conds" "clob")
9832 (set_attr "length" "8,12")]
9833 )
9834
9835 (define_insn "*if_move_not"
9836 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9837 (if_then_else:SI
9838 (match_operator 4 "arm_comparison_operator"
9839 [(match_operand 3 "cc_register" "") (const_int 0)])
9840 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9841 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9842 "TARGET_ARM"
9843 "@
9844 mvn%D4\\t%0, %2
9845 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9846 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9847 [(set_attr "conds" "use")
9848 (set_attr "insn" "mvn")
9849 (set_attr "length" "4,8,8")]
9850 )
9851
9852 (define_insn "*ifcompare_not_move"
9853 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9854 (if_then_else:SI
9855 (match_operator 5 "arm_comparison_operator"
9856 [(match_operand:SI 3 "s_register_operand" "r,r")
9857 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9858 (not:SI
9859 (match_operand:SI 2 "s_register_operand" "r,r"))
9860 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9861 (clobber (reg:CC CC_REGNUM))]
9862 "TARGET_ARM"
9863 "#"
9864 [(set_attr "conds" "clob")
9865 (set_attr "length" "8,12")]
9866 )
9867
9868 (define_insn "*if_not_move"
9869 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9870 (if_then_else:SI
9871 (match_operator 4 "arm_comparison_operator"
9872 [(match_operand 3 "cc_register" "") (const_int 0)])
9873 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9874 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9875 "TARGET_ARM"
9876 "@
9877 mvn%d4\\t%0, %2
9878 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9879 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9880 [(set_attr "conds" "use")
9881 (set_attr "insn" "mvn")
9882 (set_attr "length" "4,8,8")]
9883 )
9884
9885 (define_insn "*ifcompare_shift_move"
9886 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9887 (if_then_else:SI
9888 (match_operator 6 "arm_comparison_operator"
9889 [(match_operand:SI 4 "s_register_operand" "r,r")
9890 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9891 (match_operator:SI 7 "shift_operator"
9892 [(match_operand:SI 2 "s_register_operand" "r,r")
9893 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9894 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9895 (clobber (reg:CC CC_REGNUM))]
9896 "TARGET_ARM"
9897 "#"
9898 [(set_attr "conds" "clob")
9899 (set_attr "length" "8,12")]
9900 )
9901
9902 (define_insn "*if_shift_move"
9903 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9904 (if_then_else:SI
9905 (match_operator 5 "arm_comparison_operator"
9906 [(match_operand 6 "cc_register" "") (const_int 0)])
9907 (match_operator:SI 4 "shift_operator"
9908 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9909 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9910 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9911 "TARGET_ARM"
9912 "@
9913 mov%d5\\t%0, %2%S4
9914 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9915 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9916 [(set_attr "conds" "use")
9917 (set_attr "shift" "2")
9918 (set_attr "length" "4,8,8")
9919 (set_attr "insn" "mov")
9920 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9921 (const_string "alu_shift")
9922 (const_string "alu_shift_reg")))]
9923 )
9924
9925 (define_insn "*ifcompare_move_shift"
9926 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9927 (if_then_else:SI
9928 (match_operator 6 "arm_comparison_operator"
9929 [(match_operand:SI 4 "s_register_operand" "r,r")
9930 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9931 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9932 (match_operator:SI 7 "shift_operator"
9933 [(match_operand:SI 2 "s_register_operand" "r,r")
9934 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9935 (clobber (reg:CC CC_REGNUM))]
9936 "TARGET_ARM"
9937 "#"
9938 [(set_attr "conds" "clob")
9939 (set_attr "length" "8,12")]
9940 )
9941
9942 (define_insn "*if_move_shift"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9944 (if_then_else:SI
9945 (match_operator 5 "arm_comparison_operator"
9946 [(match_operand 6 "cc_register" "") (const_int 0)])
9947 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9948 (match_operator:SI 4 "shift_operator"
9949 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9950 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9951 "TARGET_ARM"
9952 "@
9953 mov%D5\\t%0, %2%S4
9954 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9955 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9956 [(set_attr "conds" "use")
9957 (set_attr "shift" "2")
9958 (set_attr "length" "4,8,8")
9959 (set_attr "insn" "mov")
9960 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9961 (const_string "alu_shift")
9962 (const_string "alu_shift_reg")))]
9963 )
9964
9965 (define_insn "*ifcompare_shift_shift"
9966 [(set (match_operand:SI 0 "s_register_operand" "=r")
9967 (if_then_else:SI
9968 (match_operator 7 "arm_comparison_operator"
9969 [(match_operand:SI 5 "s_register_operand" "r")
9970 (match_operand:SI 6 "arm_add_operand" "rIL")])
9971 (match_operator:SI 8 "shift_operator"
9972 [(match_operand:SI 1 "s_register_operand" "r")
9973 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9974 (match_operator:SI 9 "shift_operator"
9975 [(match_operand:SI 3 "s_register_operand" "r")
9976 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9977 (clobber (reg:CC CC_REGNUM))]
9978 "TARGET_ARM"
9979 "#"
9980 [(set_attr "conds" "clob")
9981 (set_attr "length" "12")]
9982 )
9983
9984 (define_insn "*if_shift_shift"
9985 [(set (match_operand:SI 0 "s_register_operand" "=r")
9986 (if_then_else:SI
9987 (match_operator 5 "arm_comparison_operator"
9988 [(match_operand 8 "cc_register" "") (const_int 0)])
9989 (match_operator:SI 6 "shift_operator"
9990 [(match_operand:SI 1 "s_register_operand" "r")
9991 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9992 (match_operator:SI 7 "shift_operator"
9993 [(match_operand:SI 3 "s_register_operand" "r")
9994 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9995 "TARGET_ARM"
9996 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9997 [(set_attr "conds" "use")
9998 (set_attr "shift" "1")
9999 (set_attr "length" "8")
10000 (set_attr "insn" "mov")
10001 (set (attr "type") (if_then_else
10002 (and (match_operand 2 "const_int_operand" "")
10003 (match_operand 4 "const_int_operand" ""))
10004 (const_string "alu_shift")
10005 (const_string "alu_shift_reg")))]
10006 )
10007
10008 (define_insn "*ifcompare_not_arith"
10009 [(set (match_operand:SI 0 "s_register_operand" "=r")
10010 (if_then_else:SI
10011 (match_operator 6 "arm_comparison_operator"
10012 [(match_operand:SI 4 "s_register_operand" "r")
10013 (match_operand:SI 5 "arm_add_operand" "rIL")])
10014 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10015 (match_operator:SI 7 "shiftable_operator"
10016 [(match_operand:SI 2 "s_register_operand" "r")
10017 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10018 (clobber (reg:CC CC_REGNUM))]
10019 "TARGET_ARM"
10020 "#"
10021 [(set_attr "conds" "clob")
10022 (set_attr "length" "12")]
10023 )
10024
10025 (define_insn "*if_not_arith"
10026 [(set (match_operand:SI 0 "s_register_operand" "=r")
10027 (if_then_else:SI
10028 (match_operator 5 "arm_comparison_operator"
10029 [(match_operand 4 "cc_register" "") (const_int 0)])
10030 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10031 (match_operator:SI 6 "shiftable_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10034 "TARGET_ARM"
10035 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10036 [(set_attr "conds" "use")
10037 (set_attr "insn" "mvn")
10038 (set_attr "length" "8")]
10039 )
10040
10041 (define_insn "*ifcompare_arith_not"
10042 [(set (match_operand:SI 0 "s_register_operand" "=r")
10043 (if_then_else:SI
10044 (match_operator 6 "arm_comparison_operator"
10045 [(match_operand:SI 4 "s_register_operand" "r")
10046 (match_operand:SI 5 "arm_add_operand" "rIL")])
10047 (match_operator:SI 7 "shiftable_operator"
10048 [(match_operand:SI 2 "s_register_operand" "r")
10049 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10050 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10051 (clobber (reg:CC CC_REGNUM))]
10052 "TARGET_ARM"
10053 "#"
10054 [(set_attr "conds" "clob")
10055 (set_attr "length" "12")]
10056 )
10057
10058 (define_insn "*if_arith_not"
10059 [(set (match_operand:SI 0 "s_register_operand" "=r")
10060 (if_then_else:SI
10061 (match_operator 5 "arm_comparison_operator"
10062 [(match_operand 4 "cc_register" "") (const_int 0)])
10063 (match_operator:SI 6 "shiftable_operator"
10064 [(match_operand:SI 2 "s_register_operand" "r")
10065 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10066 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10067 "TARGET_ARM"
10068 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10069 [(set_attr "conds" "use")
10070 (set_attr "insn" "mvn")
10071 (set_attr "length" "8")]
10072 )
10073
10074 (define_insn "*ifcompare_neg_move"
10075 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10076 (if_then_else:SI
10077 (match_operator 5 "arm_comparison_operator"
10078 [(match_operand:SI 3 "s_register_operand" "r,r")
10079 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10080 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10081 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10082 (clobber (reg:CC CC_REGNUM))]
10083 "TARGET_ARM"
10084 "#"
10085 [(set_attr "conds" "clob")
10086 (set_attr "length" "8,12")]
10087 )
10088
10089 (define_insn "*if_neg_move"
10090 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10091 (if_then_else:SI
10092 (match_operator 4 "arm_comparison_operator"
10093 [(match_operand 3 "cc_register" "") (const_int 0)])
10094 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10095 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10096 "TARGET_ARM"
10097 "@
10098 rsb%d4\\t%0, %2, #0
10099 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10100 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10101 [(set_attr "conds" "use")
10102 (set_attr "length" "4,8,8")]
10103 )
10104
10105 (define_insn "*ifcompare_move_neg"
10106 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10107 (if_then_else:SI
10108 (match_operator 5 "arm_comparison_operator"
10109 [(match_operand:SI 3 "s_register_operand" "r,r")
10110 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10111 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10112 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10113 (clobber (reg:CC CC_REGNUM))]
10114 "TARGET_ARM"
10115 "#"
10116 [(set_attr "conds" "clob")
10117 (set_attr "length" "8,12")]
10118 )
10119
10120 (define_insn "*if_move_neg"
10121 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10122 (if_then_else:SI
10123 (match_operator 4 "arm_comparison_operator"
10124 [(match_operand 3 "cc_register" "") (const_int 0)])
10125 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10126 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10127 "TARGET_ARM"
10128 "@
10129 rsb%D4\\t%0, %2, #0
10130 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10131 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10132 [(set_attr "conds" "use")
10133 (set_attr "length" "4,8,8")]
10134 )
10135
10136 (define_insn "*arith_adjacentmem"
10137 [(set (match_operand:SI 0 "s_register_operand" "=r")
10138 (match_operator:SI 1 "shiftable_operator"
10139 [(match_operand:SI 2 "memory_operand" "m")
10140 (match_operand:SI 3 "memory_operand" "m")]))
10141 (clobber (match_scratch:SI 4 "=r"))]
10142 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10143 "*
10144 {
10145 rtx ldm[3];
10146 rtx arith[4];
10147 rtx base_reg;
10148 HOST_WIDE_INT val1 = 0, val2 = 0;
10149
10150 if (REGNO (operands[0]) > REGNO (operands[4]))
10151 {
10152 ldm[1] = operands[4];
10153 ldm[2] = operands[0];
10154 }
10155 else
10156 {
10157 ldm[1] = operands[0];
10158 ldm[2] = operands[4];
10159 }
10160
10161 base_reg = XEXP (operands[2], 0);
10162
10163 if (!REG_P (base_reg))
10164 {
10165 val1 = INTVAL (XEXP (base_reg, 1));
10166 base_reg = XEXP (base_reg, 0);
10167 }
10168
10169 if (!REG_P (XEXP (operands[3], 0)))
10170 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10171
10172 arith[0] = operands[0];
10173 arith[3] = operands[1];
10174
10175 if (val1 < val2)
10176 {
10177 arith[1] = ldm[1];
10178 arith[2] = ldm[2];
10179 }
10180 else
10181 {
10182 arith[1] = ldm[2];
10183 arith[2] = ldm[1];
10184 }
10185
10186 ldm[0] = base_reg;
10187 if (val1 != 0 && val2 != 0)
10188 {
10189 rtx ops[3];
10190
10191 if (val1 == 4 || val2 == 4)
10192 /* Other val must be 8, since we know they are adjacent and neither
10193 is zero. */
10194 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10195 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10196 {
10197 ldm[0] = ops[0] = operands[4];
10198 ops[1] = base_reg;
10199 ops[2] = GEN_INT (val1);
10200 output_add_immediate (ops);
10201 if (val1 < val2)
10202 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10203 else
10204 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10205 }
10206 else
10207 {
10208 /* Offset is out of range for a single add, so use two ldr. */
10209 ops[0] = ldm[1];
10210 ops[1] = base_reg;
10211 ops[2] = GEN_INT (val1);
10212 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10213 ops[0] = ldm[2];
10214 ops[2] = GEN_INT (val2);
10215 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10216 }
10217 }
10218 else if (val1 != 0)
10219 {
10220 if (val1 < val2)
10221 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10222 else
10223 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10224 }
10225 else
10226 {
10227 if (val1 < val2)
10228 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10229 else
10230 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10231 }
10232 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10233 return \"\";
10234 }"
10235 [(set_attr "length" "12")
10236 (set_attr "predicable" "yes")
10237 (set_attr "type" "load1")]
10238 )
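;; A rough illustration of the pattern above (editorial sketch only; register
;; choice and addressing depend on what combine actually built).  For a
;; hypothetical C function
;;   int sum2 (int *p) { return p[0] + p[1]; }
;; the output template can turn the two adjacent word loads plus the add into
;; something like
;;   ldmia  r0, {r0, r3}
;;   add    r0, r0, r3
;; rather than two separate ldr instructions followed by the add.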
10239
10240 ; This pattern is never tried by combine, so do it as a peephole
10241
10242 (define_peephole2
10243 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10244 (match_operand:SI 1 "arm_general_register_operand" ""))
10245 (set (reg:CC CC_REGNUM)
10246 (compare:CC (match_dup 1) (const_int 0)))]
10247 "TARGET_ARM"
10248 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10249 (set (match_dup 0) (match_dup 1))])]
10250 ""
10251 )
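;; Editorial sketch of the peephole above: a register copy followed by a
;; compare of the source against zero, roughly
;;   mov   r0, r1
;;   cmp   r1, #0
;; is rewritten as a single parallel (compare plus move) so that, assuming a
;; flag-setting move pattern is available to match it, it can be emitted as
;; one instruction such as
;;   subs  r0, r1, #0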
10252
10253 (define_split
10254 [(set (match_operand:SI 0 "s_register_operand" "")
10255 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10256 (const_int 0))
10257 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10258 [(match_operand:SI 3 "s_register_operand" "")
10259 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10260 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10261 "TARGET_ARM"
10262 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10263 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10264 (match_dup 5)))]
10265 ""
10266 )
10267
10268 ;; This split can be used because CC_Z mode implies that the following
10269 ;; branch will be an equality, or an unsigned inequality, so the sign
10270 ;; extension is not needed.
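;; For example (an editorial sketch, not verbatim RTL): given a constant whose
;; low 24 bits are clear, such as 0x2a000000,
;;   (set (reg:CC_Z CC_REGNUM)
;;        (compare:CC_Z (ashift:SI (subreg:SI (mem:QI ...) 0) (const_int 24))
;;                      (const_int 0x2a000000)))
;; becomes a zero_extend of the byte into the scratch register followed by a
;; compare of that register against 0x2a (the constant shifted right by 24),
;; which is safe because only equality or unsigned tests follow a CC_Z compare.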
10271
10272 (define_split
10273 [(set (reg:CC_Z CC_REGNUM)
10274 (compare:CC_Z
10275 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10276 (const_int 24))
10277 (match_operand 1 "const_int_operand" "")))
10278 (clobber (match_scratch:SI 2 ""))]
10279 "TARGET_ARM
10280 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10281 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10282 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10283 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10284 "
10285 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10286 "
10287 )
10288 ;; ??? Check the patterns above for Thumb-2 usefulness
10289
10290 (define_expand "prologue"
10291 [(clobber (const_int 0))]
10292 "TARGET_EITHER"
10293 "if (TARGET_32BIT)
10294 arm_expand_prologue ();
10295 else
10296 thumb1_expand_prologue ();
10297 DONE;
10298 "
10299 )
10300
10301 (define_expand "epilogue"
10302 [(clobber (const_int 0))]
10303 "TARGET_EITHER"
10304 "
10305 if (crtl->calls_eh_return)
10306 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10307 if (TARGET_THUMB1)
10308 thumb1_expand_epilogue ();
10309 else if (USE_RETURN_INSN (FALSE))
10310 {
10311 emit_jump_insn (gen_return ());
10312 DONE;
10313 }
10314 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10315 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10316 DONE;
10317 "
10318 )
10319
10320 (define_insn "prologue_thumb1_interwork"
10321 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10322 "TARGET_THUMB1"
10323 "* return thumb1_output_interwork ();"
10324 [(set_attr "length" "8")]
10325 )
10326
10327 ;; Note - although unspec_volatiles USE all hard registers,
10328 ;; USEs are ignored after reload has completed. Thus we need
10329 ;; to add an unspec of the link register to ensure that flow
10330 ;; does not think that it is unused by the sibcall branch that
10331 ;; will replace the standard function epilogue.
10332 (define_insn "sibcall_epilogue"
10333 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10334 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10335 "TARGET_32BIT"
10336 "*
10337 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10338 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10339 return arm_output_epilogue (next_nonnote_insn (insn));
10340 "
10341 ;; Length is absolute worst case
10342 [(set_attr "length" "44")
10343 (set_attr "type" "block")
10344 ;; We don't clobber the conditions, but the potential length of this
10345 ;; operation is sufficient to make conditionalizing the sequence
10346 ;; unlikely to be profitable.
10347 (set_attr "conds" "clob")]
10348 )
10349
10350 (define_insn "*epilogue_insns"
10351 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10352 "TARGET_EITHER"
10353 "*
10354 if (TARGET_32BIT)
10355 return arm_output_epilogue (NULL);
10356 else /* TARGET_THUMB1 */
10357 return thumb_unexpanded_epilogue ();
10358 "
10359 ; Length is absolute worst case
10360 [(set_attr "length" "44")
10361 (set_attr "type" "block")
10362 ;; We don't clobber the conditions, but the potential length of this
10363 ;; operation is sufficient to make conditionalizing the sequence
10364 ;; unlikely to be profitable.
10365 (set_attr "conds" "clob")]
10366 )
10367
10368 (define_expand "eh_epilogue"
10369 [(use (match_operand:SI 0 "register_operand" ""))
10370 (use (match_operand:SI 1 "register_operand" ""))
10371 (use (match_operand:SI 2 "register_operand" ""))]
10372 "TARGET_EITHER"
10373 "
10374 {
10375 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10376 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10377 {
10378 rtx ra = gen_rtx_REG (Pmode, 2);
10379
10380 emit_move_insn (ra, operands[2]);
10381 operands[2] = ra;
10382 }
10383 /* This is a hack -- we may have crystallized the function type too
10384 early. */
10385 cfun->machine->func_type = 0;
10386 }"
10387 )
10388
10389 ;; This split is only used during output to reduce the number of patterns
10390 ;; that need assembler instructions added to them. We allowed the setting
10391 ;; of the conditions to be implicit during rtl generation so that
10392 ;; the conditional compare patterns would work. However, this conflicts to
10393 ;; some extent with the conditional data operations, so we have to split them
10394 ;; up again here.
10395
10396 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10397 ;; conditional execution sufficient?
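;; As an editorial sketch of the first splitter below (modes and register
;; numbers are illustrative only): an insn of the form
;;   (set (reg:SI 0)
;;        (if_then_else:SI (lt (reg:SI 1) (const_int 0))
;;                         (reg:SI 0)
;;                         (reg:SI 2)))
;; is split after reload into an explicit compare that sets the condition
;; register, followed by a conditional move under the reversed condition:
;;   (set (reg:CC CC_REGNUM) (compare:CC (reg:SI 1) (const_int 0)))
;;   (cond_exec (ge (reg:CC CC_REGNUM) (const_int 0))
;;              (set (reg:SI 0) (reg:SI 2)))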
10398
10399 (define_split
10400 [(set (match_operand:SI 0 "s_register_operand" "")
10401 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10402 [(match_operand 2 "" "") (match_operand 3 "" "")])
10403 (match_dup 0)
10404 (match_operand 4 "" "")))
10405 (clobber (reg:CC CC_REGNUM))]
10406 "TARGET_ARM && reload_completed"
10407 [(set (match_dup 5) (match_dup 6))
10408 (cond_exec (match_dup 7)
10409 (set (match_dup 0) (match_dup 4)))]
10410 "
10411 {
10412 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10413 operands[2], operands[3]);
10414 enum rtx_code rc = GET_CODE (operands[1]);
10415
10416 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10417 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10418 if (mode == CCFPmode || mode == CCFPEmode)
10419 rc = reverse_condition_maybe_unordered (rc);
10420 else
10421 rc = reverse_condition (rc);
10422
10423 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10424 }"
10425 )
10426
10427 (define_split
10428 [(set (match_operand:SI 0 "s_register_operand" "")
10429 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10430 [(match_operand 2 "" "") (match_operand 3 "" "")])
10431 (match_operand 4 "" "")
10432 (match_dup 0)))
10433 (clobber (reg:CC CC_REGNUM))]
10434 "TARGET_ARM && reload_completed"
10435 [(set (match_dup 5) (match_dup 6))
10436 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10437 (set (match_dup 0) (match_dup 4)))]
10438 "
10439 {
10440 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10441 operands[2], operands[3]);
10442
10443 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10444 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10445 }"
10446 )
10447
10448 (define_split
10449 [(set (match_operand:SI 0 "s_register_operand" "")
10450 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10451 [(match_operand 2 "" "") (match_operand 3 "" "")])
10452 (match_operand 4 "" "")
10453 (match_operand 5 "" "")))
10454 (clobber (reg:CC CC_REGNUM))]
10455 "TARGET_ARM && reload_completed"
10456 [(set (match_dup 6) (match_dup 7))
10457 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10458 (set (match_dup 0) (match_dup 4)))
10459 (cond_exec (match_dup 8)
10460 (set (match_dup 0) (match_dup 5)))]
10461 "
10462 {
10463 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10464 operands[2], operands[3]);
10465 enum rtx_code rc = GET_CODE (operands[1]);
10466
10467 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10468 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10469 if (mode == CCFPmode || mode == CCFPEmode)
10470 rc = reverse_condition_maybe_unordered (rc);
10471 else
10472 rc = reverse_condition (rc);
10473
10474 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10475 }"
10476 )
10477
10478 (define_split
10479 [(set (match_operand:SI 0 "s_register_operand" "")
10480 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10481 [(match_operand:SI 2 "s_register_operand" "")
10482 (match_operand:SI 3 "arm_add_operand" "")])
10483 (match_operand:SI 4 "arm_rhs_operand" "")
10484 (not:SI
10485 (match_operand:SI 5 "s_register_operand" ""))))
10486 (clobber (reg:CC CC_REGNUM))]
10487 "TARGET_ARM && reload_completed"
10488 [(set (match_dup 6) (match_dup 7))
10489 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10490 (set (match_dup 0) (match_dup 4)))
10491 (cond_exec (match_dup 8)
10492 (set (match_dup 0) (not:SI (match_dup 5))))]
10493 "
10494 {
10495 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10496 operands[2], operands[3]);
10497 enum rtx_code rc = GET_CODE (operands[1]);
10498
10499 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10500 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10501 if (mode == CCFPmode || mode == CCFPEmode)
10502 rc = reverse_condition_maybe_unordered (rc);
10503 else
10504 rc = reverse_condition (rc);
10505
10506 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10507 }"
10508 )
10509
10510 (define_insn "*cond_move_not"
10511 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10512 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10513 [(match_operand 3 "cc_register" "") (const_int 0)])
10514 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10515 (not:SI
10516 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10517 "TARGET_ARM"
10518 "@
10519 mvn%D4\\t%0, %2
10520 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10521 [(set_attr "conds" "use")
10522 (set_attr "insn" "mvn")
10523 (set_attr "length" "4,8")]
10524 )
10525
10526 ;; The next two patterns occur when an AND operation is followed by a
10527 ;; scc insn sequence.
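;; Editorial sketch of *sign_extract_onebit below (bit position 3 chosen
;; arbitrarily, so the mask becomes 1 << 3): extracting a single bit as a
;; signed value is emitted as
;;   ands   r0, r1, #8
;;   mvnne  r0, #0
;; so the result is 0 when the bit is clear and -1 when it is set.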
10528
10529 (define_insn "*sign_extract_onebit"
10530 [(set (match_operand:SI 0 "s_register_operand" "=r")
10531 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10532 (const_int 1)
10533 (match_operand:SI 2 "const_int_operand" "n")))
10534 (clobber (reg:CC CC_REGNUM))]
10535 "TARGET_ARM"
10536 "*
10537 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10538 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10539 return \"mvnne\\t%0, #0\";
10540 "
10541 [(set_attr "conds" "clob")
10542 (set_attr "length" "8")]
10543 )
10544
10545 (define_insn "*not_signextract_onebit"
10546 [(set (match_operand:SI 0 "s_register_operand" "=r")
10547 (not:SI
10548 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10549 (const_int 1)
10550 (match_operand:SI 2 "const_int_operand" "n"))))
10551 (clobber (reg:CC CC_REGNUM))]
10552 "TARGET_ARM"
10553 "*
10554 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10555 output_asm_insn (\"tst\\t%1, %2\", operands);
10556 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10557 return \"movne\\t%0, #0\";
10558 "
10559 [(set_attr "conds" "clob")
10560 (set_attr "length" "12")]
10561 )
10562 ;; ??? The above patterns need auditing for Thumb-2
10563
10564 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10565 ;; expressions. For simplicity, the first register is also in the unspec
10566 ;; part.
10567 ;; To avoid the use of a GNU extension, the length attribute is computed
10568 ;; by the C function arm_attr_length_push_multi.
10569 (define_insn "*push_multi"
10570 [(match_parallel 2 "multi_register_push"
10571 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10572 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10573 UNSPEC_PUSH_MULT))])]
10574 ""
10575 "*
10576 {
10577 int num_saves = XVECLEN (operands[2], 0);
10578
10579 /* For the StrongARM at least, it is faster to
10580 use STR to store only a single register.
10581 In Thumb mode always use push, and the assembler will pick
10582 something appropriate. */
10583 if (num_saves == 1 && TARGET_ARM)
10584 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10585 else
10586 {
10587 int i;
10588 char pattern[100];
10589
10590 if (TARGET_ARM)
10591 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10592 else
10593 strcpy (pattern, \"push\\t{%1\");
10594
10595 for (i = 1; i < num_saves; i++)
10596 {
10597 strcat (pattern, \", %|\");
10598 strcat (pattern,
10599 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10600 }
10601
10602 strcat (pattern, \"}\");
10603 output_asm_insn (pattern, operands);
10604 }
10605
10606 return \"\";
10607 }"
10608 [(set_attr "type" "store4")
10609 (set (attr "length")
10610 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10611 )
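;; For illustration (an editorial sketch; the register list depends on what
;; is being saved): in ARM state a multi-register push prints as roughly
;;   stmfd  sp!, {r4, r5, lr}
;; a single-register push in ARM state uses
;;   str    r4, [sp, #-4]!
;; and in Thumb state the push mnemonic is always used so the assembler can
;; pick a suitable encoding.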
10612
10613 (define_insn "stack_tie"
10614 [(set (mem:BLK (scratch))
10615 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10616 (match_operand:SI 1 "s_register_operand" "rk")]
10617 UNSPEC_PRLG_STK))]
10618 ""
10619 ""
10620 [(set_attr "length" "0")]
10621 )
10622
10623 ;; Similarly for the floating point registers
10624 (define_insn "*push_fp_multi"
10625 [(match_parallel 2 "multi_register_push"
10626 [(set (match_operand:BLK 0 "memory_operand" "=m")
10627 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10628 UNSPEC_PUSH_MULT))])]
10629 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10630 "*
10631 {
10632 char pattern[100];
10633
10634 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10635 output_asm_insn (pattern, operands);
10636 return \"\";
10637 }"
10638 [(set_attr "type" "f_fpa_store")]
10639 )
10640
10641 ;; Special patterns for dealing with the constant pool
10642
10643 (define_insn "align_4"
10644 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10645 "TARGET_EITHER"
10646 "*
10647 assemble_align (32);
10648 return \"\";
10649 "
10650 )
10651
10652 (define_insn "align_8"
10653 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10654 "TARGET_EITHER"
10655 "*
10656 assemble_align (64);
10657 return \"\";
10658 "
10659 )
10660
10661 (define_insn "consttable_end"
10662 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10663 "TARGET_EITHER"
10664 "*
10665 making_const_table = FALSE;
10666 return \"\";
10667 "
10668 )
10669
10670 (define_insn "consttable_1"
10671 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10672 "TARGET_THUMB1"
10673 "*
10674 making_const_table = TRUE;
10675 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10676 assemble_zeros (3);
10677 return \"\";
10678 "
10679 [(set_attr "length" "4")]
10680 )
10681
10682 (define_insn "consttable_2"
10683 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10684 "TARGET_THUMB1"
10685 "*
10686 making_const_table = TRUE;
10687 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10688 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10689 assemble_zeros (2);
10690 return \"\";
10691 "
10692 [(set_attr "length" "4")]
10693 )
10694
10695 (define_insn "consttable_4"
10696 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10697 "TARGET_EITHER"
10698 "*
10699 {
10700 rtx x = operands[0];
10701 making_const_table = TRUE;
10702 switch (GET_MODE_CLASS (GET_MODE (x)))
10703 {
10704 case MODE_FLOAT:
10705 if (GET_MODE (x) == HFmode)
10706 arm_emit_fp16_const (x);
10707 else
10708 {
10709 REAL_VALUE_TYPE r;
10710 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10711 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10712 }
10713 break;
10714 default:
10715 /* XXX: Sometimes gcc does something really dumb and ends up with
10716 a HIGH in a constant pool entry, usually because it's trying to
10717 load into a VFP register. We know this will always be used in
10718 combination with a LO_SUM which ignores the high bits, so just
10719 strip off the HIGH. */
10720 if (GET_CODE (x) == HIGH)
10721 x = XEXP (x, 0);
10722 assemble_integer (x, 4, BITS_PER_WORD, 1);
10723 mark_symbol_refs_as_used (x);
10724 break;
10725 }
10726 return \"\";
10727 }"
10728 [(set_attr "length" "4")]
10729 )
10730
10731 (define_insn "consttable_8"
10732 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10733 "TARGET_EITHER"
10734 "*
10735 {
10736 making_const_table = TRUE;
10737 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10738 {
10739 case MODE_FLOAT:
10740 {
10741 REAL_VALUE_TYPE r;
10742 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10743 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10744 break;
10745 }
10746 default:
10747 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10748 break;
10749 }
10750 return \"\";
10751 }"
10752 [(set_attr "length" "8")]
10753 )
10754
10755 (define_insn "consttable_16"
10756 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10757 "TARGET_EITHER"
10758 "*
10759 {
10760 making_const_table = TRUE;
10761 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10762 {
10763 case MODE_FLOAT:
10764 {
10765 REAL_VALUE_TYPE r;
10766 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10767 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10768 break;
10769 }
10770 default:
10771 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10772 break;
10773 }
10774 return \"\";
10775 }"
10776 [(set_attr "length" "16")]
10777 )
10778
10779 ;; Miscellaneous Thumb patterns
10780
10781 (define_expand "tablejump"
10782 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10783 (use (label_ref (match_operand 1 "" "")))])]
10784 "TARGET_THUMB1"
10785 "
10786 if (flag_pic)
10787 {
10788 /* Hopefully, CSE will eliminate this copy. */
10789 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10790 rtx reg2 = gen_reg_rtx (SImode);
10791
10792 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10793 operands[0] = reg2;
10794 }
10795 "
10796 )
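;; Editorial sketch of the PIC case above (register names are illustrative):
;; the expander emits the equivalent of
;;   (set (reg:SI t) (label_ref <table>))
;;   (set (reg:SI idx2) (plus:SI (reg:SI idx) (reg:SI t)))
;; and then jumps through idx2, presumably because with -fpic the value
;; loaded from the dispatch table is relative to the table's label rather
;; than an absolute address.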
10797
10798 ;; NB never uses BX.
10799 (define_insn "*thumb1_tablejump"
10800 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10801 (use (label_ref (match_operand 1 "" "")))]
10802 "TARGET_THUMB1"
10803 "mov\\t%|pc, %0"
10804 [(set_attr "length" "2")]
10805 )
10806
10807 ;; V5 instructions.
10808
10809 (define_insn "clzsi2"
10810 [(set (match_operand:SI 0 "s_register_operand" "=r")
10811 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10812 "TARGET_32BIT && arm_arch5"
10813 "clz%?\\t%0, %1"
10814 [(set_attr "predicable" "yes")
10815 (set_attr "insn" "clz")])
10816
10817 (define_insn "rbitsi2"
10818 [(set (match_operand:SI 0 "s_register_operand" "=r")
10819 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10820 "TARGET_32BIT && arm_arch_thumb2"
10821 "rbit%?\\t%0, %1"
10822 [(set_attr "predicable" "yes")
10823 (set_attr "insn" "clz")])
10824
10825 (define_expand "ctzsi2"
10826 [(set (match_operand:SI 0 "s_register_operand" "")
10827 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10828 "TARGET_32BIT && arm_arch_thumb2"
10829 "
10830 {
10831 rtx tmp = gen_reg_rtx (SImode);
10832 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10833 emit_insn (gen_clzsi2 (operands[0], tmp));
10834 }
10835 DONE;
10836 "
10837 )
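;; A minimal sketch of the expansion above: count-trailing-zeros is computed
;; as count-leading-zeros of the bit-reversed value, roughly
;;   rbit   r3, r0
;;   clz    r0, r3
;; with r3 standing in for the temporary created by gen_reg_rtx.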
10838
10839 ;; V5E instructions.
10840
10841 (define_insn "prefetch"
10842 [(prefetch (match_operand:SI 0 "address_operand" "p")
10843 (match_operand:SI 1 "" "")
10844 (match_operand:SI 2 "" ""))]
10845 "TARGET_32BIT && arm_arch5e"
10846 "pld\\t%a0")
10847
10848 ;; General predication pattern
10849
10850 (define_cond_exec
10851 [(match_operator 0 "arm_comparison_operator"
10852 [(match_operand 1 "cc_register" "")
10853 (const_int 0)])]
10854 "TARGET_32BIT"
10855 ""
10856 )
10857
10858 (define_insn "prologue_use"
10859 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10860 ""
10861 "%@ %0 needed for prologue"
10862 [(set_attr "length" "0")]
10863 )
10864
10865
10866 ;; Patterns for exception handling
10867
10868 (define_expand "eh_return"
10869 [(use (match_operand 0 "general_operand" ""))]
10870 "TARGET_EITHER"
10871 "
10872 {
10873 if (TARGET_32BIT)
10874 emit_insn (gen_arm_eh_return (operands[0]));
10875 else
10876 emit_insn (gen_thumb_eh_return (operands[0]));
10877 DONE;
10878 }"
10879 )
10880
10881 ;; We can't expand this before we know where the link register is stored.
10882 (define_insn_and_split "arm_eh_return"
10883 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10884 VUNSPEC_EH_RETURN)
10885 (clobber (match_scratch:SI 1 "=&r"))]
10886 "TARGET_ARM"
10887 "#"
10888 "&& reload_completed"
10889 [(const_int 0)]
10890 "
10891 {
10892 arm_set_return_address (operands[0], operands[1]);
10893 DONE;
10894 }"
10895 )
10896
10897 (define_insn_and_split "thumb_eh_return"
10898 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10899 VUNSPEC_EH_RETURN)
10900 (clobber (match_scratch:SI 1 "=&l"))]
10901 "TARGET_THUMB1"
10902 "#"
10903 "&& reload_completed"
10904 [(const_int 0)]
10905 "
10906 {
10907 thumb_set_return_address (operands[0], operands[1]);
10908 DONE;
10909 }"
10910 )
10911
10912 \f
10913 ;; TLS support
10914
10915 (define_insn "load_tp_hard"
10916 [(set (match_operand:SI 0 "register_operand" "=r")
10917 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10918 "TARGET_HARD_TP"
10919 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10920 [(set_attr "predicable" "yes")]
10921 )
10922
10923 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10924 (define_insn "load_tp_soft"
10925 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10926 (clobber (reg:SI LR_REGNUM))
10927 (clobber (reg:SI IP_REGNUM))
10928 (clobber (reg:CC CC_REGNUM))]
10929 "TARGET_SOFT_TP"
10930 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10931 [(set_attr "conds" "clob")]
10932 )
10933
10934 ;; tls descriptor call
10935 (define_insn "tlscall"
10936 [(set (reg:SI R0_REGNUM)
10937 (unspec:SI [(reg:SI R0_REGNUM)
10938 (match_operand:SI 0 "" "X")
10939 (match_operand 1 "" "")] UNSPEC_TLS))
10940 (clobber (reg:SI R1_REGNUM))
10941 (clobber (reg:SI LR_REGNUM))
10942 (clobber (reg:SI CC_REGNUM))]
10943 "TARGET_GNU2_TLS"
10944 {
10945 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10946 INTVAL (operands[1]));
10947 return "bl\\t%c0(tlscall)";
10948 }
10949 [(set_attr "conds" "clob")
10950 (set_attr "length" "4")]
10951 )
10952
10953 ;;
10954
10955 ;; We only care about the lower 16 bits of the constant
10956 ;; being inserted into the upper 16 bits of the register.
10957 (define_insn "*arm_movtas_ze"
10958 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10959 (const_int 16)
10960 (const_int 16))
10961 (match_operand:SI 1 "const_int_operand" ""))]
10962 "arm_arch_thumb2"
10963 "movt%?\t%0, %L1"
10964 [(set_attr "predicable" "yes")
10965 (set_attr "length" "4")]
10966 )
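;; Editorial sketch: with operand 1 equal to 0x12345678 the %L1 modifier
;; prints the low 16 bits, so the pattern emits roughly
;;   movt   r0, #0x5678
;; writing only the upper half-word of the destination and leaving the low
;; half untouched.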
10967
10968 (define_insn "*arm_rev"
10969 [(set (match_operand:SI 0 "s_register_operand" "=r")
10970 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10971 "TARGET_32BIT && arm_arch6"
10972 "rev%?\t%0, %1"
10973 [(set_attr "predicable" "yes")
10974 (set_attr "length" "4")]
10975 )
10976
10977 (define_insn "*thumb1_rev"
10978 [(set (match_operand:SI 0 "s_register_operand" "=l")
10979 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10980 "TARGET_THUMB1 && arm_arch6"
10981 "rev\t%0, %1"
10982 [(set_attr "length" "2")]
10983 )
10984
10985 (define_expand "arm_legacy_rev"
10986 [(set (match_operand:SI 2 "s_register_operand" "")
10987 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10988 (const_int 16))
10989 (match_dup 1)))
10990 (set (match_dup 2)
10991 (lshiftrt:SI (match_dup 2)
10992 (const_int 8)))
10993 (set (match_operand:SI 3 "s_register_operand" "")
10994 (rotatert:SI (match_dup 1)
10995 (const_int 8)))
10996 (set (match_dup 2)
10997 (and:SI (match_dup 2)
10998 (const_int -65281)))
10999 (set (match_operand:SI 0 "s_register_operand" "")
11000 (xor:SI (match_dup 3)
11001 (match_dup 2)))]
11002 "TARGET_32BIT"
11003 ""
11004 )
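;; Worked example for the expansion above (editorial sketch; the exact
;; instruction selection is left to later passes).  Writing the input bytes
;; as [a b c d], most significant first:
;;   t = x ^ rotr (x, 16)    = [a^c b^d c^a d^b]
;;   t = t >> 8              = [0   a^c b^d c^a]
;;   y = rotr (x, 8)         = [d   a   b   c  ]
;;   t = t & 0xffff00ff      = [0   a^c 0   c^a]
;;   result = y ^ t          = [d   c   b   a  ]
;; which is the classic byte-reverse idiom used before ARMv6 provided rev.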
11005
11006 ;; Reuse temporaries to keep register pressure down.
11007 (define_expand "thumb_legacy_rev"
11008 [(set (match_operand:SI 2 "s_register_operand" "")
11009 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11010 (const_int 24)))
11011 (set (match_operand:SI 3 "s_register_operand" "")
11012 (lshiftrt:SI (match_dup 1)
11013 (const_int 24)))
11014 (set (match_dup 3)
11015 (ior:SI (match_dup 3)
11016 (match_dup 2)))
11017 (set (match_operand:SI 4 "s_register_operand" "")
11018 (const_int 16))
11019 (set (match_operand:SI 5 "s_register_operand" "")
11020 (rotatert:SI (match_dup 1)
11021 (match_dup 4)))
11022 (set (match_dup 2)
11023 (ashift:SI (match_dup 5)
11024 (const_int 24)))
11025 (set (match_dup 5)
11026 (lshiftrt:SI (match_dup 5)
11027 (const_int 24)))
11028 (set (match_dup 5)
11029 (ior:SI (match_dup 5)
11030 (match_dup 2)))
11031 (set (match_dup 5)
11032 (rotatert:SI (match_dup 5)
11033 (match_dup 4)))
11034 (set (match_operand:SI 0 "s_register_operand" "")
11035 (ior:SI (match_dup 5)
11036 (match_dup 3)))]
11037 "TARGET_THUMB"
11038 ""
11039 )
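;; Worked example for the Thumb-1 expansion above (editorial sketch), again
;; writing the input bytes as [a b c d]:
;;   op3 = (x << 24) | (x >> 24)       = [d 0 0 a]
;;   op5 = rotr (x, 16)                = [c d a b]
;;   op5 = (op5 << 24) | (op5 >> 24)   = [b 0 0 c]
;;   op5 = rotr (op5, 16)              = [0 c b 0]
;;   op0 = op5 | op3                   = [d c b a]
;; operand 4 merely holds the rotation count 16, since Thumb-1 rotates take
;; a register operand.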
11040
11041 (define_expand "bswapsi2"
11042 [(set (match_operand:SI 0 "s_register_operand" "=r")
11043 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11044 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11045 "
11046 if (!arm_arch6)
11047 {
11048 rtx op2 = gen_reg_rtx (SImode);
11049 rtx op3 = gen_reg_rtx (SImode);
11050
11051 if (TARGET_THUMB)
11052 {
11053 rtx op4 = gen_reg_rtx (SImode);
11054 rtx op5 = gen_reg_rtx (SImode);
11055
11056 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11057 op2, op3, op4, op5));
11058 }
11059 else
11060 {
11061 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11062 op2, op3));
11063 }
11064
11065 DONE;
11066 }
11067 "
11068 )
11069
11070 ;; Load the load/store multiple patterns
11071 (include "ldmstm.md")
11072 ;; Load the FPA co-processor patterns
11073 (include "fpa.md")
11074 ;; Load the Maverick co-processor patterns
11075 (include "cirrus.md")
11076 ;; Vector bits common to IWMMXT and Neon
11077 (include "vec-common.md")
11078 ;; Load the Intel Wireless Multimedia Extension patterns
11079 (include "iwmmxt.md")
11080 ;; Load the VFP co-processor patterns
11081 (include "vfp.md")
11082 ;; Thumb-2 patterns
11083 (include "thumb2.md")
11084 ;; Neon patterns
11085 (include "neon.md")
11086 ;; Synchronization Primitives
11087 (include "sync.md")
11088 ;; Fixed-point patterns
11089 (include "arm-fixed.md")