For Marcus - Implement sync primitives inline for ARM.
1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together;
71 ; the last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
79 ; register to "use".
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
95 ; instruction stream.
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from
101 ; a given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
106 (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
107 ]
108 )
109
110 ;; UNSPEC_VOLATILE Usage:
111
112 (define_constants
113 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
114 ; insn in the code.
115 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
116 ; instruction epilogue sequence that isn't expanded
117 ; into normal RTL. Used for both normal and sibcall
118 ; epilogues.
119 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
120 ; for inlined constants.
121 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
122 ; table.
123 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
124 ; an 8-bit object.
125 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
126 ; a 16-bit object.
127 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
128 ; a 32-bit object.
129 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
130 ; a 64-bit object.
131 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
132 ; a 128-bit object.
133 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
134 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
135 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
136 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
137 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
138 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
139 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
140 ; handling.
141 (VUNSPEC_SYNC_COMPARE_AND_SWAP 21) ; Represent an atomic compare swap.
142 (VUNSPEC_SYNC_LOCK 22) ; Represent a sync_lock_test_and_set.
143 (VUNSPEC_SYNC_OP 23) ; Represent a sync_<op>
144 (VUNSPEC_SYNC_NEW_OP 24) ; Represent a sync_new_<op>
145 (VUNSPEC_SYNC_OLD_OP 25) ; Represent a sync_old_<op>
146 ]
147 )
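;; The VUNSPEC_SYNC_* constants above tag the patterns that implement the
;; __sync_* builtins inline.  Purely as an illustration (not the exact
;; sequence any particular pattern emits, and with hypothetical register
;; assignments), a compare-and-swap on an ARMv6+ target reduces to an
;; exclusive load/store retry loop along these lines:
;;
;;   1: ldrex   r0, [r1]        @ r0 = current value at [r1]
;;      cmp     r0, r2          @ does it match the required value?
;;      bne     2f              @ no: fail, r0 holds the old value
;;      strex   r3, r4, [r1]    @ try to store the new value (r4)
;;      cmp     r3, #0          @ r3 == 0 means the store succeeded
;;      bne     1b              @ reservation lost: retry
;;   2:
;;
;; Memory barriers (see UNSPEC_MEMORY_BARRIER and the sync_release_barrier
;; attribute below) are emitted around such loops as the builtin requires.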
148 \f
149 ;;---------------------------------------------------------------------------
150 ;; Attributes
151
152 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
153 ; generating ARM code. This is used to control the length of some insn
154 ; patterns that share the same RTL in both ARM and Thumb code.
155 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
156
157 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
158 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
159
160 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
161 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
162
163 ;; Operand number of an input operand that is shifted. Zero if the
164 ;; given instruction does not shift one of its input operands.
165 (define_attr "shift" "" (const_int 0))
166
167 ; Floating Point Unit. If we only have floating point emulation, then there
168 ; is no point in scheduling the floating point insns. (Well, for best
169 ; performance we should try to group them together).
170 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
171 (const (symbol_ref "arm_fpu_attr")))
172
173 (define_attr "sync_result" "none,0,1,2,3,4,5" (const_string "none"))
174 (define_attr "sync_memory" "none,0,1,2,3,4,5" (const_string "none"))
175 (define_attr "sync_required_value" "none,0,1,2,3,4,5" (const_string "none"))
176 (define_attr "sync_new_value" "none,0,1,2,3,4,5" (const_string "none"))
177 (define_attr "sync_t1" "none,0,1,2,3,4,5" (const_string "none"))
178 (define_attr "sync_t2" "none,0,1,2,3,4,5" (const_string "none"))
179 (define_attr "sync_release_barrier" "yes,no" (const_string "yes"))
180 (define_attr "sync_op" "none,add,sub,ior,xor,and,nand"
181 (const_string "none"))
182
183 ; LENGTH of an instruction (in bytes)
184 (define_attr "length" ""
185 (cond [(not (eq_attr "sync_memory" "none"))
186 (symbol_ref "arm_sync_loop_insns (insn, operands) * 4")
187 ] (const_int 4)))
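;; For the sync patterns the length is simply the loop's instruction count
;; scaled to bytes: if arm_sync_loop_insns reports, say, a 5-instruction
;; ldrex/strex loop, the length is 5 * 4 = 20 bytes.  Keeping this accurate
;; matters because the length attribute feeds branch-range and constant
;; pool placement decisions.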
188
189 ; The architecture which supports the instruction (or alternative).
190 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
191 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
192 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
193 ; arm_arch6.  This attribute is used to compute attribute "enabled";
194 ; use value "any" to enable an alternative in all cases.
195 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6"
196 (const_string "any"))
197
198 (define_attr "arch_enabled" "no,yes"
199 (cond [(eq_attr "arch" "any")
200 (const_string "yes")
201
202 (and (eq_attr "arch" "a")
203 (ne (symbol_ref "TARGET_ARM") (const_int 0)))
204 (const_string "yes")
205
206 (and (eq_attr "arch" "t")
207 (ne (symbol_ref "TARGET_THUMB") (const_int 0)))
208 (const_string "yes")
209
210 (and (eq_attr "arch" "t1")
211 (ne (symbol_ref "TARGET_THUMB1") (const_int 0)))
212 (const_string "yes")
213
214 (and (eq_attr "arch" "t2")
215 (ne (symbol_ref "TARGET_THUMB2") (const_int 0)))
216 (const_string "yes")
217
218 (and (eq_attr "arch" "32")
219 (ne (symbol_ref "TARGET_32BIT") (const_int 0)))
220 (const_string "yes")
221
222 (and (eq_attr "arch" "v6")
223 (ne (symbol_ref "(TARGET_32BIT && arm_arch6)") (const_int 0)))
224 (const_string "yes")
225
226 (and (eq_attr "arch" "nov6")
227 (ne (symbol_ref "(TARGET_32BIT && !arm_arch6)") (const_int 0)))
228 (const_string "yes")]
229 (const_string "no")))
230
231 ; Allows an insn to disable certain alternatives for reasons other than
232 ; arch support.
233 (define_attr "insn_enabled" "no,yes"
234 (const_string "yes"))
235
236 ; Enable all alternatives that are both arch_enabled and insn_enabled.
237 (define_attr "enabled" "no,yes"
238 (if_then_else (eq_attr "insn_enabled" "yes")
239 (attr "arch_enabled")
240 (const_string "no")))
241
242 ; POOL_RANGE is how far away from a constant pool entry that this insn
243 ; can be placed. If the distance is zero, then this insn will never
244 ; reference the pool.
245 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
246 ; before its address.
247 (define_attr "arm_pool_range" "" (const_int 0))
248 (define_attr "thumb2_pool_range" "" (const_int 0))
249 (define_attr "arm_neg_pool_range" "" (const_int 0))
250 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
251
252 (define_attr "pool_range" ""
253 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
254 (attr "arm_pool_range")))
255 (define_attr "neg_pool_range" ""
256 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
257 (attr "arm_neg_pool_range")))
258
259 ; An assembler sequence may clobber the condition codes without us knowing.
260 ; If such an insn references the pool, then we have no way of knowing how,
261 ; so use the most conservative value for pool_range.
262 (define_asm_attributes
263 [(set_attr "conds" "clob")
264 (set_attr "length" "4")
265 (set_attr "pool_range" "250")])
266
267 ;; The instruction used to implement a particular pattern. This
268 ;; information is used by pipeline descriptions to provide accurate
269 ;; scheduling information.
270
271 (define_attr "insn"
272 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
273 (const_string "other"))
274
275 ; TYPE attribute is used to detect floating point instructions which, if
276 ; running on a co-processor, can run in parallel with other, basic instructions.
277 ; If write-buffer scheduling is enabled then it can also be used in the
278 ; scheduling of writes.
279
280 ; Classification of each insn
281 ; Note: vfp.md has different meanings for some of these, and some further
282 ; types as well. See that file for details.
283 ; alu any alu instruction that doesn't hit memory or fp
284 ; regs or have a shifted source operand
285 ; alu_shift any data instruction that doesn't hit memory or fp
286 ; regs, but has a source operand shifted by a constant
287 ; alu_shift_reg any data instruction that doesn't hit memory or fp
288 ; regs, but has a source operand shifted by a register value
289 ; mult a multiply instruction
290 ; block blockage insn, this blocks all functional units
291 ; float a floating point arithmetic operation (subject to expansion)
292 ; fdivd DFmode floating point division
293 ; fdivs SFmode floating point division
294 ; fmul Floating point multiply
295 ; ffmul Fast floating point multiply
296 ; farith Floating point arithmetic (4 cycle)
297 ; ffarith Fast floating point arithmetic (2 cycle)
298 ; float_em a floating point arithmetic operation that is normally emulated
299 ; even on a machine with an fpa.
300 ; f_load a floating point load from memory
301 ; f_store a floating point store to memory
302 ; f_load[sd] single/double load from memory
303 ; f_store[sd] single/double store to memory
304 ; f_flag a transfer of co-processor flags to the CPSR
305 ; f_mem_r a transfer of a floating point register to a real reg via mem
306 ; r_mem_f the reverse of f_mem_r
307 ; f_2_r fast transfer float to arm (no memory needed)
308 ; r_2_f fast transfer arm to float
309 ; f_cvt convert floating<->integral
310 ; branch a branch
311 ; call a subroutine call
312 ; load_byte load byte(s) from memory to arm registers
313 ; load1 load 1 word from memory to arm registers
314 ; load2 load 2 words from memory to arm registers
315 ; load3 load 3 words from memory to arm registers
316 ; load4 load 4 words from memory to arm registers
317 ; store store 1 word to memory from arm registers
318 ; store2 store 2 words
319 ; store3 store 3 words
320 ; store4 store 4 (or more) words
321 ; Additions for Cirrus Maverick co-processor:
322 ; mav_farith Floating point arithmetic (4 cycle)
323 ; mav_dmult Double multiplies (7 cycle)
324 ;
325
326 (define_attr "type"
327 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
328 (if_then_else
329 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
330 (const_string "mult")
331 (const_string "alu")))
332
333 ; Load scheduling, set from the arm_ld_sched variable
334 ; initialized by arm_override_options()
335 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
336
337 ;; Classification of NEON instructions for scheduling purposes.
338 ;; Do not set this attribute and the "type" attribute together in
339 ;; any one instruction pattern.
340 (define_attr "neon_type"
341 "neon_int_1,\
342 neon_int_2,\
343 neon_int_3,\
344 neon_int_4,\
345 neon_int_5,\
346 neon_vqneg_vqabs,\
347 neon_vmov,\
348 neon_vaba,\
349 neon_vsma,\
350 neon_vaba_qqq,\
351 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
352 neon_mul_qqq_8_16_32_ddd_32,\
353 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
354 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
355 neon_mla_qqq_8_16,\
356 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
357 neon_mla_qqq_32_qqd_32_scalar,\
358 neon_mul_ddd_16_scalar_32_16_long_scalar,\
359 neon_mul_qqd_32_scalar,\
360 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
361 neon_shift_1,\
362 neon_shift_2,\
363 neon_shift_3,\
364 neon_vshl_ddd,\
365 neon_vqshl_vrshl_vqrshl_qqq,\
366 neon_vsra_vrsra,\
367 neon_fp_vadd_ddd_vabs_dd,\
368 neon_fp_vadd_qqq_vabs_qq,\
369 neon_fp_vsum,\
370 neon_fp_vmul_ddd,\
371 neon_fp_vmul_qqd,\
372 neon_fp_vmla_ddd,\
373 neon_fp_vmla_qqq,\
374 neon_fp_vmla_ddd_scalar,\
375 neon_fp_vmla_qqq_scalar,\
376 neon_fp_vrecps_vrsqrts_ddd,\
377 neon_fp_vrecps_vrsqrts_qqq,\
378 neon_bp_simple,\
379 neon_bp_2cycle,\
380 neon_bp_3cycle,\
381 neon_ldr,\
382 neon_str,\
383 neon_vld1_1_2_regs,\
384 neon_vld1_3_4_regs,\
385 neon_vld2_2_regs_vld1_vld2_all_lanes,\
386 neon_vld2_4_regs,\
387 neon_vld3_vld4,\
388 neon_vst1_1_2_regs_vst2_2_regs,\
389 neon_vst1_3_4_regs,\
390 neon_vst2_4_regs_vst3_vst4,\
391 neon_vst3_vst4,\
392 neon_vld1_vld2_lane,\
393 neon_vld3_vld4_lane,\
394 neon_vst1_vst2_lane,\
395 neon_vst3_vst4_lane,\
396 neon_vld3_vld4_all_lanes,\
397 neon_mcr,\
398 neon_mcr_2_mcrr,\
399 neon_mrc,\
400 neon_mrrc,\
401 neon_ldm_2,\
402 neon_stm_2,\
403 none"
404 (const_string "none"))
405
406 ; condition codes: this one is used by final_prescan_insn to speed up
407 ; conditionalizing instructions. It saves having to scan the rtl to see if
408 ; it uses or alters the condition codes.
409 ;
410 ; USE means that the condition codes are used by the insn in the process of
411 ; outputting code; this means (at present) that we can't use the insn in
412 ; inlined branches.
413 ;
414 ; SET means that the purpose of the insn is to set the condition codes in a
415 ; well defined manner.
416 ;
417 ; CLOB means that the condition codes are altered in an undefined manner, if
418 ; they are altered at all
419 ;
420 ; UNCONDITIONAL means the instruction cannot be conditionally executed.
421 ;
422 ; NOCOND means that the condition codes are neither altered by nor affect
423 ; the output of this insn.
424
425 (define_attr "conds" "use,set,clob,unconditional,nocond"
426 (if_then_else
427 (ior (eq_attr "is_thumb1" "yes")
428 (eq_attr "type" "call"))
429 (const_string "clob")
430 (if_then_else (eq_attr "neon_type" "none")
431 (const_string "nocond")
432 (const_string "unconditional"))))
433
434 ; Predicable means that the insn can be conditionally executed based on
435 ; an automatically added predicate (additional patterns are generated by
436 ; gen...). We default to 'no' because no Thumb patterns match this rule
437 ; and not all ARM patterns do.
438 (define_attr "predicable" "no,yes" (const_string "no"))
439
440 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
441 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
442 ; suffer blockages enough to warrant modelling this (and it can adversely
443 ; affect the schedule).
444 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
445
446 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
447 ; to stall the processor. Used with model_wbuf above.
448 (define_attr "write_conflict" "no,yes"
449 (if_then_else (eq_attr "type"
450 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
451 (const_string "yes")
452 (const_string "no")))
453
454 ; Classify the insns into those that take one cycle and those that take more
455 ; than one on the main cpu execution unit.
456 (define_attr "core_cycles" "single,multi"
457 (if_then_else (eq_attr "type"
458 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
459 (const_string "single")
460 (const_string "multi")))
461
462 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
463 ;; distant label. Only applicable to Thumb code.
464 (define_attr "far_jump" "yes,no" (const_string "no"))
465
466
467 ;; The number of machine instructions this pattern expands to.
468 ;; Used for Thumb-2 conditional execution.
469 (define_attr "ce_count" "" (const_int 1))
470
471 ;;---------------------------------------------------------------------------
472 ;; Mode iterators
473
474 (include "iterators.md")
475
476 ;;---------------------------------------------------------------------------
477 ;; Predicates
478
479 (include "predicates.md")
480 (include "constraints.md")
481
482 ;;---------------------------------------------------------------------------
483 ;; Pipeline descriptions
484
485 ;; Processor type. This is created automatically from arm-cores.def.
486 (include "arm-tune.md")
487
488 (define_attr "tune_cortexr4" "yes,no"
489 (const (if_then_else
490 (eq_attr "tune" "cortexr4,cortexr4f")
491 (const_string "yes")
492 (const_string "no"))))
493
494 ;; True if the generic scheduling description should be used.
495
496 (define_attr "generic_sched" "yes,no"
497 (const (if_then_else
498 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
499 (eq_attr "tune_cortexr4" "yes"))
500 (const_string "no")
501 (const_string "yes"))))
502
503 (define_attr "generic_vfp" "yes,no"
504 (const (if_then_else
505 (and (eq_attr "fpu" "vfp")
506 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
507 (eq_attr "tune_cortexr4" "no"))
508 (const_string "yes")
509 (const_string "no"))))
510
511 (include "arm-generic.md")
512 (include "arm926ejs.md")
513 (include "arm1020e.md")
514 (include "arm1026ejs.md")
515 (include "arm1136jfs.md")
516 (include "cortex-a8.md")
517 (include "cortex-a9.md")
518 (include "cortex-r4.md")
519 (include "cortex-r4f.md")
520 (include "vfp11.md")
521
522 \f
523 ;;---------------------------------------------------------------------------
524 ;; Insn patterns
525 ;;
526 ;; Addition insns.
527
528 ;; Note: For DImode insns, there is normally no reason why operands should
529 ;; not be in the same register; what we don't want is for something being
530 ;; written to partially overlap something that is an input.
531 ;; Cirrus 64-bit additions should not be split because we have native
532 ;; 64-bit addition instructions.
533
534 (define_expand "adddi3"
535 [(parallel
536 [(set (match_operand:DI 0 "s_register_operand" "")
537 (plus:DI (match_operand:DI 1 "s_register_operand" "")
538 (match_operand:DI 2 "s_register_operand" "")))
539 (clobber (reg:CC CC_REGNUM))])]
540 "TARGET_EITHER"
541 "
542 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
543 {
544 if (!cirrus_fp_register (operands[0], DImode))
545 operands[0] = force_reg (DImode, operands[0]);
546 if (!cirrus_fp_register (operands[1], DImode))
547 operands[1] = force_reg (DImode, operands[1]);
548 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
549 DONE;
550 }
551
552 if (TARGET_THUMB1)
553 {
554 if (GET_CODE (operands[1]) != REG)
555 operands[1] = force_reg (DImode, operands[1]);
556 if (GET_CODE (operands[2]) != REG)
557 operands[2] = force_reg (DImode, operands[2]);
558 }
559 "
560 )
561
562 (define_insn "*thumb1_adddi3"
563 [(set (match_operand:DI 0 "register_operand" "=l")
564 (plus:DI (match_operand:DI 1 "register_operand" "%0")
565 (match_operand:DI 2 "register_operand" "l")))
566 (clobber (reg:CC CC_REGNUM))
567 ]
568 "TARGET_THUMB1"
569 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
570 [(set_attr "length" "4")]
571 )
572
573 (define_insn_and_split "*arm_adddi3"
574 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
575 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
576 (match_operand:DI 2 "s_register_operand" "r, 0")))
577 (clobber (reg:CC CC_REGNUM))]
578 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK) && !TARGET_NEON"
579 "#"
580 "TARGET_32BIT && reload_completed
581 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
582 [(parallel [(set (reg:CC_C CC_REGNUM)
583 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
584 (match_dup 1)))
585 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
586 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
587 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
588 "
589 {
590 operands[3] = gen_highpart (SImode, operands[0]);
591 operands[0] = gen_lowpart (SImode, operands[0]);
592 operands[4] = gen_highpart (SImode, operands[1]);
593 operands[1] = gen_lowpart (SImode, operands[1]);
594 operands[5] = gen_highpart (SImode, operands[2]);
595 operands[2] = gen_lowpart (SImode, operands[2]);
596 }"
597 [(set_attr "conds" "clob")
598 (set_attr "length" "8")]
599 )
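;; After the split above, a DImode addition becomes the usual carry-chained
;; pair, e.g. (with hypothetical registers):
;;   adds    r0, r2, r4      @ low words; sets the carry flag
;;   adc     r1, r3, r5      @ high words plus carry in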
600
601 (define_insn_and_split "*adddi_sesidi_di"
602 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
603 (plus:DI (sign_extend:DI
604 (match_operand:SI 2 "s_register_operand" "r,r"))
605 (match_operand:DI 1 "s_register_operand" "0,r")))
606 (clobber (reg:CC CC_REGNUM))]
607 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
608 "#"
609 "TARGET_32BIT && reload_completed"
610 [(parallel [(set (reg:CC_C CC_REGNUM)
611 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
612 (match_dup 1)))
613 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
614 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
615 (const_int 31))
616 (match_dup 4))
617 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
618 "
619 {
620 operands[3] = gen_highpart (SImode, operands[0]);
621 operands[0] = gen_lowpart (SImode, operands[0]);
622 operands[4] = gen_highpart (SImode, operands[1]);
623 operands[1] = gen_lowpart (SImode, operands[1]);
624 operands[2] = gen_lowpart (SImode, operands[2]);
625 }"
626 [(set_attr "conds" "clob")
627 (set_attr "length" "8")]
628 )
629
630 (define_insn_and_split "*adddi_zesidi_di"
631 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
632 (plus:DI (zero_extend:DI
633 (match_operand:SI 2 "s_register_operand" "r,r"))
634 (match_operand:DI 1 "s_register_operand" "0,r")))
635 (clobber (reg:CC CC_REGNUM))]
636 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
637 "#"
638 "TARGET_32BIT && reload_completed"
639 [(parallel [(set (reg:CC_C CC_REGNUM)
640 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
641 (match_dup 1)))
642 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
643 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
644 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
645 "
646 {
647 operands[3] = gen_highpart (SImode, operands[0]);
648 operands[0] = gen_lowpart (SImode, operands[0]);
649 operands[4] = gen_highpart (SImode, operands[1]);
650 operands[1] = gen_lowpart (SImode, operands[1]);
651 operands[2] = gen_lowpart (SImode, operands[2]);
652 }"
653 [(set_attr "conds" "clob")
654 (set_attr "length" "8")]
655 )
656
657 (define_expand "addsi3"
658 [(set (match_operand:SI 0 "s_register_operand" "")
659 (plus:SI (match_operand:SI 1 "s_register_operand" "")
660 (match_operand:SI 2 "reg_or_int_operand" "")))]
661 "TARGET_EITHER"
662 "
663 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
664 {
665 arm_split_constant (PLUS, SImode, NULL_RTX,
666 INTVAL (operands[2]), operands[0], operands[1],
667 optimize && can_create_pseudo_p ());
668 DONE;
669 }
670 "
671 )
672
673 ; If there is a scratch available, this will be faster than synthesizing the
674 ; addition.
675 (define_peephole2
676 [(match_scratch:SI 3 "r")
677 (set (match_operand:SI 0 "arm_general_register_operand" "")
678 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
679 (match_operand:SI 2 "const_int_operand" "")))]
680 "TARGET_32BIT &&
681 !(const_ok_for_arm (INTVAL (operands[2]))
682 || const_ok_for_arm (-INTVAL (operands[2])))
683 && const_ok_for_arm (~INTVAL (operands[2]))"
684 [(set (match_dup 3) (match_dup 2))
685 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
686 ""
687 )
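;; A hypothetical example of the peephole above: 0x00ffffff is not a valid
;; ARM immediate and neither is its negation, but its complement 0xff000000
;; is.  Rather than synthesizing the constant with a chain of adds, the
;; peephole uses the scratch register:
;;   mvn     r3, #0xff000000         @ r3 = 0x00ffffff
;;   add     r0, r1, r3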
688
689 ;; The r/r/k alternative is required when reloading the address
690 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
691 ;; put the duplicated register first, and not try the commutative version.
692 (define_insn_and_split "*arm_addsi3"
693 [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k,r")
694 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k,rk")
695 (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,L, L,?n")))]
696 "TARGET_32BIT"
697 "@
698 add%?\\t%0, %1, %2
699 add%?\\t%0, %1, %2
700 add%?\\t%0, %2, %1
701 sub%?\\t%0, %1, #%n2
702 sub%?\\t%0, %1, #%n2
703 #"
704 "TARGET_32BIT
705 && GET_CODE (operands[2]) == CONST_INT
706 && !(const_ok_for_arm (INTVAL (operands[2]))
707 || const_ok_for_arm (-INTVAL (operands[2])))
708 && (reload_completed || !arm_eliminable_register (operands[1]))"
709 [(clobber (const_int 0))]
710 "
711 arm_split_constant (PLUS, SImode, curr_insn,
712 INTVAL (operands[2]), operands[0],
713 operands[1], 0);
714 DONE;
715 "
716 [(set_attr "length" "4,4,4,4,4,16")
717 (set_attr "predicable" "yes")]
718 )
719
720 (define_insn_and_split "*thumb1_addsi3"
721 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
722 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
723 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
724 "TARGET_THUMB1"
725 "*
726 static const char * const asms[] =
727 {
728 \"add\\t%0, %0, %2\",
729 \"sub\\t%0, %0, #%n2\",
730 \"add\\t%0, %1, %2\",
731 \"add\\t%0, %0, %2\",
732 \"add\\t%0, %0, %2\",
733 \"add\\t%0, %1, %2\",
734 \"add\\t%0, %1, %2\",
735 \"#\",
736 \"#\",
737 \"#\"
738 };
739 if ((which_alternative == 2 || which_alternative == 6)
740 && GET_CODE (operands[2]) == CONST_INT
741 && INTVAL (operands[2]) < 0)
742 return \"sub\\t%0, %1, #%n2\";
743 return asms[which_alternative];
744 "
745 "&& reload_completed && CONST_INT_P (operands[2])
746 && ((operands[1] != stack_pointer_rtx
747 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
748 || (operands[1] == stack_pointer_rtx
749 && INTVAL (operands[2]) > 1020))"
750 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
751 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
752 {
753 HOST_WIDE_INT offset = INTVAL (operands[2]);
754 if (operands[1] == stack_pointer_rtx)
755 offset -= 1020;
756 else
757 {
758 if (offset > 255)
759 offset = 255;
760 else if (offset < -255)
761 offset = -255;
762 }
763 operands[3] = GEN_INT (offset);
764 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
765 }
766 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
767 )
768
769 ;; Reloading and elimination of the frame pointer can
770 ;; sometimes cause this optimization to be missed.
771 (define_peephole2
772 [(set (match_operand:SI 0 "arm_general_register_operand" "")
773 (match_operand:SI 1 "const_int_operand" ""))
774 (set (match_dup 0)
775 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
776 "TARGET_THUMB1
777 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
778 && (INTVAL (operands[1]) & 3) == 0"
779 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
780 ""
781 )
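;; For example (hypothetical registers), this peephole turns
;;   movs    r3, #16
;;   add     r3, r3, sp
;; into the single instruction
;;   add     r3, sp, #16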
782
783 (define_insn "*addsi3_compare0"
784 [(set (reg:CC_NOOV CC_REGNUM)
785 (compare:CC_NOOV
786 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
787 (match_operand:SI 2 "arm_add_operand" "rI,L"))
788 (const_int 0)))
789 (set (match_operand:SI 0 "s_register_operand" "=r,r")
790 (plus:SI (match_dup 1) (match_dup 2)))]
791 "TARGET_ARM"
792 "@
793 add%.\\t%0, %1, %2
794 sub%.\\t%0, %1, #%n2"
795 [(set_attr "conds" "set")]
796 )
797
798 (define_insn "*addsi3_compare0_scratch"
799 [(set (reg:CC_NOOV CC_REGNUM)
800 (compare:CC_NOOV
801 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
802 (match_operand:SI 1 "arm_add_operand" "rI,L"))
803 (const_int 0)))]
804 "TARGET_ARM"
805 "@
806 cmn%?\\t%0, %1
807 cmp%?\\t%0, #%n1"
808 [(set_attr "conds" "set")]
809 )
810
811 (define_insn "*compare_negsi_si"
812 [(set (reg:CC_Z CC_REGNUM)
813 (compare:CC_Z
814 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
815 (match_operand:SI 1 "s_register_operand" "r")))]
816 "TARGET_32BIT"
817 "cmn%?\\t%1, %0"
818 [(set_attr "conds" "set")]
819 )
820
821 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
822 ;; addend is a constant.
823 (define_insn "*cmpsi2_addneg"
824 [(set (reg:CC CC_REGNUM)
825 (compare:CC
826 (match_operand:SI 1 "s_register_operand" "r,r")
827 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
828 (set (match_operand:SI 0 "s_register_operand" "=r,r")
829 (plus:SI (match_dup 1)
830 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
831 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
832 "@
833 add%.\\t%0, %1, %3
834 sub%.\\t%0, %1, #%n3"
835 [(set_attr "conds" "set")]
836 )
837
838 ;; Convert the sequence
839 ;; sub rd, rn, #1
840 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
841 ;; bne dest
842 ;; into
843 ;; subs rd, rn, #1
844 ;; bcs dest ((unsigned)rn >= 1)
845 ;; similarly for the beq variant using bcc.
846 ;; This is a common looping idiom (while (n--))
847 (define_peephole2
848 [(set (match_operand:SI 0 "arm_general_register_operand" "")
849 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
850 (const_int -1)))
851 (set (match_operand 2 "cc_register" "")
852 (compare (match_dup 0) (const_int -1)))
853 (set (pc)
854 (if_then_else (match_operator 3 "equality_operator"
855 [(match_dup 2) (const_int 0)])
856 (match_operand 4 "" "")
857 (match_operand 5 "" "")))]
858 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
859 [(parallel[
860 (set (match_dup 2)
861 (compare:CC
862 (match_dup 1) (const_int 1)))
863 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
864 (set (pc)
865 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
866 (match_dup 4)
867 (match_dup 5)))]
868 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
869 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
870 ? GEU : LTU),
871 VOIDmode,
872 operands[2], const0_rtx);"
873 )
874
875 ;; The next four insns work because they compare the result with one of
876 ;; the operands, and we know that the use of the condition code is
877 ;; either GEU or LTU, so we can use the carry flag from the addition
878 ;; instead of doing the compare a second time.
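;; As a concrete example, after "adds r0, r1, r2" the carry flag is set
;; exactly when the 32-bit sum wrapped, i.e. when the truncated result is
;; below operand 1 as an unsigned value (e.g. 0xffffffff + 2 = 1 with carry
;; set), which is precisely the information a GEU/LTU test of the result
;; against operand 1 needs.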
879 (define_insn "*addsi3_compare_op1"
880 [(set (reg:CC_C CC_REGNUM)
881 (compare:CC_C
882 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
883 (match_operand:SI 2 "arm_add_operand" "rI,L"))
884 (match_dup 1)))
885 (set (match_operand:SI 0 "s_register_operand" "=r,r")
886 (plus:SI (match_dup 1) (match_dup 2)))]
887 "TARGET_32BIT"
888 "@
889 add%.\\t%0, %1, %2
890 sub%.\\t%0, %1, #%n2"
891 [(set_attr "conds" "set")]
892 )
893
894 (define_insn "*addsi3_compare_op2"
895 [(set (reg:CC_C CC_REGNUM)
896 (compare:CC_C
897 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
898 (match_operand:SI 2 "arm_add_operand" "rI,L"))
899 (match_dup 2)))
900 (set (match_operand:SI 0 "s_register_operand" "=r,r")
901 (plus:SI (match_dup 1) (match_dup 2)))]
902 "TARGET_32BIT"
903 "@
904 add%.\\t%0, %1, %2
905 sub%.\\t%0, %1, #%n2"
906 [(set_attr "conds" "set")]
907 )
908
909 (define_insn "*compare_addsi2_op0"
910 [(set (reg:CC_C CC_REGNUM)
911 (compare:CC_C
912 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
913 (match_operand:SI 1 "arm_add_operand" "rI,L"))
914 (match_dup 0)))]
915 "TARGET_32BIT"
916 "@
917 cmn%?\\t%0, %1
918 cmp%?\\t%0, #%n1"
919 [(set_attr "conds" "set")]
920 )
921
922 (define_insn "*compare_addsi2_op1"
923 [(set (reg:CC_C CC_REGNUM)
924 (compare:CC_C
925 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
926 (match_operand:SI 1 "arm_add_operand" "rI,L"))
927 (match_dup 1)))]
928 "TARGET_32BIT"
929 "@
930 cmn%?\\t%0, %1
931 cmp%?\\t%0, #%n1"
932 [(set_attr "conds" "set")]
933 )
934
935 (define_insn "*addsi3_carryin_<optab>"
936 [(set (match_operand:SI 0 "s_register_operand" "=r")
937 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
938 (match_operand:SI 2 "arm_rhs_operand" "rI"))
939 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
940 "TARGET_32BIT"
941 "adc%?\\t%0, %1, %2"
942 [(set_attr "conds" "use")]
943 )
944
945 (define_insn "*addsi3_carryin_alt2_<optab>"
946 [(set (match_operand:SI 0 "s_register_operand" "=r")
947 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
948 (match_operand:SI 1 "s_register_operand" "%r"))
949 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
950 "TARGET_32BIT"
951 "adc%?\\t%0, %1, %2"
952 [(set_attr "conds" "use")]
953 )
954
955 (define_insn "*addsi3_carryin_shift_<optab>"
956 [(set (match_operand:SI 0 "s_register_operand" "=r")
957 (plus:SI (plus:SI
958 (match_operator:SI 2 "shift_operator"
959 [(match_operand:SI 3 "s_register_operand" "r")
960 (match_operand:SI 4 "reg_or_int_operand" "rM")])
961 (match_operand:SI 1 "s_register_operand" "r"))
962 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
963 "TARGET_32BIT"
964 "adc%?\\t%0, %1, %3%S2"
965 [(set_attr "conds" "use")
966 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
967 (const_string "alu_shift")
968 (const_string "alu_shift_reg")))]
969 )
970
971 (define_expand "incscc"
972 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
973 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
974 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
975 (match_operand:SI 1 "s_register_operand" "0,?r")))]
976 "TARGET_32BIT"
977 ""
978 )
979
980 (define_insn "*arm_incscc"
981 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
982 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
983 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
984 (match_operand:SI 1 "s_register_operand" "0,?r")))]
985 "TARGET_ARM"
986 "@
987 add%d2\\t%0, %1, #1
988 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
989 [(set_attr "conds" "use")
990 (set_attr "length" "4,8")]
991 )
992
993 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
994 (define_split
995 [(set (match_operand:SI 0 "s_register_operand" "")
996 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
997 (match_operand:SI 2 "s_register_operand" ""))
998 (const_int -1)))
999 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1000 "TARGET_32BIT"
1001 [(set (match_dup 3) (match_dup 1))
1002 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1003 "
1004 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1005 ")
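;; The identity holds because in two's complement ~(x - 1) = -x, so
;; ~(~(x - 1) << y) = ~(-x << y) = ~(-(x << y)) = (x << y) - 1.
;; For instance, with x = 4 and y = 3: (4 << 3) - 1 = 31, and
;; ~(~3 << 3) = ~0xffffffe0 = 0x1f = 31.  The split above then needs only a
;; constant load and a single mvn with a shifted operand.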
1006
1007 (define_expand "addsf3"
1008 [(set (match_operand:SF 0 "s_register_operand" "")
1009 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1010 (match_operand:SF 2 "arm_float_add_operand" "")))]
1011 "TARGET_32BIT && TARGET_HARD_FLOAT"
1012 "
1013 if (TARGET_MAVERICK
1014 && !cirrus_fp_register (operands[2], SFmode))
1015 operands[2] = force_reg (SFmode, operands[2]);
1016 ")
1017
1018 (define_expand "adddf3"
1019 [(set (match_operand:DF 0 "s_register_operand" "")
1020 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1021 (match_operand:DF 2 "arm_float_add_operand" "")))]
1022 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1023 "
1024 if (TARGET_MAVERICK
1025 && !cirrus_fp_register (operands[2], DFmode))
1026 operands[2] = force_reg (DFmode, operands[2]);
1027 ")
1028
1029 (define_expand "subdi3"
1030 [(parallel
1031 [(set (match_operand:DI 0 "s_register_operand" "")
1032 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1033 (match_operand:DI 2 "s_register_operand" "")))
1034 (clobber (reg:CC CC_REGNUM))])]
1035 "TARGET_EITHER"
1036 "
1037 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
1038 && TARGET_32BIT
1039 && cirrus_fp_register (operands[0], DImode)
1040 && cirrus_fp_register (operands[1], DImode))
1041 {
1042 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
1043 DONE;
1044 }
1045
1046 if (TARGET_THUMB1)
1047 {
1048 if (GET_CODE (operands[1]) != REG)
1049 operands[1] = force_reg (DImode, operands[1]);
1050 if (GET_CODE (operands[2]) != REG)
1051 operands[2] = force_reg (DImode, operands[2]);
1052 }
1053 "
1054 )
1055
1056 (define_insn "*arm_subdi3"
1057 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1058 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1059 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1060 (clobber (reg:CC CC_REGNUM))]
1061 "TARGET_32BIT && !TARGET_NEON"
1062 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1063 [(set_attr "conds" "clob")
1064 (set_attr "length" "8")]
1065 )
1066
1067 (define_insn "*thumb_subdi3"
1068 [(set (match_operand:DI 0 "register_operand" "=l")
1069 (minus:DI (match_operand:DI 1 "register_operand" "0")
1070 (match_operand:DI 2 "register_operand" "l")))
1071 (clobber (reg:CC CC_REGNUM))]
1072 "TARGET_THUMB1"
1073 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1074 [(set_attr "length" "4")]
1075 )
1076
1077 (define_insn "*subdi_di_zesidi"
1078 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1079 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1080 (zero_extend:DI
1081 (match_operand:SI 2 "s_register_operand" "r,r"))))
1082 (clobber (reg:CC CC_REGNUM))]
1083 "TARGET_32BIT"
1084 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1085 [(set_attr "conds" "clob")
1086 (set_attr "length" "8")]
1087 )
1088
1089 (define_insn "*subdi_di_sesidi"
1090 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1091 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1092 (sign_extend:DI
1093 (match_operand:SI 2 "s_register_operand" "r,r"))))
1094 (clobber (reg:CC CC_REGNUM))]
1095 "TARGET_32BIT"
1096 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1097 [(set_attr "conds" "clob")
1098 (set_attr "length" "8")]
1099 )
1100
1101 (define_insn "*subdi_zesidi_di"
1102 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1103 (minus:DI (zero_extend:DI
1104 (match_operand:SI 2 "s_register_operand" "r,r"))
1105 (match_operand:DI 1 "s_register_operand" "0,r")))
1106 (clobber (reg:CC CC_REGNUM))]
1107 "TARGET_ARM"
1108 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1109 [(set_attr "conds" "clob")
1110 (set_attr "length" "8")]
1111 )
1112
1113 (define_insn "*subdi_sesidi_di"
1114 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1115 (minus:DI (sign_extend:DI
1116 (match_operand:SI 2 "s_register_operand" "r,r"))
1117 (match_operand:DI 1 "s_register_operand" "0,r")))
1118 (clobber (reg:CC CC_REGNUM))]
1119 "TARGET_ARM"
1120 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1121 [(set_attr "conds" "clob")
1122 (set_attr "length" "8")]
1123 )
1124
1125 (define_insn "*subdi_zesidi_zesidi"
1126 [(set (match_operand:DI 0 "s_register_operand" "=r")
1127 (minus:DI (zero_extend:DI
1128 (match_operand:SI 1 "s_register_operand" "r"))
1129 (zero_extend:DI
1130 (match_operand:SI 2 "s_register_operand" "r"))))
1131 (clobber (reg:CC CC_REGNUM))]
1132 "TARGET_32BIT"
1133 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1134 [(set_attr "conds" "clob")
1135 (set_attr "length" "8")]
1136 )
1137
1138 (define_expand "subsi3"
1139 [(set (match_operand:SI 0 "s_register_operand" "")
1140 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1141 (match_operand:SI 2 "s_register_operand" "")))]
1142 "TARGET_EITHER"
1143 "
1144 if (GET_CODE (operands[1]) == CONST_INT)
1145 {
1146 if (TARGET_32BIT)
1147 {
1148 arm_split_constant (MINUS, SImode, NULL_RTX,
1149 INTVAL (operands[1]), operands[0],
1150 operands[2], optimize && can_create_pseudo_p ());
1151 DONE;
1152 }
1153 else /* TARGET_THUMB1 */
1154 operands[1] = force_reg (SImode, operands[1]);
1155 }
1156 "
1157 )
1158
1159 (define_insn "thumb1_subsi3_insn"
1160 [(set (match_operand:SI 0 "register_operand" "=l")
1161 (minus:SI (match_operand:SI 1 "register_operand" "l")
1162 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1163 "TARGET_THUMB1"
1164 "sub\\t%0, %1, %2"
1165 [(set_attr "length" "2")
1166 (set_attr "conds" "set")])
1167
1168 ; ??? Check Thumb-2 split length
1169 (define_insn_and_split "*arm_subsi3_insn"
1170 [(set (match_operand:SI 0 "s_register_operand" "=r,r,rk,r,r")
1171 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,k,?n,r")
1172 (match_operand:SI 2 "reg_or_int_operand" "r,rI,r, r,?n")))]
1173 "TARGET_32BIT"
1174 "@
1175 rsb%?\\t%0, %2, %1
1176 sub%?\\t%0, %1, %2
1177 sub%?\\t%0, %1, %2
1178 #
1179 #"
1180 "&& ((GET_CODE (operands[1]) == CONST_INT
1181 && !const_ok_for_arm (INTVAL (operands[1])))
1182 || (GET_CODE (operands[2]) == CONST_INT
1183 && !const_ok_for_arm (INTVAL (operands[2]))))"
1184 [(clobber (const_int 0))]
1185 "
1186 arm_split_constant (MINUS, SImode, curr_insn,
1187 INTVAL (operands[1]), operands[0], operands[2], 0);
1188 DONE;
1189 "
1190 [(set_attr "length" "4,4,4,16,16")
1191 (set_attr "predicable" "yes")]
1192 )
1193
1194 (define_peephole2
1195 [(match_scratch:SI 3 "r")
1196 (set (match_operand:SI 0 "arm_general_register_operand" "")
1197 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1198 (match_operand:SI 2 "arm_general_register_operand" "")))]
1199 "TARGET_32BIT
1200 && !const_ok_for_arm (INTVAL (operands[1]))
1201 && const_ok_for_arm (~INTVAL (operands[1]))"
1202 [(set (match_dup 3) (match_dup 1))
1203 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1204 ""
1205 )
1206
1207 (define_insn "*subsi3_compare0"
1208 [(set (reg:CC_NOOV CC_REGNUM)
1209 (compare:CC_NOOV
1210 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1211 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1212 (const_int 0)))
1213 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1214 (minus:SI (match_dup 1) (match_dup 2)))]
1215 "TARGET_32BIT"
1216 "@
1217 sub%.\\t%0, %1, %2
1218 rsb%.\\t%0, %2, %1"
1219 [(set_attr "conds" "set")]
1220 )
1221
1222 (define_insn "*subsi3_compare"
1223 [(set (reg:CC CC_REGNUM)
1224 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,I")
1225 (match_operand:SI 2 "arm_rhs_operand" "rI,r")))
1226 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1227 (minus:SI (match_dup 1) (match_dup 2)))]
1228 "TARGET_32BIT"
1229 "@
1230 sub%.\\t%0, %1, %2
1231 rsb%.\\t%0, %2, %1"
1232 [(set_attr "conds" "set")]
1233 )
1234
1235 (define_expand "decscc"
1236 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1237 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1238 (match_operator:SI 2 "arm_comparison_operator"
1239 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1240 "TARGET_32BIT"
1241 ""
1242 )
1243
1244 (define_insn "*arm_decscc"
1245 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1246 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1247 (match_operator:SI 2 "arm_comparison_operator"
1248 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1249 "TARGET_ARM"
1250 "@
1251 sub%d2\\t%0, %1, #1
1252 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1253 [(set_attr "conds" "use")
1254 (set_attr "length" "*,8")]
1255 )
1256
1257 (define_expand "subsf3"
1258 [(set (match_operand:SF 0 "s_register_operand" "")
1259 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1260 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1261 "TARGET_32BIT && TARGET_HARD_FLOAT"
1262 "
1263 if (TARGET_MAVERICK)
1264 {
1265 if (!cirrus_fp_register (operands[1], SFmode))
1266 operands[1] = force_reg (SFmode, operands[1]);
1267 if (!cirrus_fp_register (operands[2], SFmode))
1268 operands[2] = force_reg (SFmode, operands[2]);
1269 }
1270 ")
1271
1272 (define_expand "subdf3"
1273 [(set (match_operand:DF 0 "s_register_operand" "")
1274 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1275 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1276 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1277 "
1278 if (TARGET_MAVERICK)
1279 {
1280 if (!cirrus_fp_register (operands[1], DFmode))
1281 operands[1] = force_reg (DFmode, operands[1]);
1282 if (!cirrus_fp_register (operands[2], DFmode))
1283 operands[2] = force_reg (DFmode, operands[2]);
1284 }
1285 ")
1286
1287 \f
1288 ;; Multiplication insns
1289
1290 (define_expand "mulsi3"
1291 [(set (match_operand:SI 0 "s_register_operand" "")
1292 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1293 (match_operand:SI 1 "s_register_operand" "")))]
1294 "TARGET_EITHER"
1295 ""
1296 )
1297
1298 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1299 (define_insn "*arm_mulsi3"
1300 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1301 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1302 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1303 "TARGET_32BIT && !arm_arch6"
1304 "mul%?\\t%0, %2, %1"
1305 [(set_attr "insn" "mul")
1306 (set_attr "predicable" "yes")]
1307 )
1308
1309 (define_insn "*arm_mulsi3_v6"
1310 [(set (match_operand:SI 0 "s_register_operand" "=r")
1311 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1312 (match_operand:SI 2 "s_register_operand" "r")))]
1313 "TARGET_32BIT && arm_arch6"
1314 "mul%?\\t%0, %1, %2"
1315 [(set_attr "insn" "mul")
1316 (set_attr "predicable" "yes")]
1317 )
1318
1319 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1320 ; 1 and 2 are the same, because reload will make operand 0 match
1321 ; operand 1 without realizing that this conflicts with operand 2. We fix
1322 ; this by adding another alternative to match this case, and then `reload'
1323 ; it ourselves. This alternative must come first.
1324 (define_insn "*thumb_mulsi3"
1325 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1326 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1327 (match_operand:SI 2 "register_operand" "l,l,l")))]
1328 "TARGET_THUMB1 && !arm_arch6"
1329 "*
1330 if (which_alternative < 2)
1331 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1332 else
1333 return \"mul\\t%0, %2\";
1334 "
1335 [(set_attr "length" "4,4,2")
1336 (set_attr "insn" "mul")]
1337 )
1338
1339 (define_insn "*thumb_mulsi3_v6"
1340 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1341 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1342 (match_operand:SI 2 "register_operand" "l,0,0")))]
1343 "TARGET_THUMB1 && arm_arch6"
1344 "@
1345 mul\\t%0, %2
1346 mul\\t%0, %1
1347 mul\\t%0, %1"
1348 [(set_attr "length" "2")
1349 (set_attr "insn" "mul")]
1350 )
1351
1352 (define_insn "*mulsi3_compare0"
1353 [(set (reg:CC_NOOV CC_REGNUM)
1354 (compare:CC_NOOV (mult:SI
1355 (match_operand:SI 2 "s_register_operand" "r,r")
1356 (match_operand:SI 1 "s_register_operand" "%0,r"))
1357 (const_int 0)))
1358 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1359 (mult:SI (match_dup 2) (match_dup 1)))]
1360 "TARGET_ARM && !arm_arch6"
1361 "mul%.\\t%0, %2, %1"
1362 [(set_attr "conds" "set")
1363 (set_attr "insn" "muls")]
1364 )
1365
1366 (define_insn "*mulsi3_compare0_v6"
1367 [(set (reg:CC_NOOV CC_REGNUM)
1368 (compare:CC_NOOV (mult:SI
1369 (match_operand:SI 2 "s_register_operand" "r")
1370 (match_operand:SI 1 "s_register_operand" "r"))
1371 (const_int 0)))
1372 (set (match_operand:SI 0 "s_register_operand" "=r")
1373 (mult:SI (match_dup 2) (match_dup 1)))]
1374 "TARGET_ARM && arm_arch6 && optimize_size"
1375 "mul%.\\t%0, %2, %1"
1376 [(set_attr "conds" "set")
1377 (set_attr "insn" "muls")]
1378 )
1379
1380 (define_insn "*mulsi_compare0_scratch"
1381 [(set (reg:CC_NOOV CC_REGNUM)
1382 (compare:CC_NOOV (mult:SI
1383 (match_operand:SI 2 "s_register_operand" "r,r")
1384 (match_operand:SI 1 "s_register_operand" "%0,r"))
1385 (const_int 0)))
1386 (clobber (match_scratch:SI 0 "=&r,&r"))]
1387 "TARGET_ARM && !arm_arch6"
1388 "mul%.\\t%0, %2, %1"
1389 [(set_attr "conds" "set")
1390 (set_attr "insn" "muls")]
1391 )
1392
1393 (define_insn "*mulsi_compare0_scratch_v6"
1394 [(set (reg:CC_NOOV CC_REGNUM)
1395 (compare:CC_NOOV (mult:SI
1396 (match_operand:SI 2 "s_register_operand" "r")
1397 (match_operand:SI 1 "s_register_operand" "r"))
1398 (const_int 0)))
1399 (clobber (match_scratch:SI 0 "=r"))]
1400 "TARGET_ARM && arm_arch6 && optimize_size"
1401 "mul%.\\t%0, %2, %1"
1402 [(set_attr "conds" "set")
1403 (set_attr "insn" "muls")]
1404 )
1405
1406 ;; Unnamed templates to match MLA instruction.
1407
1408 (define_insn "*mulsi3addsi"
1409 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1410 (plus:SI
1411 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1412 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1413 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1414 "TARGET_32BIT && !arm_arch6"
1415 "mla%?\\t%0, %2, %1, %3"
1416 [(set_attr "insn" "mla")
1417 (set_attr "predicable" "yes")]
1418 )
1419
1420 (define_insn "*mulsi3addsi_v6"
1421 [(set (match_operand:SI 0 "s_register_operand" "=r")
1422 (plus:SI
1423 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1424 (match_operand:SI 1 "s_register_operand" "r"))
1425 (match_operand:SI 3 "s_register_operand" "r")))]
1426 "TARGET_32BIT && arm_arch6"
1427 "mla%?\\t%0, %2, %1, %3"
1428 [(set_attr "insn" "mla")
1429 (set_attr "predicable" "yes")]
1430 )
1431
1432 (define_insn "*mulsi3addsi_compare0"
1433 [(set (reg:CC_NOOV CC_REGNUM)
1434 (compare:CC_NOOV
1435 (plus:SI (mult:SI
1436 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1437 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1438 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1439 (const_int 0)))
1440 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1441 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1442 (match_dup 3)))]
1443 "TARGET_ARM && arm_arch6"
1444 "mla%.\\t%0, %2, %1, %3"
1445 [(set_attr "conds" "set")
1446 (set_attr "insn" "mlas")]
1447 )
1448
1449 (define_insn "*mulsi3addsi_compare0_v6"
1450 [(set (reg:CC_NOOV CC_REGNUM)
1451 (compare:CC_NOOV
1452 (plus:SI (mult:SI
1453 (match_operand:SI 2 "s_register_operand" "r")
1454 (match_operand:SI 1 "s_register_operand" "r"))
1455 (match_operand:SI 3 "s_register_operand" "r"))
1456 (const_int 0)))
1457 (set (match_operand:SI 0 "s_register_operand" "=r")
1458 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1459 (match_dup 3)))]
1460 "TARGET_ARM && arm_arch6 && optimize_size"
1461 "mla%.\\t%0, %2, %1, %3"
1462 [(set_attr "conds" "set")
1463 (set_attr "insn" "mlas")]
1464 )
1465
1466 (define_insn "*mulsi3addsi_compare0_scratch"
1467 [(set (reg:CC_NOOV CC_REGNUM)
1468 (compare:CC_NOOV
1469 (plus:SI (mult:SI
1470 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1471 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1472 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1473 (const_int 0)))
1474 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1475 "TARGET_ARM && !arm_arch6"
1476 "mla%.\\t%0, %2, %1, %3"
1477 [(set_attr "conds" "set")
1478 (set_attr "insn" "mlas")]
1479 )
1480
1481 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1482 [(set (reg:CC_NOOV CC_REGNUM)
1483 (compare:CC_NOOV
1484 (plus:SI (mult:SI
1485 (match_operand:SI 2 "s_register_operand" "r")
1486 (match_operand:SI 1 "s_register_operand" "r"))
1487 (match_operand:SI 3 "s_register_operand" "r"))
1488 (const_int 0)))
1489 (clobber (match_scratch:SI 0 "=r"))]
1490 "TARGET_ARM && arm_arch6 && optimize_size"
1491 "mla%.\\t%0, %2, %1, %3"
1492 [(set_attr "conds" "set")
1493 (set_attr "insn" "mlas")]
1494 )
1495
1496 (define_insn "*mulsi3subsi"
1497 [(set (match_operand:SI 0 "s_register_operand" "=r")
1498 (minus:SI
1499 (match_operand:SI 3 "s_register_operand" "r")
1500 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1501 (match_operand:SI 1 "s_register_operand" "r"))))]
1502 "TARGET_32BIT && arm_arch_thumb2"
1503 "mls%?\\t%0, %2, %1, %3"
1504 [(set_attr "insn" "mla")
1505 (set_attr "predicable" "yes")]
1506 )
1507
1508 (define_expand "maddsidi4"
1509 [(set (match_operand:DI 0 "s_register_operand" "")
1510 (plus:DI
1511 (mult:DI
1512 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1513 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1514 (match_operand:DI 3 "s_register_operand" "")))]
1515 "TARGET_32BIT && arm_arch3m"
1516 "")
1517
1518 (define_insn "*mulsidi3adddi"
1519 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1520 (plus:DI
1521 (mult:DI
1522 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1523 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1524 (match_operand:DI 1 "s_register_operand" "0")))]
1525 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1526 "smlal%?\\t%Q0, %R0, %3, %2"
1527 [(set_attr "insn" "smlal")
1528 (set_attr "predicable" "yes")]
1529 )
1530
1531 (define_insn "*mulsidi3adddi_v6"
1532 [(set (match_operand:DI 0 "s_register_operand" "=r")
1533 (plus:DI
1534 (mult:DI
1535 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1536 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1537 (match_operand:DI 1 "s_register_operand" "0")))]
1538 "TARGET_32BIT && arm_arch6"
1539 "smlal%?\\t%Q0, %R0, %3, %2"
1540 [(set_attr "insn" "smlal")
1541 (set_attr "predicable" "yes")]
1542 )
1543
1544 ;; 32x32->64 widening multiply.
1545 ;; As with mulsi3, the only difference between the v3-5 and v6+
1546 ;; versions of these patterns is the requirement that the output not
1547 ;; overlap the inputs, but that still means we have to have a named
1548 ;; expander and two different starred insns.
1549
1550 (define_expand "mulsidi3"
1551 [(set (match_operand:DI 0 "s_register_operand" "")
1552 (mult:DI
1553 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1554 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1555 "TARGET_32BIT && arm_arch3m"
1556 ""
1557 )
1558
1559 (define_insn "*mulsidi3_nov6"
1560 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1561 (mult:DI
1562 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1563 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1564 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1565 "smull%?\\t%Q0, %R0, %1, %2"
1566 [(set_attr "insn" "smull")
1567 (set_attr "predicable" "yes")]
1568 )
1569
1570 (define_insn "*mulsidi3_v6"
1571 [(set (match_operand:DI 0 "s_register_operand" "=r")
1572 (mult:DI
1573 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1574 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1575 "TARGET_32BIT && arm_arch6"
1576 "smull%?\\t%Q0, %R0, %1, %2"
1577 [(set_attr "insn" "smull")
1578 (set_attr "predicable" "yes")]
1579 )
1580
1581 (define_expand "umulsidi3"
1582 [(set (match_operand:DI 0 "s_register_operand" "")
1583 (mult:DI
1584 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1585 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1586 "TARGET_32BIT && arm_arch3m"
1587 ""
1588 )
1589
1590 (define_insn "*umulsidi3_nov6"
1591 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1592 (mult:DI
1593 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1594 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1595 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1596 "umull%?\\t%Q0, %R0, %1, %2"
1597 [(set_attr "insn" "umull")
1598 (set_attr "predicable" "yes")]
1599 )
1600
1601 (define_insn "*umulsidi3_v6"
1602 [(set (match_operand:DI 0 "s_register_operand" "=r")
1603 (mult:DI
1604 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1605 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1606 "TARGET_32BIT && arm_arch6"
1607 "umull%?\\t%Q0, %R0, %1, %2"
1608 [(set_attr "insn" "umull")
1609 (set_attr "predicable" "yes")]
1610 )
1611
1612 (define_expand "umaddsidi4"
1613 [(set (match_operand:DI 0 "s_register_operand" "")
1614 (plus:DI
1615 (mult:DI
1616 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1617 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1618 (match_operand:DI 3 "s_register_operand" "")))]
1619 "TARGET_32BIT && arm_arch3m"
1620 "")
1621
1622 (define_insn "*umulsidi3adddi"
1623 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1624 (plus:DI
1625 (mult:DI
1626 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1627 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1628 (match_operand:DI 1 "s_register_operand" "0")))]
1629 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1630 "umlal%?\\t%Q0, %R0, %3, %2"
1631 [(set_attr "insn" "umlal")
1632 (set_attr "predicable" "yes")]
1633 )
1634
1635 (define_insn "*umulsidi3adddi_v6"
1636 [(set (match_operand:DI 0 "s_register_operand" "=r")
1637 (plus:DI
1638 (mult:DI
1639 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1640 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1641 (match_operand:DI 1 "s_register_operand" "0")))]
1642 "TARGET_32BIT && arm_arch6"
1643 "umlal%?\\t%Q0, %R0, %3, %2"
1644 [(set_attr "insn" "umlal")
1645 (set_attr "predicable" "yes")]
1646 )
1647
1648 (define_expand "smulsi3_highpart"
1649 [(parallel
1650 [(set (match_operand:SI 0 "s_register_operand" "")
1651 (truncate:SI
1652 (lshiftrt:DI
1653 (mult:DI
1654 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1655 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1656 (const_int 32))))
1657 (clobber (match_scratch:SI 3 ""))])]
1658 "TARGET_32BIT && arm_arch3m"
1659 ""
1660 )
1661
1662 (define_insn "*smulsi3_highpart_nov6"
1663 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1664 (truncate:SI
1665 (lshiftrt:DI
1666 (mult:DI
1667 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1668 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1669 (const_int 32))))
1670 (clobber (match_scratch:SI 3 "=&r,&r"))]
1671 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1672 "smull%?\\t%3, %0, %2, %1"
1673 [(set_attr "insn" "smull")
1674 (set_attr "predicable" "yes")]
1675 )
1676
1677 (define_insn "*smulsi3_highpart_v6"
1678 [(set (match_operand:SI 0 "s_register_operand" "=r")
1679 (truncate:SI
1680 (lshiftrt:DI
1681 (mult:DI
1682 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1683 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1684 (const_int 32))))
1685 (clobber (match_scratch:SI 3 "=r"))]
1686 "TARGET_32BIT && arm_arch6"
1687 "smull%?\\t%3, %0, %2, %1"
1688 [(set_attr "insn" "smull")
1689 (set_attr "predicable" "yes")]
1690 )
1691
1692 (define_expand "umulsi3_highpart"
1693 [(parallel
1694 [(set (match_operand:SI 0 "s_register_operand" "")
1695 (truncate:SI
1696 (lshiftrt:DI
1697 (mult:DI
1698 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1699 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1700 (const_int 32))))
1701 (clobber (match_scratch:SI 3 ""))])]
1702 "TARGET_32BIT && arm_arch3m"
1703 ""
1704 )
1705
1706 (define_insn "*umulsi3_highpart_nov6"
1707 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1708 (truncate:SI
1709 (lshiftrt:DI
1710 (mult:DI
1711 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1712 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1713 (const_int 32))))
1714 (clobber (match_scratch:SI 3 "=&r,&r"))]
1715 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1716 "umull%?\\t%3, %0, %2, %1"
1717 [(set_attr "insn" "umull")
1718 (set_attr "predicable" "yes")]
1719 )
1720
1721 (define_insn "*umulsi3_highpart_v6"
1722 [(set (match_operand:SI 0 "s_register_operand" "=r")
1723 (truncate:SI
1724 (lshiftrt:DI
1725 (mult:DI
1726 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1727 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1728 (const_int 32))))
1729 (clobber (match_scratch:SI 3 "=r"))]
1730 "TARGET_32BIT && arm_arch6"
1731 "umull%?\\t%3, %0, %2, %1"
1732 [(set_attr "insn" "umull")
1733 (set_attr "predicable" "yes")]
1734 )
1735
1736 (define_insn "mulhisi3"
1737 [(set (match_operand:SI 0 "s_register_operand" "=r")
1738 (mult:SI (sign_extend:SI
1739 (match_operand:HI 1 "s_register_operand" "%r"))
1740 (sign_extend:SI
1741 (match_operand:HI 2 "s_register_operand" "r"))))]
1742 "TARGET_DSP_MULTIPLY"
1743 "smulbb%?\\t%0, %1, %2"
1744 [(set_attr "insn" "smulxy")
1745 (set_attr "predicable" "yes")]
1746 )
1747
1748 (define_insn "*mulhisi3tb"
1749 [(set (match_operand:SI 0 "s_register_operand" "=r")
1750 (mult:SI (ashiftrt:SI
1751 (match_operand:SI 1 "s_register_operand" "r")
1752 (const_int 16))
1753 (sign_extend:SI
1754 (match_operand:HI 2 "s_register_operand" "r"))))]
1755 "TARGET_DSP_MULTIPLY"
1756 "smultb%?\\t%0, %1, %2"
1757 [(set_attr "insn" "smulxy")
1758 (set_attr "predicable" "yes")]
1759 )
1760
1761 (define_insn "*mulhisi3bt"
1762 [(set (match_operand:SI 0 "s_register_operand" "=r")
1763 (mult:SI (sign_extend:SI
1764 (match_operand:HI 1 "s_register_operand" "r"))
1765 (ashiftrt:SI
1766 (match_operand:SI 2 "s_register_operand" "r")
1767 (const_int 16))))]
1768 "TARGET_DSP_MULTIPLY"
1769 "smulbt%?\\t%0, %1, %2"
1770 [(set_attr "insn" "smulxy")
1771 (set_attr "predicable" "yes")]
1772 )
1773
1774 (define_insn "*mulhisi3tt"
1775 [(set (match_operand:SI 0 "s_register_operand" "=r")
1776 (mult:SI (ashiftrt:SI
1777 (match_operand:SI 1 "s_register_operand" "r")
1778 (const_int 16))
1779 (ashiftrt:SI
1780 (match_operand:SI 2 "s_register_operand" "r")
1781 (const_int 16))))]
1782 "TARGET_DSP_MULTIPLY"
1783 "smultt%?\\t%0, %1, %2"
1784 [(set_attr "insn" "smulxy")
1785 (set_attr "predicable" "yes")]
1786 )
1787
1788 (define_insn "maddhisi4"
1789 [(set (match_operand:SI 0 "s_register_operand" "=r")
1790 (plus:SI (match_operand:SI 3 "s_register_operand" "r")
1791 (mult:SI (sign_extend:SI
1792 (match_operand:HI 1 "s_register_operand" "%r"))
1793 (sign_extend:SI
1794 (match_operand:HI 2 "s_register_operand" "r")))))]
1795 "TARGET_DSP_MULTIPLY"
1796 "smlabb%?\\t%0, %1, %2, %3"
1797 [(set_attr "insn" "smlaxy")
1798 (set_attr "predicable" "yes")]
1799 )
1800
1801 (define_insn "*maddhidi4"
1802 [(set (match_operand:DI 0 "s_register_operand" "=r")
1803 (plus:DI
1804 (match_operand:DI 3 "s_register_operand" "0")
1805 (mult:DI (sign_extend:DI
1806 (match_operand:HI 1 "s_register_operand" "%r"))
1807 (sign_extend:DI
1808 (match_operand:HI 2 "s_register_operand" "r")))))]
1809 "TARGET_DSP_MULTIPLY"
1810 "smlalbb%?\\t%Q0, %R0, %1, %2"
1811 [(set_attr "insn" "smlalxy")
1812 (set_attr "predicable" "yes")])
1813
1814 (define_expand "mulsf3"
1815 [(set (match_operand:SF 0 "s_register_operand" "")
1816 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1817 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1818 "TARGET_32BIT && TARGET_HARD_FLOAT"
1819 "
1820 if (TARGET_MAVERICK
1821 && !cirrus_fp_register (operands[2], SFmode))
1822 operands[2] = force_reg (SFmode, operands[2]);
1823 ")
1824
1825 (define_expand "muldf3"
1826 [(set (match_operand:DF 0 "s_register_operand" "")
1827 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1828 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1829 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1830 "
1831 if (TARGET_MAVERICK
1832 && !cirrus_fp_register (operands[2], DFmode))
1833 operands[2] = force_reg (DFmode, operands[2]);
1834 ")
1835 \f
1836 ;; Division insns
1837
1838 (define_expand "divsf3"
1839 [(set (match_operand:SF 0 "s_register_operand" "")
1840 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1841 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1842 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1843 "")
1844
1845 (define_expand "divdf3"
1846 [(set (match_operand:DF 0 "s_register_operand" "")
1847 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1848 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1849 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1850 "")
1851 \f
1852 ;; Modulo insns
1853
1854 (define_expand "modsf3"
1855 [(set (match_operand:SF 0 "s_register_operand" "")
1856 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1857 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1858 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1859 "")
1860
1861 (define_expand "moddf3"
1862 [(set (match_operand:DF 0 "s_register_operand" "")
1863 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1864 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1865 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1866 "")
1867 \f
1868 ;; Boolean and,ior,xor insns
1869
1870 ;; Split up double word logical operations
1871
1872 ;; Split up simple DImode logical operations. Simply perform the logical
1873 ;; operation on the upper and lower halves of the registers.
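;; For example (illustrative only; the register assignment depends on the
;; allocator): a DImode AND with the low/high words of the destination in
;; r4/r5, of operand 1 in r0/r1 and of operand 2 in r2/r3 is split after
;; reload into two SImode operations:
;;   and r4, r0, r2   @ low halves
;;   and r5, r1, r3   @ high halves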
1874 (define_split
1875 [(set (match_operand:DI 0 "s_register_operand" "")
1876 (match_operator:DI 6 "logical_binary_operator"
1877 [(match_operand:DI 1 "s_register_operand" "")
1878 (match_operand:DI 2 "s_register_operand" "")]))]
1879 "TARGET_32BIT && reload_completed
1880 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1881 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1882 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1883 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1884 "
1885 {
1886 operands[3] = gen_highpart (SImode, operands[0]);
1887 operands[0] = gen_lowpart (SImode, operands[0]);
1888 operands[4] = gen_highpart (SImode, operands[1]);
1889 operands[1] = gen_lowpart (SImode, operands[1]);
1890 operands[5] = gen_highpart (SImode, operands[2]);
1891 operands[2] = gen_lowpart (SImode, operands[2]);
1892 }"
1893 )
1894
1895 (define_split
1896 [(set (match_operand:DI 0 "s_register_operand" "")
1897 (match_operator:DI 6 "logical_binary_operator"
1898 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1899 (match_operand:DI 1 "s_register_operand" "")]))]
1900 "TARGET_32BIT && reload_completed"
1901 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1902 (set (match_dup 3) (match_op_dup:SI 6
1903 [(ashiftrt:SI (match_dup 2) (const_int 31))
1904 (match_dup 4)]))]
1905 "
1906 {
1907 operands[3] = gen_highpart (SImode, operands[0]);
1908 operands[0] = gen_lowpart (SImode, operands[0]);
1909 operands[4] = gen_highpart (SImode, operands[1]);
1910 operands[1] = gen_lowpart (SImode, operands[1]);
1911 operands[5] = gen_highpart (SImode, operands[2]);
1912 operands[2] = gen_lowpart (SImode, operands[2]);
1913 }"
1914 )
1915
1916 ;; The zero extend of operand 2 means we can just copy the high part of
1917 ;; operand1 into operand0.
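;; For instance (sketch only): for d = a | zero_extend (b), the low word of
;; d is ORRed from the low words of a and b, while the high word of d is a
;; plain register move of the high word of a (an ORR with zero would be a
;; no-op).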
1918 (define_split
1919 [(set (match_operand:DI 0 "s_register_operand" "")
1920 (ior:DI
1921 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1922 (match_operand:DI 1 "s_register_operand" "")))]
1923 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1924 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1925 (set (match_dup 3) (match_dup 4))]
1926 "
1927 {
1928 operands[4] = gen_highpart (SImode, operands[1]);
1929 operands[3] = gen_highpart (SImode, operands[0]);
1930 operands[0] = gen_lowpart (SImode, operands[0]);
1931 operands[1] = gen_lowpart (SImode, operands[1]);
1932 }"
1933 )
1934
1935 ;; The zero extend of operand 2 means we can just copy the high part of
1936 ;; operand1 into operand0.
1937 (define_split
1938 [(set (match_operand:DI 0 "s_register_operand" "")
1939 (xor:DI
1940 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1941 (match_operand:DI 1 "s_register_operand" "")))]
1942 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1943 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1944 (set (match_dup 3) (match_dup 4))]
1945 "
1946 {
1947 operands[4] = gen_highpart (SImode, operands[1]);
1948 operands[3] = gen_highpart (SImode, operands[0]);
1949 operands[0] = gen_lowpart (SImode, operands[0]);
1950 operands[1] = gen_lowpart (SImode, operands[1]);
1951 }"
1952 )
1953
1954 (define_expand "anddi3"
1955 [(set (match_operand:DI 0 "s_register_operand" "")
1956 (and:DI (match_operand:DI 1 "s_register_operand" "")
1957 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
1958 "TARGET_32BIT"
1959 ""
1960 )
1961
1962 (define_insn "*anddi3_insn"
1963 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1964 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1965 (match_operand:DI 2 "s_register_operand" "r,r")))]
1966 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
1967 "#"
1968 [(set_attr "length" "8")]
1969 )
1970
1971 (define_insn_and_split "*anddi_zesidi_di"
1972 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1973 (and:DI (zero_extend:DI
1974 (match_operand:SI 2 "s_register_operand" "r,r"))
1975 (match_operand:DI 1 "s_register_operand" "0,r")))]
1976 "TARGET_32BIT"
1977 "#"
1978 "TARGET_32BIT && reload_completed"
1979 ; The zero extend of operand 2 clears the high word of the output
1980 ; operand.
1981 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1982 (set (match_dup 3) (const_int 0))]
1983 "
1984 {
1985 operands[3] = gen_highpart (SImode, operands[0]);
1986 operands[0] = gen_lowpart (SImode, operands[0]);
1987 operands[1] = gen_lowpart (SImode, operands[1]);
1988 }"
1989 [(set_attr "length" "8")]
1990 )
1991
1992 (define_insn "*anddi_sesdi_di"
1993 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1994 (and:DI (sign_extend:DI
1995 (match_operand:SI 2 "s_register_operand" "r,r"))
1996 (match_operand:DI 1 "s_register_operand" "0,r")))]
1997 "TARGET_32BIT"
1998 "#"
1999 [(set_attr "length" "8")]
2000 )
2001
2002 (define_expand "andsi3"
2003 [(set (match_operand:SI 0 "s_register_operand" "")
2004 (and:SI (match_operand:SI 1 "s_register_operand" "")
2005 (match_operand:SI 2 "reg_or_int_operand" "")))]
2006 "TARGET_EITHER"
2007 "
2008 if (TARGET_32BIT)
2009 {
2010 if (GET_CODE (operands[2]) == CONST_INT)
2011 {
2012 if (INTVAL (operands[2]) == 255 && arm_arch6)
2013 {
2014 operands[1] = convert_to_mode (QImode, operands[1], 1);
2015 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2016 operands[1]));
2017 }
2018 else
2019 arm_split_constant (AND, SImode, NULL_RTX,
2020 INTVAL (operands[2]), operands[0],
2021 operands[1],
2022 optimize && can_create_pseudo_p ());
2023
2024 DONE;
2025 }
2026 }
2027 else /* TARGET_THUMB1 */
2028 {
2029 if (GET_CODE (operands[2]) != CONST_INT)
2030 {
2031 rtx tmp = force_reg (SImode, operands[2]);
2032 if (rtx_equal_p (operands[0], operands[1]))
2033 operands[2] = tmp;
2034 else
2035 {
2036 operands[2] = operands[1];
2037 operands[1] = tmp;
2038 }
2039 }
2040 else
2041 {
2042 int i;
2043
2044 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2045 {
2046 operands[2] = force_reg (SImode,
2047 GEN_INT (~INTVAL (operands[2])));
2048
2049 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2050
2051 DONE;
2052 }
2053
2054 for (i = 9; i <= 31; i++)
2055 {
2056 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2057 {
2058 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2059 const0_rtx));
2060 DONE;
2061 }
2062 else if ((((HOST_WIDE_INT) 1) << i) - 1
2063 == ~INTVAL (operands[2]))
2064 {
2065 rtx shift = GEN_INT (i);
2066 rtx reg = gen_reg_rtx (SImode);
2067
2068 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2069 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2070
2071 DONE;
2072 }
2073 }
2074
2075 operands[2] = force_reg (SImode, operands[2]);
2076 }
2077 }
2078 "
2079 )
2080
2081 ; ??? Check split length for Thumb-2
2082 (define_insn_and_split "*arm_andsi3_insn"
2083 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2084 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2085 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2086 "TARGET_32BIT"
2087 "@
2088 and%?\\t%0, %1, %2
2089 bic%?\\t%0, %1, #%B2
2090 #"
2091 "TARGET_32BIT
2092 && GET_CODE (operands[2]) == CONST_INT
2093 && !(const_ok_for_arm (INTVAL (operands[2]))
2094 || const_ok_for_arm (~INTVAL (operands[2])))"
2095 [(clobber (const_int 0))]
2096 "
2097 arm_split_constant (AND, SImode, curr_insn,
2098 INTVAL (operands[2]), operands[0], operands[1], 0);
2099 DONE;
2100 "
2101 [(set_attr "length" "4,4,16")
2102 (set_attr "predicable" "yes")]
2103 )
2104
2105 (define_insn "*thumb1_andsi3_insn"
2106 [(set (match_operand:SI 0 "register_operand" "=l")
2107 (and:SI (match_operand:SI 1 "register_operand" "%0")
2108 (match_operand:SI 2 "register_operand" "l")))]
2109 "TARGET_THUMB1"
2110 "and\\t%0, %2"
2111 [(set_attr "length" "2")
2112 (set_attr "conds" "set")])
2113
2114 (define_insn "*andsi3_compare0"
2115 [(set (reg:CC_NOOV CC_REGNUM)
2116 (compare:CC_NOOV
2117 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2118 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2119 (const_int 0)))
2120 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2121 (and:SI (match_dup 1) (match_dup 2)))]
2122 "TARGET_32BIT"
2123 "@
2124 and%.\\t%0, %1, %2
2125 bic%.\\t%0, %1, #%B2"
2126 [(set_attr "conds" "set")]
2127 )
2128
2129 (define_insn "*andsi3_compare0_scratch"
2130 [(set (reg:CC_NOOV CC_REGNUM)
2131 (compare:CC_NOOV
2132 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2133 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2134 (const_int 0)))
2135 (clobber (match_scratch:SI 2 "=X,r"))]
2136 "TARGET_32BIT"
2137 "@
2138 tst%?\\t%0, %1
2139 bic%.\\t%2, %0, #%B1"
2140 [(set_attr "conds" "set")]
2141 )
2142
2143 (define_insn "*zeroextractsi_compare0_scratch"
2144 [(set (reg:CC_NOOV CC_REGNUM)
2145 (compare:CC_NOOV (zero_extract:SI
2146 (match_operand:SI 0 "s_register_operand" "r")
2147 (match_operand 1 "const_int_operand" "n")
2148 (match_operand 2 "const_int_operand" "n"))
2149 (const_int 0)))]
2150 "TARGET_32BIT
2151 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2152 && INTVAL (operands[1]) > 0
2153 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2154 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2155 "*
2156 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2157 << INTVAL (operands[2]));
2158 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2159 return \"\";
2160 "
2161 [(set_attr "conds" "set")]
2162 )
2163
2164 (define_insn_and_split "*ne_zeroextractsi"
2165 [(set (match_operand:SI 0 "s_register_operand" "=r")
2166 (ne:SI (zero_extract:SI
2167 (match_operand:SI 1 "s_register_operand" "r")
2168 (match_operand:SI 2 "const_int_operand" "n")
2169 (match_operand:SI 3 "const_int_operand" "n"))
2170 (const_int 0)))
2171 (clobber (reg:CC CC_REGNUM))]
2172 "TARGET_32BIT
2173 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2174 && INTVAL (operands[2]) > 0
2175 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2176 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2177 "#"
2178 "TARGET_32BIT
2179 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2180 && INTVAL (operands[2]) > 0
2181 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2182 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2183 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2184 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2185 (const_int 0)))
2186 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2187 (set (match_dup 0)
2188 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2189 (match_dup 0) (const_int 1)))]
2190 "
2191 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2192 << INTVAL (operands[3]));
2193 "
2194 [(set_attr "conds" "clob")
2195 (set (attr "length")
2196 (if_then_else (eq_attr "is_thumb" "yes")
2197 (const_int 12)
2198 (const_int 8)))]
2199 )
2200
2201 (define_insn_and_split "*ne_zeroextractsi_shifted"
2202 [(set (match_operand:SI 0 "s_register_operand" "=r")
2203 (ne:SI (zero_extract:SI
2204 (match_operand:SI 1 "s_register_operand" "r")
2205 (match_operand:SI 2 "const_int_operand" "n")
2206 (const_int 0))
2207 (const_int 0)))
2208 (clobber (reg:CC CC_REGNUM))]
2209 "TARGET_ARM"
2210 "#"
2211 "TARGET_ARM"
2212 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2213 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2214 (const_int 0)))
2215 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2216 (set (match_dup 0)
2217 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2218 (match_dup 0) (const_int 1)))]
2219 "
2220 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2221 "
2222 [(set_attr "conds" "clob")
2223 (set_attr "length" "8")]
2224 )
2225
2226 (define_insn_and_split "*ite_ne_zeroextractsi"
2227 [(set (match_operand:SI 0 "s_register_operand" "=r")
2228 (if_then_else:SI (ne (zero_extract:SI
2229 (match_operand:SI 1 "s_register_operand" "r")
2230 (match_operand:SI 2 "const_int_operand" "n")
2231 (match_operand:SI 3 "const_int_operand" "n"))
2232 (const_int 0))
2233 (match_operand:SI 4 "arm_not_operand" "rIK")
2234 (const_int 0)))
2235 (clobber (reg:CC CC_REGNUM))]
2236 "TARGET_ARM
2237 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2238 && INTVAL (operands[2]) > 0
2239 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2240 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2241 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2242 "#"
2243 "TARGET_ARM
2244 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2245 && INTVAL (operands[2]) > 0
2246 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2247 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2248 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2249 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2250 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2251 (const_int 0)))
2252 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2253 (set (match_dup 0)
2254 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2255 (match_dup 0) (match_dup 4)))]
2256 "
2257 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2258 << INTVAL (operands[3]));
2259 "
2260 [(set_attr "conds" "clob")
2261 (set_attr "length" "8")]
2262 )
2263
2264 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2265 [(set (match_operand:SI 0 "s_register_operand" "=r")
2266 (if_then_else:SI (ne (zero_extract:SI
2267 (match_operand:SI 1 "s_register_operand" "r")
2268 (match_operand:SI 2 "const_int_operand" "n")
2269 (const_int 0))
2270 (const_int 0))
2271 (match_operand:SI 3 "arm_not_operand" "rIK")
2272 (const_int 0)))
2273 (clobber (reg:CC CC_REGNUM))]
2274 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2275 "#"
2276 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2277 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2278 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2279 (const_int 0)))
2280 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2281 (set (match_dup 0)
2282 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2283 (match_dup 0) (match_dup 3)))]
2284 "
2285 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2286 "
2287 [(set_attr "conds" "clob")
2288 (set_attr "length" "8")]
2289 )
2290
2291 (define_split
2292 [(set (match_operand:SI 0 "s_register_operand" "")
2293 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2294 (match_operand:SI 2 "const_int_operand" "")
2295 (match_operand:SI 3 "const_int_operand" "")))
2296 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2297 "TARGET_THUMB1"
2298 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2299 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2300 "{
2301 HOST_WIDE_INT temp = INTVAL (operands[2]);
2302
2303 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2304 operands[3] = GEN_INT (32 - temp);
2305 }"
2306 )
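
;; A worked example of the shift pair used above (illustrative values):
;; extracting an 8-bit field starting at bit 4 of x is
;;   (x << (32 - 8 - 4)) >> (32 - 8)   [logical shifts]
;; e.g. for x = 0xABCD1234:  x << 20 = 0x23400000,  0x23400000 >> 24 = 0x23,
;; which is exactly bits [4,12) of x.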
2307
2308 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
2309 (define_split
2310 [(set (match_operand:SI 0 "s_register_operand" "")
2311 (match_operator:SI 1 "shiftable_operator"
2312 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2313 (match_operand:SI 3 "const_int_operand" "")
2314 (match_operand:SI 4 "const_int_operand" ""))
2315 (match_operand:SI 5 "s_register_operand" "")]))
2316 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2317 "TARGET_ARM"
2318 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2319 (set (match_dup 0)
2320 (match_op_dup 1
2321 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2322 (match_dup 5)]))]
2323 "{
2324 HOST_WIDE_INT temp = INTVAL (operands[3]);
2325
2326 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2327 operands[4] = GEN_INT (32 - temp);
2328 }"
2329 )
2330
2331 (define_split
2332 [(set (match_operand:SI 0 "s_register_operand" "")
2333 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2334 (match_operand:SI 2 "const_int_operand" "")
2335 (match_operand:SI 3 "const_int_operand" "")))]
2336 "TARGET_THUMB1"
2337 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2338 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2339 "{
2340 HOST_WIDE_INT temp = INTVAL (operands[2]);
2341
2342 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2343 operands[3] = GEN_INT (32 - temp);
2344 }"
2345 )
2346
2347 (define_split
2348 [(set (match_operand:SI 0 "s_register_operand" "")
2349 (match_operator:SI 1 "shiftable_operator"
2350 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2351 (match_operand:SI 3 "const_int_operand" "")
2352 (match_operand:SI 4 "const_int_operand" ""))
2353 (match_operand:SI 5 "s_register_operand" "")]))
2354 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2355 "TARGET_ARM"
2356 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2357 (set (match_dup 0)
2358 (match_op_dup 1
2359 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2360 (match_dup 5)]))]
2361 "{
2362 HOST_WIDE_INT temp = INTVAL (operands[3]);
2363
2364 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2365 operands[4] = GEN_INT (32 - temp);
2366 }"
2367 )
2368
2369 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2370 ;;; represented by the bitfield, then this will produce incorrect results.
2371 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2372 ;;; which have a real bit-field insert instruction, the truncation happens
2373 ;;; in the bit-field insert instruction itself. Since arm does not have a
2374 ;;; bit-field insert instruction, we would have to emit code here to truncate
2375 ;;; the value before we insert. This loses some of the advantage of having
2376 ;;; this insv pattern, so this pattern needs to be reevaluated.
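;;;
;;; As a concrete illustration (hypothetical values): inserting the value 5
;;; into a 2-bit field must behave as if only 5 & 3 = 1 were written; if the
;;; stray third bit were allowed through, it would corrupt the bit just above
;;; the field in operand 0.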
2377
2378 (define_expand "insv"
2379 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2380 (match_operand:SI 1 "general_operand" "")
2381 (match_operand:SI 2 "general_operand" ""))
2382 (match_operand:SI 3 "reg_or_int_operand" ""))]
2383 "TARGET_ARM || arm_arch_thumb2"
2384 "
2385 {
2386 int start_bit = INTVAL (operands[2]);
2387 int width = INTVAL (operands[1]);
2388 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2389 rtx target, subtarget;
2390
2391 if (arm_arch_thumb2)
2392 {
2393 bool use_bfi = TRUE;
2394
2395 if (GET_CODE (operands[3]) == CONST_INT)
2396 {
2397 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2398
2399 if (val == 0)
2400 {
2401 emit_insn (gen_insv_zero (operands[0], operands[1],
2402 operands[2]));
2403 DONE;
2404 }
2405
2406 /* See if the set can be done with a single orr instruction. */
2407 if (val == mask && const_ok_for_arm (val << start_bit))
2408 use_bfi = FALSE;
2409 }
2410
2411 if (use_bfi)
2412 {
2413 if (GET_CODE (operands[3]) != REG)
2414 operands[3] = force_reg (SImode, operands[3]);
2415
2416 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2417 operands[3]));
2418 DONE;
2419 }
2420 }
2421
2422 target = copy_rtx (operands[0]);
2423 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2424 subreg as the final target. */
2425 if (GET_CODE (target) == SUBREG)
2426 {
2427 subtarget = gen_reg_rtx (SImode);
2428 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2429 < GET_MODE_SIZE (SImode))
2430 target = SUBREG_REG (target);
2431 }
2432 else
2433 subtarget = target;
2434
2435 if (GET_CODE (operands[3]) == CONST_INT)
2436 {
2437 /* Since we are inserting a known constant, we may be able to
2438 reduce the number of bits that we have to clear so that
2439 the mask becomes simple. */
2440 /* ??? This code does not check to see if the new mask is actually
2441 simpler. It may not be. */
2442 rtx op1 = gen_reg_rtx (SImode);
2443 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2444 start of this pattern. */
2445 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2446 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2447
2448 emit_insn (gen_andsi3 (op1, operands[0],
2449 gen_int_mode (~mask2, SImode)));
2450 emit_insn (gen_iorsi3 (subtarget, op1,
2451 gen_int_mode (op3_value << start_bit, SImode)));
2452 }
2453 else if (start_bit == 0
2454 && !(const_ok_for_arm (mask)
2455 || const_ok_for_arm (~mask)))
2456 {
2457 /* A trick: since we are setting the bottom bits in the word,
2458 we can shift operand[3] up, operand[0] down, OR them together
2459 and rotate the result back again. This takes 3 insns, and
2460 the third might be mergeable into another op. */
2461 /* The shift up copes with the possibility that operand[3] is
2462 wider than the bitfield. */
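/* Worked example (illustrative values only): width = 12, start_bit = 0,
   operands[0] = 0xABCDE123, operands[3] = 0x456:
     op0 = 0x456 << 20        = 0x45600000
     op1 = 0xABCDE123 >> 12   = 0x000ABCDE
     op1 | op0                = 0x456ABCDE
     rotate left by 12        = 0xABCDE456  (low 12 bits replaced).  */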
2463 rtx op0 = gen_reg_rtx (SImode);
2464 rtx op1 = gen_reg_rtx (SImode);
2465
2466 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2467 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2468 emit_insn (gen_iorsi3 (op1, op1, op0));
2469 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2470 }
2471 else if ((width + start_bit == 32)
2472 && !(const_ok_for_arm (mask)
2473 || const_ok_for_arm (~mask)))
2474 {
2475 /* Similar trick, but slightly less efficient. */
2476
2477 rtx op0 = gen_reg_rtx (SImode);
2478 rtx op1 = gen_reg_rtx (SImode);
2479
2480 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2481 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2482 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2483 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2484 }
2485 else
2486 {
2487 rtx op0 = gen_int_mode (mask, SImode);
2488 rtx op1 = gen_reg_rtx (SImode);
2489 rtx op2 = gen_reg_rtx (SImode);
2490
2491 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2492 {
2493 rtx tmp = gen_reg_rtx (SImode);
2494
2495 emit_insn (gen_movsi (tmp, op0));
2496 op0 = tmp;
2497 }
2498
2499 /* Mask out any bits in operand[3] that are not needed. */
2500 emit_insn (gen_andsi3 (op1, operands[3], op0));
2501
2502 if (GET_CODE (op0) == CONST_INT
2503 && (const_ok_for_arm (mask << start_bit)
2504 || const_ok_for_arm (~(mask << start_bit))))
2505 {
2506 op0 = gen_int_mode (~(mask << start_bit), SImode);
2507 emit_insn (gen_andsi3 (op2, operands[0], op0));
2508 }
2509 else
2510 {
2511 if (GET_CODE (op0) == CONST_INT)
2512 {
2513 rtx tmp = gen_reg_rtx (SImode);
2514
2515 emit_insn (gen_movsi (tmp, op0));
2516 op0 = tmp;
2517 }
2518
2519 if (start_bit != 0)
2520 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2521
2522 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2523 }
2524
2525 if (start_bit != 0)
2526 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2527
2528 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2529 }
2530
2531 if (subtarget != target)
2532 {
2533 /* If TARGET is still a SUBREG, then it must be wider than a word,
2534 so we must be careful only to set the subword we were asked to. */
2535 if (GET_CODE (target) == SUBREG)
2536 emit_move_insn (target, subtarget);
2537 else
2538 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2539 }
2540
2541 DONE;
2542 }"
2543 )
2544
2545 (define_insn "insv_zero"
2546 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2547 (match_operand:SI 1 "const_int_operand" "M")
2548 (match_operand:SI 2 "const_int_operand" "M"))
2549 (const_int 0))]
2550 "arm_arch_thumb2"
2551 "bfc%?\t%0, %2, %1"
2552 [(set_attr "length" "4")
2553 (set_attr "predicable" "yes")]
2554 )
2555
2556 (define_insn "insv_t2"
2557 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2558 (match_operand:SI 1 "const_int_operand" "M")
2559 (match_operand:SI 2 "const_int_operand" "M"))
2560 (match_operand:SI 3 "s_register_operand" "r"))]
2561 "arm_arch_thumb2"
2562 "bfi%?\t%0, %3, %2, %1"
2563 [(set_attr "length" "4")
2564 (set_attr "predicable" "yes")]
2565 )
2566
2567 ; constants for op 2 will never be given to these patterns.
2568 (define_insn_and_split "*anddi_notdi_di"
2569 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2570 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2571 (match_operand:DI 2 "s_register_operand" "r,0")))]
2572 "TARGET_32BIT"
2573 "#"
2574 "TARGET_32BIT && reload_completed
2575 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2576 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2577 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2578 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2579 "
2580 {
2581 operands[3] = gen_highpart (SImode, operands[0]);
2582 operands[0] = gen_lowpart (SImode, operands[0]);
2583 operands[4] = gen_highpart (SImode, operands[1]);
2584 operands[1] = gen_lowpart (SImode, operands[1]);
2585 operands[5] = gen_highpart (SImode, operands[2]);
2586 operands[2] = gen_lowpart (SImode, operands[2]);
2587 }"
2588 [(set_attr "length" "8")
2589 (set_attr "predicable" "yes")]
2590 )
2591
2592 (define_insn_and_split "*anddi_notzesidi_di"
2593 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2594 (and:DI (not:DI (zero_extend:DI
2595 (match_operand:SI 2 "s_register_operand" "r,r")))
2596 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2597 "TARGET_32BIT"
2598 "@
2599 bic%?\\t%Q0, %Q1, %2
2600 #"
2601 ; (not (zero_extend ...)) allows us to just copy the high word from
2602 ; operand1 to operand0.
2603 "TARGET_32BIT
2604 && reload_completed
2605 && operands[0] != operands[1]"
2606 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2607 (set (match_dup 3) (match_dup 4))]
2608 "
2609 {
2610 operands[3] = gen_highpart (SImode, operands[0]);
2611 operands[0] = gen_lowpart (SImode, operands[0]);
2612 operands[4] = gen_highpart (SImode, operands[1]);
2613 operands[1] = gen_lowpart (SImode, operands[1]);
2614 }"
2615 [(set_attr "length" "4,8")
2616 (set_attr "predicable" "yes")]
2617 )
2618
2619 (define_insn_and_split "*anddi_notsesidi_di"
2620 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2621 (and:DI (not:DI (sign_extend:DI
2622 (match_operand:SI 2 "s_register_operand" "r,r")))
2623 (match_operand:DI 1 "s_register_operand" "0,r")))]
2624 "TARGET_32BIT"
2625 "#"
2626 "TARGET_32BIT && reload_completed"
2627 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2628 (set (match_dup 3) (and:SI (not:SI
2629 (ashiftrt:SI (match_dup 2) (const_int 31)))
2630 (match_dup 4)))]
2631 "
2632 {
2633 operands[3] = gen_highpart (SImode, operands[0]);
2634 operands[0] = gen_lowpart (SImode, operands[0]);
2635 operands[4] = gen_highpart (SImode, operands[1]);
2636 operands[1] = gen_lowpart (SImode, operands[1]);
2637 }"
2638 [(set_attr "length" "8")
2639 (set_attr "predicable" "yes")]
2640 )
2641
2642 (define_insn "andsi_notsi_si"
2643 [(set (match_operand:SI 0 "s_register_operand" "=r")
2644 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2645 (match_operand:SI 1 "s_register_operand" "r")))]
2646 "TARGET_32BIT"
2647 "bic%?\\t%0, %1, %2"
2648 [(set_attr "predicable" "yes")]
2649 )
2650
2651 (define_insn "thumb1_bicsi3"
2652 [(set (match_operand:SI 0 "register_operand" "=l")
2653 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2654 (match_operand:SI 2 "register_operand" "0")))]
2655 "TARGET_THUMB1"
2656 "bic\\t%0, %1"
2657 [(set_attr "length" "2")
2658 (set_attr "conds" "set")])
2659
2660 (define_insn "andsi_not_shiftsi_si"
2661 [(set (match_operand:SI 0 "s_register_operand" "=r")
2662 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2663 [(match_operand:SI 2 "s_register_operand" "r")
2664 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2665 (match_operand:SI 1 "s_register_operand" "r")))]
2666 "TARGET_ARM"
2667 "bic%?\\t%0, %1, %2%S4"
2668 [(set_attr "predicable" "yes")
2669 (set_attr "shift" "2")
2670 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2671 (const_string "alu_shift")
2672 (const_string "alu_shift_reg")))]
2673 )
2674
2675 (define_insn "*andsi_notsi_si_compare0"
2676 [(set (reg:CC_NOOV CC_REGNUM)
2677 (compare:CC_NOOV
2678 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2679 (match_operand:SI 1 "s_register_operand" "r"))
2680 (const_int 0)))
2681 (set (match_operand:SI 0 "s_register_operand" "=r")
2682 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2683 "TARGET_32BIT"
2684 "bic%.\\t%0, %1, %2"
2685 [(set_attr "conds" "set")]
2686 )
2687
2688 (define_insn "*andsi_notsi_si_compare0_scratch"
2689 [(set (reg:CC_NOOV CC_REGNUM)
2690 (compare:CC_NOOV
2691 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2692 (match_operand:SI 1 "s_register_operand" "r"))
2693 (const_int 0)))
2694 (clobber (match_scratch:SI 0 "=r"))]
2695 "TARGET_32BIT"
2696 "bic%.\\t%0, %1, %2"
2697 [(set_attr "conds" "set")]
2698 )
2699
2700 (define_expand "iordi3"
2701 [(set (match_operand:DI 0 "s_register_operand" "")
2702 (ior:DI (match_operand:DI 1 "s_register_operand" "")
2703 (match_operand:DI 2 "neon_logic_op2" "")))]
2704 "TARGET_32BIT"
2705 ""
2706 )
2707
2708 (define_insn "*iordi3_insn"
2709 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2710 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2711 (match_operand:DI 2 "s_register_operand" "r,r")))]
2712 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2713 "#"
2714 [(set_attr "length" "8")
2715 (set_attr "predicable" "yes")]
2716 )
2717
2718 (define_insn "*iordi_zesidi_di"
2719 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2720 (ior:DI (zero_extend:DI
2721 (match_operand:SI 2 "s_register_operand" "r,r"))
2722 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2723 "TARGET_32BIT"
2724 "@
2725 orr%?\\t%Q0, %Q1, %2
2726 #"
2727 [(set_attr "length" "4,8")
2728 (set_attr "predicable" "yes")]
2729 )
2730
2731 (define_insn "*iordi_sesidi_di"
2732 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2733 (ior:DI (sign_extend:DI
2734 (match_operand:SI 2 "s_register_operand" "r,r"))
2735 (match_operand:DI 1 "s_register_operand" "0,r")))]
2736 "TARGET_32BIT"
2737 "#"
2738 [(set_attr "length" "8")
2739 (set_attr "predicable" "yes")]
2740 )
2741
2742 (define_expand "iorsi3"
2743 [(set (match_operand:SI 0 "s_register_operand" "")
2744 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2745 (match_operand:SI 2 "reg_or_int_operand" "")))]
2746 "TARGET_EITHER"
2747 "
2748 if (GET_CODE (operands[2]) == CONST_INT)
2749 {
2750 if (TARGET_32BIT)
2751 {
2752 arm_split_constant (IOR, SImode, NULL_RTX,
2753 INTVAL (operands[2]), operands[0], operands[1],
2754 optimize && can_create_pseudo_p ());
2755 DONE;
2756 }
2757 else /* TARGET_THUMB1 */
2758 {
2759 rtx tmp = force_reg (SImode, operands[2]);
2760 if (rtx_equal_p (operands[0], operands[1]))
2761 operands[2] = tmp;
2762 else
2763 {
2764 operands[2] = operands[1];
2765 operands[1] = tmp;
2766 }
2767 }
2768 }
2769 "
2770 )
2771
2772 (define_insn_and_split "*iorsi3_insn"
2773 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2774 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
2775 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
2776 "TARGET_32BIT"
2777 "@
2778 orr%?\\t%0, %1, %2
2779 orn%?\\t%0, %1, #%B2
2780 #"
2781 "TARGET_32BIT
2782 && GET_CODE (operands[2]) == CONST_INT
2783 && !(const_ok_for_arm (INTVAL (operands[2]))
2784 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2785 [(clobber (const_int 0))]
2786 {
2787 arm_split_constant (IOR, SImode, curr_insn,
2788 INTVAL (operands[2]), operands[0], operands[1], 0);
2789 DONE;
2790 }
2791 [(set_attr "length" "4,4,16")
2792 (set_attr "arch" "32,t2,32")
2793 (set_attr "predicable" "yes")])
2794
2795 (define_insn "*thumb1_iorsi3_insn"
2796 [(set (match_operand:SI 0 "register_operand" "=l")
2797 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2798 (match_operand:SI 2 "register_operand" "l")))]
2799 "TARGET_THUMB1"
2800 "orr\\t%0, %2"
2801 [(set_attr "length" "2")
2802 (set_attr "conds" "set")])
2803
2804 (define_peephole2
2805 [(match_scratch:SI 3 "r")
2806 (set (match_operand:SI 0 "arm_general_register_operand" "")
2807 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2808 (match_operand:SI 2 "const_int_operand" "")))]
2809 "TARGET_ARM
2810 && !const_ok_for_arm (INTVAL (operands[2]))
2811 && const_ok_for_arm (~INTVAL (operands[2]))"
2812 [(set (match_dup 3) (match_dup 2))
2813 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2814 ""
2815 )
2816
2817 (define_insn "*iorsi3_compare0"
2818 [(set (reg:CC_NOOV CC_REGNUM)
2819 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2820 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2821 (const_int 0)))
2822 (set (match_operand:SI 0 "s_register_operand" "=r")
2823 (ior:SI (match_dup 1) (match_dup 2)))]
2824 "TARGET_32BIT"
2825 "orr%.\\t%0, %1, %2"
2826 [(set_attr "conds" "set")]
2827 )
2828
2829 (define_insn "*iorsi3_compare0_scratch"
2830 [(set (reg:CC_NOOV CC_REGNUM)
2831 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2832 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2833 (const_int 0)))
2834 (clobber (match_scratch:SI 0 "=r"))]
2835 "TARGET_32BIT"
2836 "orr%.\\t%0, %1, %2"
2837 [(set_attr "conds" "set")]
2838 )
2839
2840 (define_expand "xordi3"
2841 [(set (match_operand:DI 0 "s_register_operand" "")
2842 (xor:DI (match_operand:DI 1 "s_register_operand" "")
2843 (match_operand:DI 2 "s_register_operand" "")))]
2844 "TARGET_32BIT"
2845 ""
2846 )
2847
2848 (define_insn "*xordi3_insn"
2849 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2850 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2851 (match_operand:DI 2 "s_register_operand" "r,r")))]
2852 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2853 "#"
2854 [(set_attr "length" "8")
2855 (set_attr "predicable" "yes")]
2856 )
2857
2858 (define_insn "*xordi_zesidi_di"
2859 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2860 (xor:DI (zero_extend:DI
2861 (match_operand:SI 2 "s_register_operand" "r,r"))
2862 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2863 "TARGET_32BIT"
2864 "@
2865 eor%?\\t%Q0, %Q1, %2
2866 #"
2867 [(set_attr "length" "4,8")
2868 (set_attr "predicable" "yes")]
2869 )
2870
2871 (define_insn "*xordi_sesidi_di"
2872 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2873 (xor:DI (sign_extend:DI
2874 (match_operand:SI 2 "s_register_operand" "r,r"))
2875 (match_operand:DI 1 "s_register_operand" "0,r")))]
2876 "TARGET_32BIT"
2877 "#"
2878 [(set_attr "length" "8")
2879 (set_attr "predicable" "yes")]
2880 )
2881
2882 (define_expand "xorsi3"
2883 [(set (match_operand:SI 0 "s_register_operand" "")
2884 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2885 (match_operand:SI 2 "reg_or_int_operand" "")))]
2886 "TARGET_EITHER"
2887 "if (GET_CODE (operands[2]) == CONST_INT)
2888 {
2889 if (TARGET_32BIT)
2890 {
2891 arm_split_constant (XOR, SImode, NULL_RTX,
2892 INTVAL (operands[2]), operands[0], operands[1],
2893 optimize && can_create_pseudo_p ());
2894 DONE;
2895 }
2896 else /* TARGET_THUMB1 */
2897 {
2898 rtx tmp = force_reg (SImode, operands[2]);
2899 if (rtx_equal_p (operands[0], operands[1]))
2900 operands[2] = tmp;
2901 else
2902 {
2903 operands[2] = operands[1];
2904 operands[1] = tmp;
2905 }
2906 }
2907 }"
2908 )
2909
2910 (define_insn "*arm_xorsi3"
2911 [(set (match_operand:SI 0 "s_register_operand" "=r")
2912 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2913 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2914 "TARGET_32BIT"
2915 "eor%?\\t%0, %1, %2"
2916 [(set_attr "predicable" "yes")]
2917 )
2918
2919 (define_insn "*thumb1_xorsi3_insn"
2920 [(set (match_operand:SI 0 "register_operand" "=l")
2921 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2922 (match_operand:SI 2 "register_operand" "l")))]
2923 "TARGET_THUMB1"
2924 "eor\\t%0, %2"
2925 [(set_attr "length" "2")
2926 (set_attr "conds" "set")])
2927
2928 (define_insn "*xorsi3_compare0"
2929 [(set (reg:CC_NOOV CC_REGNUM)
2930 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2931 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2932 (const_int 0)))
2933 (set (match_operand:SI 0 "s_register_operand" "=r")
2934 (xor:SI (match_dup 1) (match_dup 2)))]
2935 "TARGET_32BIT"
2936 "eor%.\\t%0, %1, %2"
2937 [(set_attr "conds" "set")]
2938 )
2939
2940 (define_insn "*xorsi3_compare0_scratch"
2941 [(set (reg:CC_NOOV CC_REGNUM)
2942 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2943 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2944 (const_int 0)))]
2945 "TARGET_32BIT"
2946 "teq%?\\t%0, %1"
2947 [(set_attr "conds" "set")]
2948 )
2949
2950 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
2951 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2952 ; following insns.
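; In other words (a two-step derivation):
;   (~A & ~B) | C  =  ~(A | B) | C  =  ~((A | B) & ~C)
; so we form D = (A | B) & ~C and then invert D.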
2953
2954 (define_split
2955 [(set (match_operand:SI 0 "s_register_operand" "")
2956 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2957 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2958 (match_operand:SI 3 "arm_rhs_operand" "")))
2959 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2960 "TARGET_32BIT"
2961 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2962 (not:SI (match_dup 3))))
2963 (set (match_dup 0) (not:SI (match_dup 4)))]
2964 ""
2965 )
2966
2967 (define_insn "*andsi_iorsi3_notsi"
2968 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2969 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2970 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2971 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2972 "TARGET_32BIT"
2973 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2974 [(set_attr "length" "8")
2975 (set_attr "ce_count" "2")
2976 (set_attr "predicable" "yes")]
2977 )
2978
2979 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2980 ; insns are available?
2981 (define_split
2982 [(set (match_operand:SI 0 "s_register_operand" "")
2983 (match_operator:SI 1 "logical_binary_operator"
2984 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2985 (match_operand:SI 3 "const_int_operand" "")
2986 (match_operand:SI 4 "const_int_operand" ""))
2987 (match_operator:SI 9 "logical_binary_operator"
2988 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2989 (match_operand:SI 6 "const_int_operand" ""))
2990 (match_operand:SI 7 "s_register_operand" "")])]))
2991 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2992 "TARGET_32BIT
2993 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2994 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2995 [(set (match_dup 8)
2996 (match_op_dup 1
2997 [(ashift:SI (match_dup 2) (match_dup 4))
2998 (match_dup 5)]))
2999 (set (match_dup 0)
3000 (match_op_dup 1
3001 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3002 (match_dup 7)]))]
3003 "
3004 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3005 ")
3006
3007 (define_split
3008 [(set (match_operand:SI 0 "s_register_operand" "")
3009 (match_operator:SI 1 "logical_binary_operator"
3010 [(match_operator:SI 9 "logical_binary_operator"
3011 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3012 (match_operand:SI 6 "const_int_operand" ""))
3013 (match_operand:SI 7 "s_register_operand" "")])
3014 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3015 (match_operand:SI 3 "const_int_operand" "")
3016 (match_operand:SI 4 "const_int_operand" ""))]))
3017 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3018 "TARGET_32BIT
3019 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3020 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3021 [(set (match_dup 8)
3022 (match_op_dup 1
3023 [(ashift:SI (match_dup 2) (match_dup 4))
3024 (match_dup 5)]))
3025 (set (match_dup 0)
3026 (match_op_dup 1
3027 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3028 (match_dup 7)]))]
3029 "
3030 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3031 ")
3032
3033 (define_split
3034 [(set (match_operand:SI 0 "s_register_operand" "")
3035 (match_operator:SI 1 "logical_binary_operator"
3036 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3037 (match_operand:SI 3 "const_int_operand" "")
3038 (match_operand:SI 4 "const_int_operand" ""))
3039 (match_operator:SI 9 "logical_binary_operator"
3040 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3041 (match_operand:SI 6 "const_int_operand" ""))
3042 (match_operand:SI 7 "s_register_operand" "")])]))
3043 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3044 "TARGET_32BIT
3045 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3046 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3047 [(set (match_dup 8)
3048 (match_op_dup 1
3049 [(ashift:SI (match_dup 2) (match_dup 4))
3050 (match_dup 5)]))
3051 (set (match_dup 0)
3052 (match_op_dup 1
3053 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3054 (match_dup 7)]))]
3055 "
3056 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3057 ")
3058
3059 (define_split
3060 [(set (match_operand:SI 0 "s_register_operand" "")
3061 (match_operator:SI 1 "logical_binary_operator"
3062 [(match_operator:SI 9 "logical_binary_operator"
3063 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3064 (match_operand:SI 6 "const_int_operand" ""))
3065 (match_operand:SI 7 "s_register_operand" "")])
3066 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3067 (match_operand:SI 3 "const_int_operand" "")
3068 (match_operand:SI 4 "const_int_operand" ""))]))
3069 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3070 "TARGET_32BIT
3071 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3072 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3073 [(set (match_dup 8)
3074 (match_op_dup 1
3075 [(ashift:SI (match_dup 2) (match_dup 4))
3076 (match_dup 5)]))
3077 (set (match_dup 0)
3078 (match_op_dup 1
3079 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3080 (match_dup 7)]))]
3081 "
3082 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3083 ")
3084 \f
3085
3086 ;; Minimum and maximum insns
3087
3088 (define_expand "smaxsi3"
3089 [(parallel [
3090 (set (match_operand:SI 0 "s_register_operand" "")
3091 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3092 (match_operand:SI 2 "arm_rhs_operand" "")))
3093 (clobber (reg:CC CC_REGNUM))])]
3094 "TARGET_32BIT"
3095 "
3096 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3097 {
3098 /* No need for a clobber of the condition code register here. */
3099 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3100 gen_rtx_SMAX (SImode, operands[1],
3101 operands[2])));
3102 DONE;
3103 }
3104 ")
3105
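;; (Sketch of why no CC clobber is needed for the 0 and -1 cases above:
;;  for 32-bit signed x, x >> 31 (arithmetic) is 0 when x >= 0 and -1 when
;;  x < 0, so max (x, 0) = x & ~(x >> 31) -> bic, and
;;  max (x, -1) = x | (x >> 31) -> orr, as in the two patterns below.)
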
3106 (define_insn "*smax_0"
3107 [(set (match_operand:SI 0 "s_register_operand" "=r")
3108 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3109 (const_int 0)))]
3110 "TARGET_32BIT"
3111 "bic%?\\t%0, %1, %1, asr #31"
3112 [(set_attr "predicable" "yes")]
3113 )
3114
3115 (define_insn "*smax_m1"
3116 [(set (match_operand:SI 0 "s_register_operand" "=r")
3117 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3118 (const_int -1)))]
3119 "TARGET_32BIT"
3120 "orr%?\\t%0, %1, %1, asr #31"
3121 [(set_attr "predicable" "yes")]
3122 )
3123
3124 (define_insn "*arm_smax_insn"
3125 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3126 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3127 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3128 (clobber (reg:CC CC_REGNUM))]
3129 "TARGET_ARM"
3130 "@
3131 cmp\\t%1, %2\;movlt\\t%0, %2
3132 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3133 [(set_attr "conds" "clob")
3134 (set_attr "length" "8,12")]
3135 )
3136
3137 (define_expand "sminsi3"
3138 [(parallel [
3139 (set (match_operand:SI 0 "s_register_operand" "")
3140 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3141 (match_operand:SI 2 "arm_rhs_operand" "")))
3142 (clobber (reg:CC CC_REGNUM))])]
3143 "TARGET_32BIT"
3144 "
3145 if (operands[2] == const0_rtx)
3146 {
3147 /* No need for a clobber of the condition code register here. */
3148 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3149 gen_rtx_SMIN (SImode, operands[1],
3150 operands[2])));
3151 DONE;
3152 }
3153 ")
3154
3155 (define_insn "*smin_0"
3156 [(set (match_operand:SI 0 "s_register_operand" "=r")
3157 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3158 (const_int 0)))]
3159 "TARGET_32BIT"
3160 "and%?\\t%0, %1, %1, asr #31"
3161 [(set_attr "predicable" "yes")]
3162 )
3163
3164 (define_insn "*arm_smin_insn"
3165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3166 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3167 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3168 (clobber (reg:CC CC_REGNUM))]
3169 "TARGET_ARM"
3170 "@
3171 cmp\\t%1, %2\;movge\\t%0, %2
3172 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3173 [(set_attr "conds" "clob")
3174 (set_attr "length" "8,12")]
3175 )
3176
3177 (define_expand "umaxsi3"
3178 [(parallel [
3179 (set (match_operand:SI 0 "s_register_operand" "")
3180 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3181 (match_operand:SI 2 "arm_rhs_operand" "")))
3182 (clobber (reg:CC CC_REGNUM))])]
3183 "TARGET_32BIT"
3184 ""
3185 )
3186
3187 (define_insn "*arm_umaxsi3"
3188 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3189 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3190 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3191 (clobber (reg:CC CC_REGNUM))]
3192 "TARGET_ARM"
3193 "@
3194 cmp\\t%1, %2\;movcc\\t%0, %2
3195 cmp\\t%1, %2\;movcs\\t%0, %1
3196 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3197 [(set_attr "conds" "clob")
3198 (set_attr "length" "8,8,12")]
3199 )
3200
3201 (define_expand "uminsi3"
3202 [(parallel [
3203 (set (match_operand:SI 0 "s_register_operand" "")
3204 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3205 (match_operand:SI 2 "arm_rhs_operand" "")))
3206 (clobber (reg:CC CC_REGNUM))])]
3207 "TARGET_32BIT"
3208 ""
3209 )
3210
3211 (define_insn "*arm_uminsi3"
3212 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3213 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3214 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3215 (clobber (reg:CC CC_REGNUM))]
3216 "TARGET_ARM"
3217 "@
3218 cmp\\t%1, %2\;movcs\\t%0, %2
3219 cmp\\t%1, %2\;movcc\\t%0, %1
3220 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3221 [(set_attr "conds" "clob")
3222 (set_attr "length" "8,8,12")]
3223 )
3224
3225 (define_insn "*store_minmaxsi"
3226 [(set (match_operand:SI 0 "memory_operand" "=m")
3227 (match_operator:SI 3 "minmax_operator"
3228 [(match_operand:SI 1 "s_register_operand" "r")
3229 (match_operand:SI 2 "s_register_operand" "r")]))
3230 (clobber (reg:CC CC_REGNUM))]
3231 "TARGET_32BIT"
3232 "*
3233 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3234 operands[1], operands[2]);
3235 output_asm_insn (\"cmp\\t%1, %2\", operands);
3236 if (TARGET_THUMB2)
3237 output_asm_insn (\"ite\t%d3\", operands);
3238 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3239 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3240 return \"\";
3241 "
3242 [(set_attr "conds" "clob")
3243 (set (attr "length")
3244 (if_then_else (eq_attr "is_thumb" "yes")
3245 (const_int 14)
3246 (const_int 12)))
3247 (set_attr "type" "store1")]
3248 )
3249
3250 ; Reject the frame pointer in operand[1], since reloading this after
3251 ; it has been eliminated can cause carnage.
3252 (define_insn "*minmax_arithsi"
3253 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3254 (match_operator:SI 4 "shiftable_operator"
3255 [(match_operator:SI 5 "minmax_operator"
3256 [(match_operand:SI 2 "s_register_operand" "r,r")
3257 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3258 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3259 (clobber (reg:CC CC_REGNUM))]
3260 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3261 "*
3262 {
3263 enum rtx_code code = GET_CODE (operands[4]);
3264 bool need_else;
3265
3266 if (which_alternative != 0 || operands[3] != const0_rtx
3267 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3268 need_else = true;
3269 else
3270 need_else = false;
3271
3272 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3273 operands[2], operands[3]);
3274 output_asm_insn (\"cmp\\t%2, %3\", operands);
3275 if (TARGET_THUMB2)
3276 {
3277 if (need_else)
3278 output_asm_insn (\"ite\\t%d5\", operands);
3279 else
3280 output_asm_insn (\"it\\t%d5\", operands);
3281 }
3282 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3283 if (need_else)
3284 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3285 return \"\";
3286 }"
3287 [(set_attr "conds" "clob")
3288 (set (attr "length")
3289 (if_then_else (eq_attr "is_thumb" "yes")
3290 (const_int 14)
3291 (const_int 12)))]
3292 )
3293
3294 \f
3295 ;; Shift and rotation insns
3296
3297 (define_expand "ashldi3"
3298 [(set (match_operand:DI 0 "s_register_operand" "")
3299 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3300 (match_operand:SI 2 "reg_or_int_operand" "")))]
3301 "TARGET_32BIT"
3302 "
3303 if (GET_CODE (operands[2]) == CONST_INT)
3304 {
3305 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3306 {
3307 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3308 DONE;
3309 }
3310 /* Ideally we shouldn't fail here if we could know that operands[1]
3311 ends up already living in an iwmmxt register. Otherwise it's
3312 cheaper to generate the alternate code than to move values
3313 to iwmmxt regs and back. */
3314 FAIL;
3315 }
3316 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3317 FAIL;
3318 "
3319 )
3320
3321 (define_insn "arm_ashldi3_1bit"
3322 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3323 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3324 (const_int 1)))
3325 (clobber (reg:CC CC_REGNUM))]
3326 "TARGET_32BIT"
3327 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3328 [(set_attr "conds" "clob")
3329 (set_attr "length" "8")]
3330 )
3331
3332 (define_expand "ashlsi3"
3333 [(set (match_operand:SI 0 "s_register_operand" "")
3334 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3335 (match_operand:SI 2 "arm_rhs_operand" "")))]
3336 "TARGET_EITHER"
3337 "
3338 if (GET_CODE (operands[2]) == CONST_INT
3339 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3340 {
3341 emit_insn (gen_movsi (operands[0], const0_rtx));
3342 DONE;
3343 }
3344 "
3345 )
3346
3347 (define_insn "*thumb1_ashlsi3"
3348 [(set (match_operand:SI 0 "register_operand" "=l,l")
3349 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3350 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3351 "TARGET_THUMB1"
3352 "lsl\\t%0, %1, %2"
3353 [(set_attr "length" "2")
3354 (set_attr "conds" "set")])
3355
3356 (define_expand "ashrdi3"
3357 [(set (match_operand:DI 0 "s_register_operand" "")
3358 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3359 (match_operand:SI 2 "reg_or_int_operand" "")))]
3360 "TARGET_32BIT"
3361 "
3362 if (GET_CODE (operands[2]) == CONST_INT)
3363 {
3364 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3365 {
3366 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3367 DONE;
3368 }
3369 /* Ideally we shouldn't fail here if we could know that operands[1]
3370 ends up already living in an iwmmxt register. Otherwise it's
3371 cheaper to generate the alternate code than to move values
3372 to iwmmxt regs and back. */
3373 FAIL;
3374 }
3375 else if (!TARGET_REALLY_IWMMXT)
3376 FAIL;
3377 "
3378 )
3379
3380 (define_insn "arm_ashrdi3_1bit"
3381 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3382 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3383 (const_int 1)))
3384 (clobber (reg:CC CC_REGNUM))]
3385 "TARGET_32BIT"
3386 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3387 [(set_attr "conds" "clob")
3388 (set_attr "length" "8")]
3389 )
3390
3391 (define_expand "ashrsi3"
3392 [(set (match_operand:SI 0 "s_register_operand" "")
3393 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3394 (match_operand:SI 2 "arm_rhs_operand" "")))]
3395 "TARGET_EITHER"
3396 "
3397 if (GET_CODE (operands[2]) == CONST_INT
3398 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3399 operands[2] = GEN_INT (31);
3400 "
3401 )
3402
3403 (define_insn "*thumb1_ashrsi3"
3404 [(set (match_operand:SI 0 "register_operand" "=l,l")
3405 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3406 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3407 "TARGET_THUMB1"
3408 "asr\\t%0, %1, %2"
3409 [(set_attr "length" "2")
3410 (set_attr "conds" "set")])
3411
3412 (define_expand "lshrdi3"
3413 [(set (match_operand:DI 0 "s_register_operand" "")
3414 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3415 (match_operand:SI 2 "reg_or_int_operand" "")))]
3416 "TARGET_32BIT"
3417 "
3418 if (GET_CODE (operands[2]) == CONST_INT)
3419 {
3420 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3421 {
3422 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3423 DONE;
3424 }
3425 /* Ideally we shouldn't fail here if we could know that operands[1]
3426 ends up already living in an iwmmxt register. Otherwise it's
3427 cheaper to generate the alternate code than to move values
3428 to iwmmxt regs and back. */
3429 FAIL;
3430 }
3431 else if (!TARGET_REALLY_IWMMXT)
3432 FAIL;
3433 "
3434 )
3435
3436 (define_insn "arm_lshrdi3_1bit"
3437 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3438 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3439 (const_int 1)))
3440 (clobber (reg:CC CC_REGNUM))]
3441 "TARGET_32BIT"
3442 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3443 [(set_attr "conds" "clob")
3444 (set_attr "length" "8")]
3445 )
3446
3447 (define_expand "lshrsi3"
3448 [(set (match_operand:SI 0 "s_register_operand" "")
3449 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3450 (match_operand:SI 2 "arm_rhs_operand" "")))]
3451 "TARGET_EITHER"
3452 "
3453 if (GET_CODE (operands[2]) == CONST_INT
3454 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3455 {
3456 emit_insn (gen_movsi (operands[0], const0_rtx));
3457 DONE;
3458 }
3459 "
3460 )
3461
3462 (define_insn "*thumb1_lshrsi3"
3463 [(set (match_operand:SI 0 "register_operand" "=l,l")
3464 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3465 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3466 "TARGET_THUMB1"
3467 "lsr\\t%0, %1, %2"
3468 [(set_attr "length" "2")
3469 (set_attr "conds" "set")])
3470
3471 (define_expand "rotlsi3"
3472 [(set (match_operand:SI 0 "s_register_operand" "")
3473 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3474 (match_operand:SI 2 "reg_or_int_operand" "")))]
3475 "TARGET_32BIT"
3476 "
3477 if (GET_CODE (operands[2]) == CONST_INT)
3478 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3479 else
3480 {
3481 rtx reg = gen_reg_rtx (SImode);
3482 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3483 operands[2] = reg;
3484 }
3485 "
3486 )
3487
3488 (define_expand "rotrsi3"
3489 [(set (match_operand:SI 0 "s_register_operand" "")
3490 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3491 (match_operand:SI 2 "arm_rhs_operand" "")))]
3492 "TARGET_EITHER"
3493 "
3494 if (TARGET_32BIT)
3495 {
3496 if (GET_CODE (operands[2]) == CONST_INT
3497 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3498 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3499 }
3500 else /* TARGET_THUMB1 */
3501 {
3502 if (GET_CODE (operands[2]) == CONST_INT)
3503 operands[2] = force_reg (SImode, operands[2]);
3504 }
3505 "
3506 )
3507
3508 (define_insn "*thumb1_rotrsi3"
3509 [(set (match_operand:SI 0 "register_operand" "=l")
3510 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3511 (match_operand:SI 2 "register_operand" "l")))]
3512 "TARGET_THUMB1"
3513 "ror\\t%0, %0, %2"
3514 [(set_attr "length" "2")]
3515 )
3516
3517 (define_insn "*arm_shiftsi3"
3518 [(set (match_operand:SI 0 "s_register_operand" "=r")
3519 (match_operator:SI 3 "shift_operator"
3520 [(match_operand:SI 1 "s_register_operand" "r")
3521 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3522 "TARGET_32BIT"
3523 "* return arm_output_shift(operands, 0);"
3524 [(set_attr "predicable" "yes")
3525 (set_attr "shift" "1")
3526 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3527 (const_string "alu_shift")
3528 (const_string "alu_shift_reg")))]
3529 )
3530
3531 (define_insn "*shiftsi3_compare0"
3532 [(set (reg:CC_NOOV CC_REGNUM)
3533 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3534 [(match_operand:SI 1 "s_register_operand" "r")
3535 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3536 (const_int 0)))
3537 (set (match_operand:SI 0 "s_register_operand" "=r")
3538 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3539 "TARGET_32BIT"
3540 "* return arm_output_shift(operands, 1);"
3541 [(set_attr "conds" "set")
3542 (set_attr "shift" "1")
3543 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3544 (const_string "alu_shift")
3545 (const_string "alu_shift_reg")))]
3546 )
3547
3548 (define_insn "*shiftsi3_compare0_scratch"
3549 [(set (reg:CC_NOOV CC_REGNUM)
3550 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3551 [(match_operand:SI 1 "s_register_operand" "r")
3552 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3553 (const_int 0)))
3554 (clobber (match_scratch:SI 0 "=r"))]
3555 "TARGET_32BIT"
3556 "* return arm_output_shift(operands, 1);"
3557 [(set_attr "conds" "set")
3558 (set_attr "shift" "1")]
3559 )
3560
3561 (define_insn "*not_shiftsi"
3562 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3563 (not:SI (match_operator:SI 3 "shift_operator"
3564 [(match_operand:SI 1 "s_register_operand" "r,r")
3565 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3566 "TARGET_32BIT"
3567 "mvn%?\\t%0, %1%S3"
3568 [(set_attr "predicable" "yes")
3569 (set_attr "shift" "1")
3570 (set_attr "arch" "32,a")
3571 (set_attr "type" "alu_shift,alu_shift_reg")])
3572
3573 (define_insn "*not_shiftsi_compare0"
3574 [(set (reg:CC_NOOV CC_REGNUM)
3575 (compare:CC_NOOV
3576 (not:SI (match_operator:SI 3 "shift_operator"
3577 [(match_operand:SI 1 "s_register_operand" "r,r")
3578 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3579 (const_int 0)))
3580 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3581 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3582 "TARGET_32BIT"
3583 "mvn%.\\t%0, %1%S3"
3584 [(set_attr "conds" "set")
3585 (set_attr "shift" "1")
3586 (set_attr "arch" "32,a")
3587 (set_attr "type" "alu_shift,alu_shift_reg")])
3588
3589 (define_insn "*not_shiftsi_compare0_scratch"
3590 [(set (reg:CC_NOOV CC_REGNUM)
3591 (compare:CC_NOOV
3592 (not:SI (match_operator:SI 3 "shift_operator"
3593 [(match_operand:SI 1 "s_register_operand" "r,r")
3594 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3595 (const_int 0)))
3596 (clobber (match_scratch:SI 0 "=r,r"))]
3597 "TARGET_32BIT"
3598 "mvn%.\\t%0, %1%S3"
3599 [(set_attr "conds" "set")
3600 (set_attr "shift" "1")
3601 (set_attr "arch" "32,a")
3602 (set_attr "type" "alu_shift,alu_shift_reg")])
3603
3604 ;; We don't really have extzv, but defining this using shifts helps
3605 ;; to reduce register pressure later on.
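;;
;; A worked example (illustrative only, register names assumed): extracting
;; an 8-bit field that starts at bit 4 gives
;;   lshift = 32 - 8 - 4 = 20,  rshift = 32 - 8 = 24
;; so on the shift path below the expansion is equivalent to
;;   lsl	r0, r1, #20
;;   lsr	r0, r0, #24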
3606
3607 (define_expand "extzv"
3608 [(set (match_dup 4)
3609 (ashift:SI (match_operand:SI 1 "register_operand" "")
3610 (match_operand:SI 2 "const_int_operand" "")))
3611 (set (match_operand:SI 0 "register_operand" "")
3612 (lshiftrt:SI (match_dup 4)
3613 (match_operand:SI 3 "const_int_operand" "")))]
3614 "TARGET_THUMB1 || arm_arch_thumb2"
3615 "
3616 {
3617 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3618 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3619
3620 if (arm_arch_thumb2)
3621 {
3622 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3623 operands[3]));
3624 DONE;
3625 }
3626
3627 operands[3] = GEN_INT (rshift);
3628
3629 if (lshift == 0)
3630 {
3631 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3632 DONE;
3633 }
3634
3635 operands[2] = GEN_INT (lshift);
3636 operands[4] = gen_reg_rtx (SImode);
3637 }"
3638 )
3639
3640 (define_insn "extv"
3641 [(set (match_operand:SI 0 "s_register_operand" "=r")
3642 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3643 (match_operand:SI 2 "const_int_operand" "M")
3644 (match_operand:SI 3 "const_int_operand" "M")))]
3645 "arm_arch_thumb2"
3646 "sbfx%?\t%0, %1, %3, %2"
3647 [(set_attr "length" "4")
3648 (set_attr "predicable" "yes")]
3649 )
3650
3651 (define_insn "extzv_t2"
3652 [(set (match_operand:SI 0 "s_register_operand" "=r")
3653 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3654 (match_operand:SI 2 "const_int_operand" "M")
3655 (match_operand:SI 3 "const_int_operand" "M")))]
3656 "arm_arch_thumb2"
3657 "ubfx%?\t%0, %1, %3, %2"
3658 [(set_attr "length" "4")
3659 (set_attr "predicable" "yes")]
3660 )
3661
3662 \f
3663 ;; Unary arithmetic insns
3664
3665 (define_expand "negdi2"
3666 [(parallel
3667 [(set (match_operand:DI 0 "s_register_operand" "")
3668 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3669 (clobber (reg:CC CC_REGNUM))])]
3670 "TARGET_EITHER"
3671 ""
3672 )
3673
3674 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3675 ;; The first alternative allows the common case of a *full* overlap.
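;; For illustration: the template below is  rsbs %Q0, %Q1, #0 ; rsc %R0, %R1, #0.
;; With a partial overlap (%Q0 == %R1) the rsbs would overwrite the high word
;; of the source before the rsc reads it; a full overlap is safe because each
;; word is read before it is written.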
3676 (define_insn "*arm_negdi2"
3677 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3678 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3679 (clobber (reg:CC CC_REGNUM))]
3680 "TARGET_ARM"
3681 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3682 [(set_attr "conds" "clob")
3683 (set_attr "length" "8")]
3684 )
3685
3686 (define_insn "*thumb1_negdi2"
3687 [(set (match_operand:DI 0 "register_operand" "=&l")
3688 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3689 (clobber (reg:CC CC_REGNUM))]
3690 "TARGET_THUMB1"
3691 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3692 [(set_attr "length" "6")]
3693 )
3694
3695 (define_expand "negsi2"
3696 [(set (match_operand:SI 0 "s_register_operand" "")
3697 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3698 "TARGET_EITHER"
3699 ""
3700 )
3701
3702 (define_insn "*arm_negsi2"
3703 [(set (match_operand:SI 0 "s_register_operand" "=r")
3704 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3705 "TARGET_32BIT"
3706 "rsb%?\\t%0, %1, #0"
3707 [(set_attr "predicable" "yes")]
3708 )
3709
3710 (define_insn "*thumb1_negsi2"
3711 [(set (match_operand:SI 0 "register_operand" "=l")
3712 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3713 "TARGET_THUMB1"
3714 "neg\\t%0, %1"
3715 [(set_attr "length" "2")]
3716 )
3717
3718 (define_expand "negsf2"
3719 [(set (match_operand:SF 0 "s_register_operand" "")
3720 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3721 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3722 ""
3723 )
3724
3725 (define_expand "negdf2"
3726 [(set (match_operand:DF 0 "s_register_operand" "")
3727 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3728 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3729 "")
3730
3731 ;; abssi2 doesn't really clobber the condition codes if a different register
3732 ;; is being set. To keep things simple, assume during rtl manipulations that
3733 ;; it does, but tell the final scan operator the truth. Similarly for
3734 ;; (neg (abs...))
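;;
;; A worked example for the flag-preserving alternative (illustrative only):
;; it computes abs (x) as (x ^ (x asr #31)) - (x asr #31).  For x = -5,
;; x asr #31 = -1, the eor yields 4, and 4 - (-1) = 5; for x >= 0 the shift
;; result is 0 and both operations leave x unchanged.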
3735
3736 (define_expand "abssi2"
3737 [(parallel
3738 [(set (match_operand:SI 0 "s_register_operand" "")
3739 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3740 (clobber (match_dup 2))])]
3741 "TARGET_EITHER"
3742 "
3743 if (TARGET_THUMB1)
3744 operands[2] = gen_rtx_SCRATCH (SImode);
3745 else
3746 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3747 ")
3748
3749 (define_insn "*arm_abssi2"
3750 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3751 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3752 (clobber (reg:CC CC_REGNUM))]
3753 "TARGET_ARM"
3754 "@
3755 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3756 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3757 [(set_attr "conds" "clob,*")
3758 (set_attr "shift" "1")
3759 ;; predicable can't be set based on the variant, so left as no
3760 (set_attr "length" "8")]
3761 )
3762
3763 (define_insn_and_split "*thumb1_abssi2"
3764 [(set (match_operand:SI 0 "s_register_operand" "=l")
3765 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3766 (clobber (match_scratch:SI 2 "=&l"))]
3767 "TARGET_THUMB1"
3768 "#"
3769 "TARGET_THUMB1 && reload_completed"
3770 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3771 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3772 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3773 ""
3774 [(set_attr "length" "6")]
3775 )
3776
3777 (define_insn "*arm_neg_abssi2"
3778 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3779 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3780 (clobber (reg:CC CC_REGNUM))]
3781 "TARGET_ARM"
3782 "@
3783 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3784 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3785 [(set_attr "conds" "clob,*")
3786 (set_attr "shift" "1")
3787 ;; predicable can't be set based on the variant, so left as no
3788 (set_attr "length" "8")]
3789 )
3790
3791 (define_insn_and_split "*thumb1_neg_abssi2"
3792 [(set (match_operand:SI 0 "s_register_operand" "=l")
3793 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3794 (clobber (match_scratch:SI 2 "=&l"))]
3795 "TARGET_THUMB1"
3796 "#"
3797 "TARGET_THUMB1 && reload_completed"
3798 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3799 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3800 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3801 ""
3802 [(set_attr "length" "6")]
3803 )
3804
3805 (define_expand "abssf2"
3806 [(set (match_operand:SF 0 "s_register_operand" "")
3807 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3808 "TARGET_32BIT && TARGET_HARD_FLOAT"
3809 "")
3810
3811 (define_expand "absdf2"
3812 [(set (match_operand:DF 0 "s_register_operand" "")
3813 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3814 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3815 "")
3816
3817 (define_expand "sqrtsf2"
3818 [(set (match_operand:SF 0 "s_register_operand" "")
3819 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3820 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3821 "")
3822
3823 (define_expand "sqrtdf2"
3824 [(set (match_operand:DF 0 "s_register_operand" "")
3825 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3826 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3827 "")
3828
3829 (define_insn_and_split "one_cmpldi2"
3830 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3831 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3832 "TARGET_32BIT"
3833 "#"
3834 "TARGET_32BIT && reload_completed"
3835 [(set (match_dup 0) (not:SI (match_dup 1)))
3836 (set (match_dup 2) (not:SI (match_dup 3)))]
3837 "
3838 {
3839 operands[2] = gen_highpart (SImode, operands[0]);
3840 operands[0] = gen_lowpart (SImode, operands[0]);
3841 operands[3] = gen_highpart (SImode, operands[1]);
3842 operands[1] = gen_lowpart (SImode, operands[1]);
3843 }"
3844 [(set_attr "length" "8")
3845 (set_attr "predicable" "yes")]
3846 )
3847
3848 (define_expand "one_cmplsi2"
3849 [(set (match_operand:SI 0 "s_register_operand" "")
3850 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3851 "TARGET_EITHER"
3852 ""
3853 )
3854
3855 (define_insn "*arm_one_cmplsi2"
3856 [(set (match_operand:SI 0 "s_register_operand" "=r")
3857 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3858 "TARGET_32BIT"
3859 "mvn%?\\t%0, %1"
3860 [(set_attr "predicable" "yes")]
3861 )
3862
3863 (define_insn "*thumb1_one_cmplsi2"
3864 [(set (match_operand:SI 0 "register_operand" "=l")
3865 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3866 "TARGET_THUMB1"
3867 "mvn\\t%0, %1"
3868 [(set_attr "length" "2")]
3869 )
3870
3871 (define_insn "*notsi_compare0"
3872 [(set (reg:CC_NOOV CC_REGNUM)
3873 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3874 (const_int 0)))
3875 (set (match_operand:SI 0 "s_register_operand" "=r")
3876 (not:SI (match_dup 1)))]
3877 "TARGET_32BIT"
3878 "mvn%.\\t%0, %1"
3879 [(set_attr "conds" "set")]
3880 )
3881
3882 (define_insn "*notsi_compare0_scratch"
3883 [(set (reg:CC_NOOV CC_REGNUM)
3884 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3885 (const_int 0)))
3886 (clobber (match_scratch:SI 0 "=r"))]
3887 "TARGET_32BIT"
3888 "mvn%.\\t%0, %1"
3889 [(set_attr "conds" "set")]
3890 )
3891 \f
3892 ;; Fixed <--> Floating conversion insns
3893
3894 (define_expand "floatsihf2"
3895 [(set (match_operand:HF 0 "general_operand" "")
3896 (float:HF (match_operand:SI 1 "general_operand" "")))]
3897 "TARGET_EITHER"
3898 "
3899 {
3900 rtx op1 = gen_reg_rtx (SFmode);
3901 expand_float (op1, operands[1], 0);
3902 op1 = convert_to_mode (HFmode, op1, 0);
3903 emit_move_insn (operands[0], op1);
3904 DONE;
3905 }"
3906 )
3907
3908 (define_expand "floatdihf2"
3909 [(set (match_operand:HF 0 "general_operand" "")
3910 (float:HF (match_operand:DI 1 "general_operand" "")))]
3911 "TARGET_EITHER"
3912 "
3913 {
3914 rtx op1 = gen_reg_rtx (SFmode);
3915 expand_float (op1, operands[1], 0);
3916 op1 = convert_to_mode (HFmode, op1, 0);
3917 emit_move_insn (operands[0], op1);
3918 DONE;
3919 }"
3920 )
3921
3922 (define_expand "floatsisf2"
3923 [(set (match_operand:SF 0 "s_register_operand" "")
3924 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3925 "TARGET_32BIT && TARGET_HARD_FLOAT"
3926 "
3927 if (TARGET_MAVERICK)
3928 {
3929 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3930 DONE;
3931 }
3932 ")
3933
3934 (define_expand "floatsidf2"
3935 [(set (match_operand:DF 0 "s_register_operand" "")
3936 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3937 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3938 "
3939 if (TARGET_MAVERICK)
3940 {
3941 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3942 DONE;
3943 }
3944 ")
3945
3946 (define_expand "fix_trunchfsi2"
3947 [(set (match_operand:SI 0 "general_operand" "")
3948 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3949 "TARGET_EITHER"
3950 "
3951 {
3952 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3953 expand_fix (operands[0], op1, 0);
3954 DONE;
3955 }"
3956 )
3957
3958 (define_expand "fix_trunchfdi2"
3959 [(set (match_operand:DI 0 "general_operand" "")
3960 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3961 "TARGET_EITHER"
3962 "
3963 {
3964 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3965 expand_fix (operands[0], op1, 0);
3966 DONE;
3967 }"
3968 )
3969
3970 (define_expand "fix_truncsfsi2"
3971 [(set (match_operand:SI 0 "s_register_operand" "")
3972 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3973 "TARGET_32BIT && TARGET_HARD_FLOAT"
3974 "
3975 if (TARGET_MAVERICK)
3976 {
3977 if (!cirrus_fp_register (operands[0], SImode))
3978 operands[0] = force_reg (SImode, operands[0]);
3979 if (!cirrus_fp_register (operands[1], SFmode))
3980 operands[1] = force_reg (SFmode, operands[1]);
3981 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3982 DONE;
3983 }
3984 ")
3985
3986 (define_expand "fix_truncdfsi2"
3987 [(set (match_operand:SI 0 "s_register_operand" "")
3988 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3989 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3990 "
3991 if (TARGET_MAVERICK)
3992 {
3993 if (!cirrus_fp_register (operands[1], DFmode))
3994 operands[1] = force_reg (DFmode, operands[1]);
3995 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3996 DONE;
3997 }
3998 ")
3999
4000 ;; Truncation insns
4001
4002 (define_expand "truncdfsf2"
4003 [(set (match_operand:SF 0 "s_register_operand" "")
4004 (float_truncate:SF
4005 (match_operand:DF 1 "s_register_operand" "")))]
4006 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4007 ""
4008 )
4009
4010 ;; DFmode -> HFmode conversions have to go through SFmode.
4011 (define_expand "truncdfhf2"
4012 [(set (match_operand:HF 0 "general_operand" "")
4013 (float_truncate:HF
4014 (match_operand:DF 1 "general_operand" "")))]
4015 "TARGET_EITHER"
4016 "
4017 {
4018 rtx op1;
4019 op1 = convert_to_mode (SFmode, operands[1], 0);
4020 op1 = convert_to_mode (HFmode, op1, 0);
4021 emit_move_insn (operands[0], op1);
4022 DONE;
4023 }"
4024 )
4025 \f
4026 ;; Zero and sign extension instructions.
4027
4028 (define_expand "zero_extendsidi2"
4029 [(set (match_operand:DI 0 "s_register_operand" "")
4030 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
4031 "TARGET_32BIT"
4032 ""
4033 )
4034
4035 (define_insn "*arm_zero_extendsidi2"
4036 [(set (match_operand:DI 0 "s_register_operand" "=r")
4037 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
4038 "TARGET_ARM"
4039 "*
4040 if (REGNO (operands[1])
4041 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
4042 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4043 return \"mov%?\\t%R0, #0\";
4044 "
4045 [(set_attr "length" "8")
4046 (set_attr "predicable" "yes")]
4047 )
4048
4049 (define_expand "zero_extendqidi2"
4050 [(set (match_operand:DI 0 "s_register_operand" "")
4051 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
4052 "TARGET_32BIT"
4053 ""
4054 )
4055
4056 (define_insn "*arm_zero_extendqidi2"
4057 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
4058 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4059 "TARGET_ARM"
4060 "@
4061 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
4062 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
4063 [(set_attr "length" "8")
4064 (set_attr "predicable" "yes")
4065 (set_attr "type" "*,load_byte")
4066 (set_attr "pool_range" "*,4092")
4067 (set_attr "neg_pool_range" "*,4084")]
4068 )
4069
4070 (define_expand "extendsidi2"
4071 [(set (match_operand:DI 0 "s_register_operand" "")
4072 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
4073 "TARGET_32BIT"
4074 ""
4075 )
4076
4077 (define_insn "*arm_extendsidi2"
4078 [(set (match_operand:DI 0 "s_register_operand" "=r")
4079 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
4080 "TARGET_ARM"
4081 "*
4082 if (REGNO (operands[1])
4083 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
4084 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
4085 return \"mov%?\\t%R0, %Q0, asr #31\";
4086 "
4087 [(set_attr "length" "8")
4088 (set_attr "shift" "1")
4089 (set_attr "predicable" "yes")]
4090 )
4091
4092 (define_expand "zero_extendhisi2"
4093 [(set (match_operand:SI 0 "s_register_operand" "")
4094 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4095 "TARGET_EITHER"
4096 {
4097 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4098 {
4099 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4100 DONE;
4101 }
4102 if (!arm_arch6 && !MEM_P (operands[1]))
4103 {
4104 rtx t = gen_lowpart (SImode, operands[1]);
4105 rtx tmp = gen_reg_rtx (SImode);
4106 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4107 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4108 DONE;
4109 }
4110 })
4111
4112 (define_split
4113 [(set (match_operand:SI 0 "register_operand" "")
4114 (zero_extend:SI (match_operand:HI 1 "register_operand" "")))]
4115 "!TARGET_THUMB2 && !arm_arch6"
4116 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4117 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4118 {
4119 operands[2] = gen_lowpart (SImode, operands[1]);
4120 })
4121
4122 (define_insn "*thumb1_zero_extendhisi2"
4123 [(set (match_operand:SI 0 "register_operand" "=l,l")
4124 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4125 "TARGET_THUMB1"
4126 "*
4127 rtx mem;
4128
4129 if (which_alternative == 0 && arm_arch6)
4130 return \"uxth\\t%0, %1\";
4131 if (which_alternative == 0)
4132 return \"#\";
4133
4134 mem = XEXP (operands[1], 0);
4135
4136 if (GET_CODE (mem) == CONST)
4137 mem = XEXP (mem, 0);
4138
4139 if (GET_CODE (mem) == LABEL_REF)
4140 return \"ldr\\t%0, %1\";
4141
4142 if (GET_CODE (mem) == PLUS)
4143 {
4144 rtx a = XEXP (mem, 0);
4145 rtx b = XEXP (mem, 1);
4146
4147 /* This can happen due to bugs in reload. */
4148 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4149 {
4150 rtx ops[2];
4151 ops[0] = operands[0];
4152 ops[1] = a;
4153
4154 output_asm_insn (\"mov %0, %1\", ops);
4155
4156 XEXP (mem, 0) = operands[0];
4157 }
4158
4159 else if (GET_CODE (a) == LABEL_REF
4160 && GET_CODE (b) == CONST_INT)
4161 return \"ldr\\t%0, %1\";
4162 }
4163
4164 return \"ldrh\\t%0, %1\";
4165 "
4166 [(set_attr_alternative "length"
4167 [(if_then_else (eq_attr "is_arch6" "yes")
4168 (const_int 2) (const_int 4))
4169 (const_int 4)])
4170 (set_attr "type" "alu_shift,load_byte")
4171 (set_attr "pool_range" "*,60")]
4172 )
4173
4174 (define_insn "*arm_zero_extendhisi2"
4175 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4176 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4177 "TARGET_ARM && arm_arch4 && !arm_arch6"
4178 "@
4179 #
4180 ldr%(h%)\\t%0, %1"
4181 [(set_attr "type" "alu_shift,load_byte")
4182 (set_attr "predicable" "yes")
4183 (set_attr "pool_range" "*,256")
4184 (set_attr "neg_pool_range" "*,244")]
4185 )
4186
4187 (define_insn "*arm_zero_extendhisi2_v6"
4188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4189 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4190 "TARGET_ARM && arm_arch6"
4191 "@
4192 uxth%?\\t%0, %1
4193 ldr%(h%)\\t%0, %1"
4194 [(set_attr "type" "alu_shift,load_byte")
4195 (set_attr "predicable" "yes")
4196 (set_attr "pool_range" "*,256")
4197 (set_attr "neg_pool_range" "*,244")]
4198 )
4199
4200 (define_insn "*arm_zero_extendhisi2addsi"
4201 [(set (match_operand:SI 0 "s_register_operand" "=r")
4202 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4203 (match_operand:SI 2 "s_register_operand" "r")))]
4204 "TARGET_INT_SIMD"
4205 "uxtah%?\\t%0, %2, %1"
4206 [(set_attr "type" "alu_shift")
4207 (set_attr "predicable" "yes")]
4208 )
4209
4210 (define_expand "zero_extendqisi2"
4211 [(set (match_operand:SI 0 "s_register_operand" "")
4212 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4213 "TARGET_EITHER"
4214 {
4215 if (TARGET_ARM && !arm_arch6 && GET_CODE (operands[1]) != MEM)
4216 {
4217 emit_insn (gen_andsi3 (operands[0],
4218 gen_lowpart (SImode, operands[1]),
4219 GEN_INT (255)));
4220 DONE;
4221 }
4222 if (!arm_arch6 && !MEM_P (operands[1]))
4223 {
4224 rtx t = gen_lowpart (SImode, operands[1]);
4225 rtx tmp = gen_reg_rtx (SImode);
4226 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4227 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4228 DONE;
4229 }
4230 })
4231
4232 (define_split
4233 [(set (match_operand:SI 0 "register_operand" "")
4234 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
4235 "!arm_arch6"
4236 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4237 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4238 {
4239 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4240 if (TARGET_ARM)
4241 {
4242 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4243 DONE;
4244 }
4245 })
4246
4247 (define_insn "*thumb1_zero_extendqisi2"
4248 [(set (match_operand:SI 0 "register_operand" "=l,l")
4249 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4250 "TARGET_THUMB1 && !arm_arch6"
4251 "@
4252 #
4253 ldrb\\t%0, %1"
4254 [(set_attr "length" "4,2")
4255 (set_attr "type" "alu_shift,load_byte")
4256 (set_attr "pool_range" "*,32")]
4257 )
4258
4259 (define_insn "*thumb1_zero_extendqisi2_v6"
4260 [(set (match_operand:SI 0 "register_operand" "=l,l")
4261 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4262 "TARGET_THUMB1 && arm_arch6"
4263 "@
4264 uxtb\\t%0, %1
4265 ldrb\\t%0, %1"
4266 [(set_attr "length" "2,2")
4267 (set_attr "type" "alu_shift,load_byte")
4268 (set_attr "pool_range" "*,32")]
4269 )
4270
4271 (define_insn "*arm_zero_extendqisi2"
4272 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4273 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4274 "TARGET_ARM && !arm_arch6"
4275 "@
4276 #
4277 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4278 [(set_attr "length" "8,4")
4279 (set_attr "type" "alu_shift,load_byte")
4280 (set_attr "predicable" "yes")
4281 (set_attr "pool_range" "*,4096")
4282 (set_attr "neg_pool_range" "*,4084")]
4283 )
4284
4285 (define_insn "*arm_zero_extendqisi2_v6"
4286 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4287 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4288 "TARGET_ARM && arm_arch6"
4289 "@
4290 uxtb%(%)\\t%0, %1
4291 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4292 [(set_attr "type" "alu_shift,load_byte")
4293 (set_attr "predicable" "yes")
4294 (set_attr "pool_range" "*,4096")
4295 (set_attr "neg_pool_range" "*,4084")]
4296 )
4297
4298 (define_insn "*arm_zero_extendqisi2addsi"
4299 [(set (match_operand:SI 0 "s_register_operand" "=r")
4300 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4301 (match_operand:SI 2 "s_register_operand" "r")))]
4302 "TARGET_INT_SIMD"
4303 "uxtab%?\\t%0, %2, %1"
4304 [(set_attr "predicable" "yes")
4305 (set_attr "insn" "xtab")
4306 (set_attr "type" "alu_shift")]
4307 )
4308
4309 (define_split
4310 [(set (match_operand:SI 0 "s_register_operand" "")
4311 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4312 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4313 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4314 [(set (match_dup 2) (match_dup 1))
4315 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4316 ""
4317 )
4318
4319 (define_split
4320 [(set (match_operand:SI 0 "s_register_operand" "")
4321 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4322 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4323 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4324 [(set (match_dup 2) (match_dup 1))
4325 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4326 ""
4327 )
4328
4329
4330 (define_split
4331 [(set (match_operand:SI 0 "s_register_operand" "")
4332 (ior_xor:SI (and:SI (ashift:SI
4333 (match_operand:SI 1 "s_register_operand" "")
4334 (match_operand:SI 2 "const_int_operand" ""))
4335 (match_operand:SI 3 "const_int_operand" ""))
4336 (zero_extend:SI
4337 (match_operator 5 "subreg_lowpart_operator"
4338 [(match_operand:SI 4 "s_register_operand" "")]))))]
4339 "TARGET_32BIT
4340 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4341 == (GET_MODE_MASK (GET_MODE (operands[5]))
4342 & (GET_MODE_MASK (GET_MODE (operands[5]))
4343 << (INTVAL (operands[2])))))"
4344 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4345 (match_dup 4)))
4346 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4347 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4348 )
4349
4350 (define_insn "*compareqi_eq0"
4351 [(set (reg:CC_Z CC_REGNUM)
4352 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4353 (const_int 0)))]
4354 "TARGET_32BIT"
4355 "tst\\t%0, #255"
4356 [(set_attr "conds" "set")]
4357 )
4358
4359 (define_expand "extendhisi2"
4360 [(set (match_operand:SI 0 "s_register_operand" "")
4361 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4362 "TARGET_EITHER"
4363 {
4364 if (TARGET_THUMB1)
4365 {
4366 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4367 DONE;
4368 }
4369 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4370 {
4371 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4372 DONE;
4373 }
4374
4375 if (!arm_arch6 && !MEM_P (operands[1]))
4376 {
4377 rtx t = gen_lowpart (SImode, operands[1]);
4378 rtx tmp = gen_reg_rtx (SImode);
4379 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4380 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4381 DONE;
4382 }
4383 })
4384
4385 (define_split
4386 [(parallel
4387 [(set (match_operand:SI 0 "register_operand" "")
4388 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4389 (clobber (match_scratch:SI 2 ""))])]
4390 "!arm_arch6"
4391 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4392 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4393 {
4394 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4395 })
4396
4397 ;; We used to have an early-clobber on the scratch register here.
4398 ;; However, there's a bug somewhere in reload which means that this
4399 ;; can be partially ignored during spill allocation if the memory
4400 ;; address also needs reloading; this causes us to die later on when
4401 ;; we try to verify the operands. Fortunately, we don't really need
4402 ;; the early-clobber: we can always use operand 0 if operand 2
4403 ;; overlaps the address.
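;; For illustration (an assumed example): if the address is [r2, #8] and
;; reload also picks r2 as the scratch, the output code below falls back to
;; the destination register as the temporary and emits
;;   mov	r0, #8
;;   ldrsh	r0, [r2, r0]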
4404 (define_insn "thumb1_extendhisi2"
4405 [(set (match_operand:SI 0 "register_operand" "=l,l")
4406 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4407 (clobber (match_scratch:SI 2 "=X,l"))]
4408 "TARGET_THUMB1"
4409 "*
4410 {
4411 rtx ops[4];
4412 rtx mem;
4413
4414 if (which_alternative == 0 && !arm_arch6)
4415 return \"#\";
4416 if (which_alternative == 0)
4417 return \"sxth\\t%0, %1\";
4418
4419 mem = XEXP (operands[1], 0);
4420
4421 /* This code used to try to use 'V', and fix the address only if it was
4422 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4423 range of QImode offsets, and offsettable_address_p does a QImode
4424 address check. */
4425
4426 if (GET_CODE (mem) == CONST)
4427 mem = XEXP (mem, 0);
4428
4429 if (GET_CODE (mem) == LABEL_REF)
4430 return \"ldr\\t%0, %1\";
4431
4432 if (GET_CODE (mem) == PLUS)
4433 {
4434 rtx a = XEXP (mem, 0);
4435 rtx b = XEXP (mem, 1);
4436
4437 if (GET_CODE (a) == LABEL_REF
4438 && GET_CODE (b) == CONST_INT)
4439 return \"ldr\\t%0, %1\";
4440
4441 if (GET_CODE (b) == REG)
4442 return \"ldrsh\\t%0, %1\";
4443
4444 ops[1] = a;
4445 ops[2] = b;
4446 }
4447 else
4448 {
4449 ops[1] = mem;
4450 ops[2] = const0_rtx;
4451 }
4452
4453 gcc_assert (GET_CODE (ops[1]) == REG);
4454
4455 ops[0] = operands[0];
4456 if (reg_mentioned_p (operands[2], ops[1]))
4457 ops[3] = ops[0];
4458 else
4459 ops[3] = operands[2];
4460 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4461 return \"\";
4462 }"
4463 [(set_attr_alternative "length"
4464 [(if_then_else (eq_attr "is_arch6" "yes")
4465 (const_int 2) (const_int 4))
4466 (const_int 4)])
4467 (set_attr "type" "alu_shift,load_byte")
4468 (set_attr "pool_range" "*,1020")]
4469 )
4470
4471 ;; This pattern will only be used when ldsh is not available
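;; For illustration (little-endian case, names assumed): the expansion loads
;; the two bytes separately and reassembles a sign-extended halfword:
;;   lo = zero_extend (byte [addr])	@ ldrb
;;   hi = zero_extend (byte [addr + 1])	@ ldrb
;;   t  = hi << 24
;;   result = (t asr #16) | lo		@ sign bit replicated into bits 31..16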
4472 (define_expand "extendhisi2_mem"
4473 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4474 (set (match_dup 3)
4475 (zero_extend:SI (match_dup 7)))
4476 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4477 (set (match_operand:SI 0 "" "")
4478 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4479 "TARGET_ARM"
4480 "
4481 {
4482 rtx mem1, mem2;
4483 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4484
4485 mem1 = change_address (operands[1], QImode, addr);
4486 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4487 operands[0] = gen_lowpart (SImode, operands[0]);
4488 operands[1] = mem1;
4489 operands[2] = gen_reg_rtx (SImode);
4490 operands[3] = gen_reg_rtx (SImode);
4491 operands[6] = gen_reg_rtx (SImode);
4492 operands[7] = mem2;
4493
4494 if (BYTES_BIG_ENDIAN)
4495 {
4496 operands[4] = operands[2];
4497 operands[5] = operands[3];
4498 }
4499 else
4500 {
4501 operands[4] = operands[3];
4502 operands[5] = operands[2];
4503 }
4504 }"
4505 )
4506
4507 (define_split
4508 [(set (match_operand:SI 0 "register_operand" "")
4509 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4510 "!arm_arch6"
4511 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4512 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4513 {
4514 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4515 })
4516
4517 (define_insn "*arm_extendhisi2"
4518 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4519 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4520 "TARGET_ARM && arm_arch4 && !arm_arch6"
4521 "@
4522 #
4523 ldr%(sh%)\\t%0, %1"
4524 [(set_attr "length" "8,4")
4525 (set_attr "type" "alu_shift,load_byte")
4526 (set_attr "predicable" "yes")
4527 (set_attr "pool_range" "*,256")
4528 (set_attr "neg_pool_range" "*,244")]
4529 )
4530
4531 ;; ??? Check Thumb-2 pool range
4532 (define_insn "*arm_extendhisi2_v6"
4533 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4534 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4535 "TARGET_32BIT && arm_arch6"
4536 "@
4537 sxth%?\\t%0, %1
4538 ldr%(sh%)\\t%0, %1"
4539 [(set_attr "type" "alu_shift,load_byte")
4540 (set_attr "predicable" "yes")
4541 (set_attr "pool_range" "*,256")
4542 (set_attr "neg_pool_range" "*,244")]
4543 )
4544
4545 (define_insn "*arm_extendhisi2addsi"
4546 [(set (match_operand:SI 0 "s_register_operand" "=r")
4547 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4548 (match_operand:SI 2 "s_register_operand" "r")))]
4549 "TARGET_INT_SIMD"
4550 "sxtah%?\\t%0, %2, %1"
4551 )
4552
4553 (define_expand "extendqihi2"
4554 [(set (match_dup 2)
4555 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4556 (const_int 24)))
4557 (set (match_operand:HI 0 "s_register_operand" "")
4558 (ashiftrt:SI (match_dup 2)
4559 (const_int 24)))]
4560 "TARGET_ARM"
4561 "
4562 {
4563 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4564 {
4565 emit_insn (gen_rtx_SET (VOIDmode,
4566 operands[0],
4567 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4568 DONE;
4569 }
4570 if (!s_register_operand (operands[1], QImode))
4571 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4572 operands[0] = gen_lowpart (SImode, operands[0]);
4573 operands[1] = gen_lowpart (SImode, operands[1]);
4574 operands[2] = gen_reg_rtx (SImode);
4575 }"
4576 )
4577
4578 (define_insn "*arm_extendqihi_insn"
4579 [(set (match_operand:HI 0 "s_register_operand" "=r")
4580 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4581 "TARGET_ARM && arm_arch4"
4582 "ldr%(sb%)\\t%0, %1"
4583 [(set_attr "type" "load_byte")
4584 (set_attr "predicable" "yes")
4585 (set_attr "pool_range" "256")
4586 (set_attr "neg_pool_range" "244")]
4587 )
4588
4589 (define_expand "extendqisi2"
4590 [(set (match_operand:SI 0 "s_register_operand" "")
4591 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
4592 "TARGET_EITHER"
4593 {
4594 if (!arm_arch4 && MEM_P (operands[1]))
4595 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4596
4597 if (!arm_arch6 && !MEM_P (operands[1]))
4598 {
4599 rtx t = gen_lowpart (SImode, operands[1]);
4600 rtx tmp = gen_reg_rtx (SImode);
4601 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4602 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4603 DONE;
4604 }
4605 })
4606
4607 (define_split
4608 [(set (match_operand:SI 0 "register_operand" "")
4609 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4610 "!arm_arch6"
4611 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4612 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4613 {
4614 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4615 })
4616
4617 (define_insn "*arm_extendqisi"
4618 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4619 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4620 "TARGET_ARM && arm_arch4 && !arm_arch6"
4621 "@
4622 #
4623 ldr%(sb%)\\t%0, %1"
4624 [(set_attr "length" "8,4")
4625 (set_attr "type" "alu_shift,load_byte")
4626 (set_attr "predicable" "yes")
4627 (set_attr "pool_range" "*,256")
4628 (set_attr "neg_pool_range" "*,244")]
4629 )
4630
4631 (define_insn "*arm_extendqisi_v6"
4632 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4633 (sign_extend:SI
4634 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4635 "TARGET_ARM && arm_arch6"
4636 "@
4637 sxtb%?\\t%0, %1
4638 ldr%(sb%)\\t%0, %1"
4639 [(set_attr "type" "alu_shift,load_byte")
4640 (set_attr "predicable" "yes")
4641 (set_attr "pool_range" "*,256")
4642 (set_attr "neg_pool_range" "*,244")]
4643 )
4644
4645 (define_insn "*arm_extendqisi2addsi"
4646 [(set (match_operand:SI 0 "s_register_operand" "=r")
4647 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4648 (match_operand:SI 2 "s_register_operand" "r")))]
4649 "TARGET_INT_SIMD"
4650 "sxtab%?\\t%0, %2, %1"
4651 [(set_attr "type" "alu_shift")
4652 (set_attr "insn" "xtab")
4653 (set_attr "predicable" "yes")]
4654 )
4655
4656 (define_split
4657 [(set (match_operand:SI 0 "register_operand" "")
4658 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
4659 "TARGET_THUMB1 && reload_completed"
4660 [(set (match_dup 0) (match_dup 2))
4661 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
4662 {
4663 rtx addr = XEXP (operands[1], 0);
4664
4665 if (GET_CODE (addr) == CONST)
4666 addr = XEXP (addr, 0);
4667
4668 if (GET_CODE (addr) == PLUS
4669 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4670 /* No split necessary. */
4671 FAIL;
4672
4673 if (GET_CODE (addr) == PLUS
4674 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
4675 FAIL;
4676
4677 if (reg_overlap_mentioned_p (operands[0], addr))
4678 {
4679 rtx t = gen_lowpart (QImode, operands[0]);
4680 emit_move_insn (t, operands[1]);
4681 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
4682 DONE;
4683 }
4684
4685 if (REG_P (addr))
4686 {
4687 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
4688 operands[2] = const0_rtx;
4689 }
4690 else if (GET_CODE (addr) != PLUS)
4691 FAIL;
4692 else if (REG_P (XEXP (addr, 0)))
4693 {
4694 operands[2] = XEXP (addr, 1);
4695 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
4696 }
4697 else
4698 {
4699 operands[2] = XEXP (addr, 0);
4700 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
4701 }
4702
4703 operands[3] = change_address (operands[1], QImode, addr);
4704 })
4705
4706 (define_peephole2
4707 [(set (match_operand:SI 0 "register_operand" "")
4708 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
4709 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
4710 (set (match_operand:SI 3 "register_operand" "")
4711 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
4712 "TARGET_THUMB1
4713 && GET_CODE (XEXP (operands[4], 0)) == PLUS
4714 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
4715 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
4716 && (peep2_reg_dead_p (3, operands[0])
4717 || rtx_equal_p (operands[0], operands[3]))
4718 && (peep2_reg_dead_p (3, operands[2])
4719 || rtx_equal_p (operands[2], operands[3]))"
4720 [(set (match_dup 2) (match_dup 1))
4721 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
4722 {
4723 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
4724 operands[4] = change_address (operands[4], QImode, addr);
4725 })
4726
4727 (define_insn "thumb1_extendqisi2"
4728 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4729 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4730 "TARGET_THUMB1"
4731 {
4732 rtx addr;
4733
4734 if (which_alternative == 0 && arm_arch6)
4735 return "sxtb\\t%0, %1";
4736 if (which_alternative == 0)
4737 return "#";
4738
4739 addr = XEXP (operands[1], 0);
4740 if (GET_CODE (addr) == PLUS
4741 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
4742 return "ldrsb\\t%0, %1";
4743
4744 return "#";
4745 }
4746 [(set_attr_alternative "length"
4747 [(if_then_else (eq_attr "is_arch6" "yes")
4748 (const_int 2) (const_int 4))
4749 (const_int 2)
4750 (if_then_else (eq_attr "is_arch6" "yes")
4751 (const_int 4) (const_int 6))])
4752 (set_attr "type" "alu_shift,load_byte,load_byte")]
4753 )
4754
4755 (define_expand "extendsfdf2"
4756 [(set (match_operand:DF 0 "s_register_operand" "")
4757 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4758 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4759 ""
4760 )
4761
4762 ;; HFmode -> DFmode conversions have to go through SFmode.
4763 (define_expand "extendhfdf2"
4764 [(set (match_operand:DF 0 "general_operand" "")
4765 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4766 "TARGET_EITHER"
4767 "
4768 {
4769 rtx op1;
4770 op1 = convert_to_mode (SFmode, operands[1], 0);
4771 op1 = convert_to_mode (DFmode, op1, 0);
4772 emit_insn (gen_movdf (operands[0], op1));
4773 DONE;
4774 }"
4775 )
4776 \f
4777 ;; Move insns (including loads and stores)
4778
4779 ;; XXX Just some ideas about movti.
4780 ;; I don't think these are a good idea on the ARM; there just aren't enough
4781 ;; registers.
4782 ;;(define_expand "loadti"
4783 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4784 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4785 ;; "" "")
4786
4787 ;;(define_expand "storeti"
4788 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4789 ;; (match_operand:TI 1 "s_register_operand" ""))]
4790 ;; "" "")
4791
4792 ;;(define_expand "movti"
4793 ;; [(set (match_operand:TI 0 "general_operand" "")
4794 ;; (match_operand:TI 1 "general_operand" ""))]
4795 ;; ""
4796 ;; "
4797 ;;{
4798 ;; rtx insn;
4799 ;;
4800 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4801 ;; operands[1] = copy_to_reg (operands[1]);
4802 ;; if (GET_CODE (operands[0]) == MEM)
4803 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4804 ;; else if (GET_CODE (operands[1]) == MEM)
4805 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4806 ;; else
4807 ;; FAIL;
4808 ;;
4809 ;; emit_insn (insn);
4810 ;; DONE;
4811 ;;}")
4812
4813 ;; Recognize garbage generated above.
4814
4815 ;;(define_insn ""
4816 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4817 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4818 ;; ""
4819 ;; "*
4820 ;; {
4821 ;; register mem = (which_alternative < 3);
4822 ;; register const char *template;
4823 ;;
4824 ;; operands[mem] = XEXP (operands[mem], 0);
4825 ;; switch (which_alternative)
4826 ;; {
4827 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4828 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4829 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4830 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4831 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4832 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4833 ;; }
4834 ;; output_asm_insn (template, operands);
4835 ;; return \"\";
4836 ;; }")
4837
4838 (define_expand "movdi"
4839 [(set (match_operand:DI 0 "general_operand" "")
4840 (match_operand:DI 1 "general_operand" ""))]
4841 "TARGET_EITHER"
4842 "
4843 if (can_create_pseudo_p ())
4844 {
4845 if (GET_CODE (operands[0]) != REG)
4846 operands[1] = force_reg (DImode, operands[1]);
4847 }
4848 "
4849 )
4850
4851 (define_insn "*arm_movdi"
4852 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4853 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4854 "TARGET_32BIT
4855 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4856 && !TARGET_IWMMXT
4857 && ( register_operand (operands[0], DImode)
4858 || register_operand (operands[1], DImode))"
4859 "*
4860 switch (which_alternative)
4861 {
4862 case 0:
4863 case 1:
4864 case 2:
4865 return \"#\";
4866 default:
4867 return output_move_double (operands);
4868 }
4869 "
4870 [(set_attr "length" "8,12,16,8,8")
4871 (set_attr "type" "*,*,*,load2,store2")
4872 (set_attr "arm_pool_range" "*,*,*,1020,*")
4873 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
4874 (set_attr "thumb2_pool_range" "*,*,*,4096,*")
4875 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4876 )
4877
4878 (define_split
4879 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4880 (match_operand:ANY64 1 "const_double_operand" ""))]
4881 "TARGET_32BIT
4882 && reload_completed
4883 && (arm_const_double_inline_cost (operands[1])
4884 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4885 [(const_int 0)]
4886 "
4887 arm_split_constant (SET, SImode, curr_insn,
4888 INTVAL (gen_lowpart (SImode, operands[1])),
4889 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4890 arm_split_constant (SET, SImode, curr_insn,
4891 INTVAL (gen_highpart_mode (SImode,
4892 GET_MODE (operands[0]),
4893 operands[1])),
4894 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4895 DONE;
4896 "
4897 )
4898
4899 ; If optimizing for size, or if we have load delay slots, then
4900 ; we want to split the constant into two separate operations.
4901 ; In both cases this may split a trivial part into a single data op
4902 ; leaving a single complex constant to load. We can also get longer
4903 ; offsets in an LDR, which improves our chances of sharing constant
4904 ; pool entries. Finally, we can normally do a better job of scheduling
4905 ; LDR instructions than we can with LDM.
4906 ; This pattern will only match if the one above did not.
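; For illustration (an assumed constant): for 0x1234567800000001 the low word
; becomes a single data op and only the high word needs the pool, e.g.
;   mov	rLO, #1
;   ldr	rHI, .LCn		@ .LCn: .word 0x12345678
; rather than loading both words with a single LDM from the pool.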
4907 (define_split
4908 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4909 (match_operand:ANY64 1 "const_double_operand" ""))]
4910 "TARGET_ARM && reload_completed
4911 && arm_const_double_by_parts (operands[1])"
4912 [(set (match_dup 0) (match_dup 1))
4913 (set (match_dup 2) (match_dup 3))]
4914 "
4915 operands[2] = gen_highpart (SImode, operands[0]);
4916 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4917 operands[1]);
4918 operands[0] = gen_lowpart (SImode, operands[0]);
4919 operands[1] = gen_lowpart (SImode, operands[1]);
4920 "
4921 )
4922
4923 (define_split
4924 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4925 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4926 "TARGET_EITHER && reload_completed"
4927 [(set (match_dup 0) (match_dup 1))
4928 (set (match_dup 2) (match_dup 3))]
4929 "
4930 operands[2] = gen_highpart (SImode, operands[0]);
4931 operands[3] = gen_highpart (SImode, operands[1]);
4932 operands[0] = gen_lowpart (SImode, operands[0]);
4933 operands[1] = gen_lowpart (SImode, operands[1]);
4934
4935 /* Handle a partial overlap. */
4936 if (rtx_equal_p (operands[0], operands[3]))
4937 {
4938 rtx tmp0 = operands[0];
4939 rtx tmp1 = operands[1];
4940
4941 operands[0] = operands[2];
4942 operands[1] = operands[3];
4943 operands[2] = tmp0;
4944 operands[3] = tmp1;
4945 }
4946 "
4947 )
4948
4949 ;; We can't actually do base+index doubleword loads if the index and
4950 ;; destination overlap. Split here so that we at least have chance to
4951 ;; schedule.
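;; For illustration (register numbers assumed): for
;;   (set (reg:DI 0) (mem:DI (plus:SI (reg:SI 0) (reg:SI 1))))
;; the split computes the address into the low half of the destination first:
;;   add	r0, r0, r1
;;   ldrd	r0, [r0]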
4952 (define_split
4953 [(set (match_operand:DI 0 "s_register_operand" "")
4954 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4955 (match_operand:SI 2 "s_register_operand" ""))))]
4956 "TARGET_LDRD
4957 && reg_overlap_mentioned_p (operands[0], operands[1])
4958 && reg_overlap_mentioned_p (operands[0], operands[2])"
4959 [(set (match_dup 4)
4960 (plus:SI (match_dup 1)
4961 (match_dup 2)))
4962 (set (match_dup 0)
4963 (mem:DI (match_dup 4)))]
4964 "
4965 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4966 "
4967 )
4968
4969 ;;; ??? This should have alternatives for constants.
4970 ;;; ??? This was originally identical to the movdf_insn pattern.
4971 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4972 ;;; thumb_reorg with a memory reference.
4973 (define_insn "*thumb1_movdi_insn"
4974 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4975 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4976 "TARGET_THUMB1
4977 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4978 && ( register_operand (operands[0], DImode)
4979 || register_operand (operands[1], DImode))"
4980 "*
4981 {
4982 switch (which_alternative)
4983 {
4984 default:
4985 case 0:
4986 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4987 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4988 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4989 case 1:
4990 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4991 case 2:
4992 operands[1] = GEN_INT (- INTVAL (operands[1]));
4993 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4994 case 3:
4995 return \"ldmia\\t%1, {%0, %H0}\";
4996 case 4:
4997 return \"stmia\\t%0, {%1, %H1}\";
4998 case 5:
4999 return thumb_load_double_from_address (operands);
5000 case 6:
5001 operands[2] = gen_rtx_MEM (SImode,
5002 plus_constant (XEXP (operands[0], 0), 4));
5003 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5004 return \"\";
5005 case 7:
5006 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5007 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5008 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5009 }
5010 }"
5011 [(set_attr "length" "4,4,6,2,2,6,4,4")
5012 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5013 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5014 )
5015
5016 (define_expand "movsi"
5017 [(set (match_operand:SI 0 "general_operand" "")
5018 (match_operand:SI 1 "general_operand" ""))]
5019 "TARGET_EITHER"
5020 "
5021 {
5022 rtx base, offset, tmp;
5023
5024 if (TARGET_32BIT)
5025 {
5026 /* Everything except mem = const or mem = mem can be done easily. */
5027 if (GET_CODE (operands[0]) == MEM)
5028 operands[1] = force_reg (SImode, operands[1]);
5029 if (arm_general_register_operand (operands[0], SImode)
5030 && GET_CODE (operands[1]) == CONST_INT
5031 && !(const_ok_for_arm (INTVAL (operands[1]))
5032 || const_ok_for_arm (~INTVAL (operands[1]))))
5033 {
5034 arm_split_constant (SET, SImode, NULL_RTX,
5035 INTVAL (operands[1]), operands[0], NULL_RTX,
5036 optimize && can_create_pseudo_p ());
5037 DONE;
5038 }
5039
5040 if (TARGET_USE_MOVT && !target_word_relocations
5041 && GET_CODE (operands[1]) == SYMBOL_REF
5042 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5043 {
5044 arm_emit_movpair (operands[0], operands[1]);
5045 DONE;
5046 }
5047 }
5048 else /* TARGET_THUMB1... */
5049 {
5050 if (can_create_pseudo_p ())
5051 {
5052 if (GET_CODE (operands[0]) != REG)
5053 operands[1] = force_reg (SImode, operands[1]);
5054 }
5055 }
5056
5057 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5058 {
5059 split_const (operands[1], &base, &offset);
5060 if (GET_CODE (base) == SYMBOL_REF
5061 && !offset_within_block_p (base, INTVAL (offset)))
5062 {
5063 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5064 emit_move_insn (tmp, base);
5065 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5066 DONE;
5067 }
5068 }
5069
5070 /* Recognize the case where operand[1] is a reference to thread-local
5071 data and load its address to a register. */
5072 if (arm_tls_referenced_p (operands[1]))
5073 {
5074 rtx tmp = operands[1];
5075 rtx addend = NULL;
5076
5077 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5078 {
5079 addend = XEXP (XEXP (tmp, 0), 1);
5080 tmp = XEXP (XEXP (tmp, 0), 0);
5081 }
5082
5083 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5084 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5085
5086 tmp = legitimize_tls_address (tmp,
5087 !can_create_pseudo_p () ? operands[0] : 0);
5088 if (addend)
5089 {
5090 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5091 tmp = force_operand (tmp, operands[0]);
5092 }
5093 operands[1] = tmp;
5094 }
5095 else if (flag_pic
5096 && (CONSTANT_P (operands[1])
5097 || symbol_mentioned_p (operands[1])
5098 || label_mentioned_p (operands[1])))
5099 operands[1] = legitimize_pic_address (operands[1], SImode,
5100 (!can_create_pseudo_p ()
5101 ? operands[0]
5102 : 0));
5103 }
5104 "
5105 )
5106
5107 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5108 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5109 ;; so this does not matter.
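;;
;; As a hedged illustration (symbol name invented), taking the address of a
;; global on a core with MOVW/MOVT, without -fpic or -mword-relocations,
;; produces the movw/movt pair handled by the movsi expander above and by
;; this pattern:
;;
;;   extern int counter;
;;   int *counter_addr (void) { return &counter; }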
5110 (define_insn "*arm_movt"
5111 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5112 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5113 (match_operand:SI 2 "general_operand" "i")))]
5114 "TARGET_32BIT"
5115 "movt%?\t%0, #:upper16:%c2"
5116 [(set_attr "predicable" "yes")
5117 (set_attr "length" "4")]
5118 )
5119
5120 (define_insn "*arm_movsi_insn"
5121 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5122 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5123 "TARGET_ARM && ! TARGET_IWMMXT
5124 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5125 && ( register_operand (operands[0], SImode)
5126 || register_operand (operands[1], SImode))"
5127 "@
5128 mov%?\\t%0, %1
5129 mov%?\\t%0, %1
5130 mvn%?\\t%0, #%B1
5131 movw%?\\t%0, %1
5132 ldr%?\\t%0, %1
5133 str%?\\t%1, %0"
5134 [(set_attr "type" "*,*,*,*,load1,store1")
5135 (set_attr "predicable" "yes")
5136 (set_attr "pool_range" "*,*,*,*,4096,*")
5137 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5138 )
5139
5140 (define_split
5141 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5142 (match_operand:SI 1 "const_int_operand" ""))]
5143 "TARGET_32BIT
5144 && (!(const_ok_for_arm (INTVAL (operands[1]))
5145 || const_ok_for_arm (~INTVAL (operands[1]))))"
5146 [(clobber (const_int 0))]
5147 "
5148 arm_split_constant (SET, SImode, NULL_RTX,
5149 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5150 DONE;
5151 "
5152 )
5153
5154 (define_insn "*thumb1_movsi_insn"
5155 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5156 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5157 "TARGET_THUMB1
5158 && ( register_operand (operands[0], SImode)
5159 || register_operand (operands[1], SImode))"
5160 "@
5161 mov %0, %1
5162 mov %0, %1
5163 #
5164 #
5165 ldmia\\t%1, {%0}
5166 stmia\\t%0, {%1}
5167 ldr\\t%0, %1
5168 str\\t%1, %0
5169 mov\\t%0, %1"
5170 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5171 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5172 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")
5173 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5174
5175 (define_split
5176 [(set (match_operand:SI 0 "register_operand" "")
5177 (match_operand:SI 1 "const_int_operand" ""))]
5178 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5179 [(set (match_dup 2) (match_dup 1))
5180 (set (match_dup 0) (neg:SI (match_dup 2)))]
5181 "
5182 {
5183 operands[1] = GEN_INT (- INTVAL (operands[1]));
5184 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5185 }"
5186 )
5187
5188 (define_split
5189 [(set (match_operand:SI 0 "register_operand" "")
5190 (match_operand:SI 1 "const_int_operand" ""))]
5191 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5192 [(set (match_dup 2) (match_dup 1))
5193 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5194 "
5195 {
5196 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5197 unsigned HOST_WIDE_INT mask = 0xff;
5198 int i;
5199
5200 for (i = 0; i < 25; i++)
5201 if ((val & (mask << i)) == val)
5202 break;
5203
5204 /* Don't split if the shift is zero. */
5205 if (i == 0)
5206 FAIL;
5207
5208 operands[1] = GEN_INT (val >> i);
5209 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5210 operands[3] = GEN_INT (i);
5211 }"
5212 )
5213
5214 ;; When generating pic, we need to load the symbol offset into a register.
5215 ;; So that the optimizer does not confuse this with a normal symbol load
5216 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5217 ;; since that is the only type of relocation we can use.
5218
5219 ;; Wrap calculation of the whole PIC address in a single pattern for the
5220 ;; benefit of optimizers, particularly PRE and HOIST. Calculation of
5221 ;; a PIC address involves two loads from memory, so we want to CSE it
5222 ;; as often as possible.
5223 ;; This pattern will be split into one of the pic_load_addr_* patterns
5224 ;; and a move after GCSE optimizations.
5225 ;;
5226 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
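;;
;; A hedged example (symbol name invented) of a construct whose address is
;; computed through this pattern when compiling with -fpic: the offset of
;; the symbol's GOT slot is loaded from the constant pool, added to the PIC
;; register, and the GOT slot is then loaded to obtain the symbol's address.
;;
;;   extern int shared_counter;
;;   int read_counter (void) { return shared_counter; }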
5227 (define_expand "calculate_pic_address"
5228 [(set (match_operand:SI 0 "register_operand" "")
5229 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5230 (unspec:SI [(match_operand:SI 2 "" "")]
5231 UNSPEC_PIC_SYM))))]
5232 "flag_pic"
5233 )
5234
5235 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5236 (define_split
5237 [(set (match_operand:SI 0 "register_operand" "")
5238 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5239 (unspec:SI [(match_operand:SI 2 "" "")]
5240 UNSPEC_PIC_SYM))))]
5241 "flag_pic"
5242 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5243 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5244 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5245 )
5246
5247 ;; The rather odd constraints on the following are to force reload to leave
5248 ;; the insn alone, and to force the minipool generation pass to then move
5249 ;; the GOT symbol to memory.
5250
5251 (define_insn "pic_load_addr_32bit"
5252 [(set (match_operand:SI 0 "s_register_operand" "=r")
5253 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5254 "TARGET_32BIT && flag_pic"
5255 "ldr%?\\t%0, %1"
5256 [(set_attr "type" "load1")
5257 (set_attr "pool_range" "4096")
5258 (set (attr "neg_pool_range")
5259 (if_then_else (eq_attr "is_thumb" "no")
5260 (const_int 4084)
5261 (const_int 0)))]
5262 )
5263
5264 (define_insn "pic_load_addr_thumb1"
5265 [(set (match_operand:SI 0 "s_register_operand" "=l")
5266 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5267 "TARGET_THUMB1 && flag_pic"
5268 "ldr\\t%0, %1"
5269 [(set_attr "type" "load1")
5270 (set (attr "pool_range") (const_int 1024))]
5271 )
5272
5273 (define_insn "pic_add_dot_plus_four"
5274 [(set (match_operand:SI 0 "register_operand" "=r")
5275 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5276 (const_int 4)
5277 (match_operand 2 "" "")]
5278 UNSPEC_PIC_BASE))]
5279 "TARGET_THUMB"
5280 "*
5281 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5282 INTVAL (operands[2]));
5283 return \"add\\t%0, %|pc\";
5284 "
5285 [(set_attr "length" "2")]
5286 )
5287
5288 (define_insn "pic_add_dot_plus_eight"
5289 [(set (match_operand:SI 0 "register_operand" "=r")
5290 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5291 (const_int 8)
5292 (match_operand 2 "" "")]
5293 UNSPEC_PIC_BASE))]
5294 "TARGET_ARM"
5295 "*
5296 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5297 INTVAL (operands[2]));
5298 return \"add%?\\t%0, %|pc, %1\";
5299 "
5300 [(set_attr "predicable" "yes")]
5301 )
5302
5303 (define_insn "tls_load_dot_plus_eight"
5304 [(set (match_operand:SI 0 "register_operand" "=r")
5305 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5306 (const_int 8)
5307 (match_operand 2 "" "")]
5308 UNSPEC_PIC_BASE)))]
5309 "TARGET_ARM"
5310 "*
5311 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5312 INTVAL (operands[2]));
5313 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5314 "
5315 [(set_attr "predicable" "yes")]
5316 )
5317
5318 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5319 ;; followed by a load. These sequences can be crunched down to
5320 ;; tls_load_dot_plus_eight by a peephole.
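;;
;; A hedged example (names invented) of the shape of code involved: with
;; -fpic, reading a file-local static can use a pc-relative base plus an
;; offset loaded from the constant pool, which is the sequence the peephole
;; below folds into a single pc-relative load.
;;
;;   static int local_state;
;;   int read_state (void) { return local_state; }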
5321
5322 (define_peephole2
5323 [(set (match_operand:SI 0 "register_operand" "")
5324 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5325 (const_int 8)
5326 (match_operand 1 "" "")]
5327 UNSPEC_PIC_BASE))
5328 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5329 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5330 [(set (match_dup 2)
5331 (mem:SI (unspec:SI [(match_dup 3)
5332 (const_int 8)
5333 (match_dup 1)]
5334 UNSPEC_PIC_BASE)))]
5335 ""
5336 )
5337
5338 (define_insn "pic_offset_arm"
5339 [(set (match_operand:SI 0 "register_operand" "=r")
5340 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5341 (unspec:SI [(match_operand:SI 2 "" "X")]
5342 UNSPEC_PIC_OFFSET))))]
5343 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5344 "ldr%?\\t%0, [%1,%2]"
5345 [(set_attr "type" "load1")]
5346 )
5347
5348 (define_expand "builtin_setjmp_receiver"
5349 [(label_ref (match_operand 0 "" ""))]
5350 "flag_pic"
5351 "
5352 {
5353 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5354 register. */
5355 if (arm_pic_register != INVALID_REGNUM)
5356 arm_load_pic_register (1UL << 3);
5357 DONE;
5358 }")
5359
5360 ;; If copying one reg to another we can set the condition codes according to
5361 ;; its value. Such a move is common after a return from a subroutine when
5362 ;; the result is being tested against zero.
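;;
;; A hedged example of the idiom (names invented):
;;
;;   extern int compute (void);
;;   int wrapper (void)
;;   {
;;     int r = compute ();       /* copy of the returned value ...       */
;;     return r == 0 ? -1 : r;   /* ... immediately tested against zero  */
;;   }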
5363
5364 (define_insn "*movsi_compare0"
5365 [(set (reg:CC CC_REGNUM)
5366 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5367 (const_int 0)))
5368 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5369 (match_dup 1))]
5370 "TARGET_32BIT"
5371 "@
5372 cmp%?\\t%0, #0
5373 sub%.\\t%0, %1, #0"
5374 [(set_attr "conds" "set")]
5375 )
5376
5377 ;; Subroutine to store a half word from a register into memory.
5378 ;; Operand 0 is the source register (HImode)
5379 ;; Operand 1 is the destination address in a register (SImode)
5380
5381 ;; In both this routine and the next, we must be careful not to spill
5382 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5383 ;; can generate unrecognizable rtl.
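;;
;; A hedged example (struct layout invented) of an address of the
;; reg+large_const form referred to above: the halfword sits at a constant
;; offset too large for an immediate, so the address must stay together
;; rather than being spilled into a separate PLUS insn.
;;
;;   struct big { char pad[4096]; short h; };
;;   void set_h (struct big *p, short v) { p->h = v; }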
5384
5385 (define_expand "storehi"
5386 [;; store the low byte
5387 (set (match_operand 1 "" "") (match_dup 3))
5388 ;; extract the high byte
5389 (set (match_dup 2)
5390 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5391 ;; store the high byte
5392 (set (match_dup 4) (match_dup 5))]
5393 "TARGET_ARM"
5394 "
5395 {
5396 rtx op1 = operands[1];
5397 rtx addr = XEXP (op1, 0);
5398 enum rtx_code code = GET_CODE (addr);
5399
5400 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5401 || code == MINUS)
5402 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5403
5404 operands[4] = adjust_address (op1, QImode, 1);
5405 operands[1] = adjust_address (operands[1], QImode, 0);
5406 operands[3] = gen_lowpart (QImode, operands[0]);
5407 operands[0] = gen_lowpart (SImode, operands[0]);
5408 operands[2] = gen_reg_rtx (SImode);
5409 operands[5] = gen_lowpart (QImode, operands[2]);
5410 }"
5411 )
5412
5413 (define_expand "storehi_bigend"
5414 [(set (match_dup 4) (match_dup 3))
5415 (set (match_dup 2)
5416 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5417 (set (match_operand 1 "" "") (match_dup 5))]
5418 "TARGET_ARM"
5419 "
5420 {
5421 rtx op1 = operands[1];
5422 rtx addr = XEXP (op1, 0);
5423 enum rtx_code code = GET_CODE (addr);
5424
5425 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5426 || code == MINUS)
5427 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5428
5429 operands[4] = adjust_address (op1, QImode, 1);
5430 operands[1] = adjust_address (operands[1], QImode, 0);
5431 operands[3] = gen_lowpart (QImode, operands[0]);
5432 operands[0] = gen_lowpart (SImode, operands[0]);
5433 operands[2] = gen_reg_rtx (SImode);
5434 operands[5] = gen_lowpart (QImode, operands[2]);
5435 }"
5436 )
5437
5438 ;; Subroutine to store a half word integer constant into memory.
5439 (define_expand "storeinthi"
5440 [(set (match_operand 0 "" "")
5441 (match_operand 1 "" ""))
5442 (set (match_dup 3) (match_dup 2))]
5443 "TARGET_ARM"
5444 "
5445 {
5446 HOST_WIDE_INT value = INTVAL (operands[1]);
5447 rtx addr = XEXP (operands[0], 0);
5448 rtx op0 = operands[0];
5449 enum rtx_code code = GET_CODE (addr);
5450
5451 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5452 || code == MINUS)
5453 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5454
5455 operands[1] = gen_reg_rtx (SImode);
5456 if (BYTES_BIG_ENDIAN)
5457 {
5458 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5459 if ((value & 255) == ((value >> 8) & 255))
5460 operands[2] = operands[1];
5461 else
5462 {
5463 operands[2] = gen_reg_rtx (SImode);
5464 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5465 }
5466 }
5467 else
5468 {
5469 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5470 if ((value & 255) == ((value >> 8) & 255))
5471 operands[2] = operands[1];
5472 else
5473 {
5474 operands[2] = gen_reg_rtx (SImode);
5475 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5476 }
5477 }
5478
5479 operands[3] = adjust_address (op0, QImode, 1);
5480 operands[0] = adjust_address (operands[0], QImode, 0);
5481 operands[2] = gen_lowpart (QImode, operands[2]);
5482 operands[1] = gen_lowpart (QImode, operands[1]);
5483 }"
5484 )
5485
5486 (define_expand "storehi_single_op"
5487 [(set (match_operand:HI 0 "memory_operand" "")
5488 (match_operand:HI 1 "general_operand" ""))]
5489 "TARGET_32BIT && arm_arch4"
5490 "
5491 if (!s_register_operand (operands[1], HImode))
5492 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5493 "
5494 )
5495
5496 (define_expand "movhi"
5497 [(set (match_operand:HI 0 "general_operand" "")
5498 (match_operand:HI 1 "general_operand" ""))]
5499 "TARGET_EITHER"
5500 "
5501 if (TARGET_ARM)
5502 {
5503 if (can_create_pseudo_p ())
5504 {
5505 if (GET_CODE (operands[0]) == MEM)
5506 {
5507 if (arm_arch4)
5508 {
5509 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5510 DONE;
5511 }
5512 if (GET_CODE (operands[1]) == CONST_INT)
5513 emit_insn (gen_storeinthi (operands[0], operands[1]));
5514 else
5515 {
5516 if (GET_CODE (operands[1]) == MEM)
5517 operands[1] = force_reg (HImode, operands[1]);
5518 if (BYTES_BIG_ENDIAN)
5519 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5520 else
5521 emit_insn (gen_storehi (operands[1], operands[0]));
5522 }
5523 DONE;
5524 }
5525 /* Sign extend a constant, and keep it in an SImode reg. */
5526 else if (GET_CODE (operands[1]) == CONST_INT)
5527 {
5528 rtx reg = gen_reg_rtx (SImode);
5529 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5530
5531 /* If the constant is already valid, leave it alone. */
5532 if (!const_ok_for_arm (val))
5533 {
5534 /* If setting all the top bits will make the constant
5535 loadable in a single instruction, then set them.
5536 Otherwise, sign extend the number. */
5537
5538 if (const_ok_for_arm (~(val | ~0xffff)))
5539 val |= ~0xffff;
5540 else if (val & 0x8000)
5541 val |= ~0xffff;
5542 }
5543
5544 emit_insn (gen_movsi (reg, GEN_INT (val)));
5545 operands[1] = gen_lowpart (HImode, reg);
5546 }
5547 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5548 && GET_CODE (operands[1]) == MEM)
5549 {
5550 rtx reg = gen_reg_rtx (SImode);
5551
5552 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5553 operands[1] = gen_lowpart (HImode, reg);
5554 }
5555 else if (!arm_arch4)
5556 {
5557 if (GET_CODE (operands[1]) == MEM)
5558 {
5559 rtx base;
5560 rtx offset = const0_rtx;
5561 rtx reg = gen_reg_rtx (SImode);
5562
5563 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5564 || (GET_CODE (base) == PLUS
5565 && (GET_CODE (offset = XEXP (base, 1))
5566 == CONST_INT)
5567 && ((INTVAL(offset) & 1) != 1)
5568 && GET_CODE (base = XEXP (base, 0)) == REG))
5569 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5570 {
5571 rtx new_rtx;
5572
5573 new_rtx = widen_memory_access (operands[1], SImode,
5574 ((INTVAL (offset) & ~3)
5575 - INTVAL (offset)));
5576 emit_insn (gen_movsi (reg, new_rtx));
5577 if (((INTVAL (offset) & 2) != 0)
5578 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5579 {
5580 rtx reg2 = gen_reg_rtx (SImode);
5581
5582 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5583 reg = reg2;
5584 }
5585 }
5586 else
5587 emit_insn (gen_movhi_bytes (reg, operands[1]));
5588
5589 operands[1] = gen_lowpart (HImode, reg);
5590 }
5591 }
5592 }
5593 /* Handle loading a large integer during reload. */
5594 else if (GET_CODE (operands[1]) == CONST_INT
5595 && !const_ok_for_arm (INTVAL (operands[1]))
5596 && !const_ok_for_arm (~INTVAL (operands[1])))
5597 {
5598 /* Writing a constant to memory needs a scratch, which should
5599 be handled with SECONDARY_RELOADs. */
5600 gcc_assert (GET_CODE (operands[0]) == REG);
5601
5602 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5603 emit_insn (gen_movsi (operands[0], operands[1]));
5604 DONE;
5605 }
5606 }
5607 else if (TARGET_THUMB2)
5608 {
5609 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5610 if (can_create_pseudo_p ())
5611 {
5612 if (GET_CODE (operands[0]) != REG)
5613 operands[1] = force_reg (HImode, operands[1]);
5614 /* Zero extend a constant, and keep it in an SImode reg. */
5615 else if (GET_CODE (operands[1]) == CONST_INT)
5616 {
5617 rtx reg = gen_reg_rtx (SImode);
5618 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5619
5620 emit_insn (gen_movsi (reg, GEN_INT (val)));
5621 operands[1] = gen_lowpart (HImode, reg);
5622 }
5623 }
5624 }
5625 else /* TARGET_THUMB1 */
5626 {
5627 if (can_create_pseudo_p ())
5628 {
5629 if (GET_CODE (operands[1]) == CONST_INT)
5630 {
5631 rtx reg = gen_reg_rtx (SImode);
5632
5633 emit_insn (gen_movsi (reg, operands[1]));
5634 operands[1] = gen_lowpart (HImode, reg);
5635 }
5636
5637 /* ??? We shouldn't really get invalid addresses here, but this can
5638 happen if we are passed a SP (never OK for HImode/QImode) or
5639 virtual register (also rejected as illegitimate for HImode/QImode)
5640 relative address. */
5641 /* ??? This should perhaps be fixed elsewhere, for instance, in
5642 fixup_stack_1, by checking for other kinds of invalid addresses,
5643 e.g. a bare reference to a virtual register. This may confuse the
5644 alpha though, which must handle this case differently. */
5645 if (GET_CODE (operands[0]) == MEM
5646 && !memory_address_p (GET_MODE (operands[0]),
5647 XEXP (operands[0], 0)))
5648 operands[0]
5649 = replace_equiv_address (operands[0],
5650 copy_to_reg (XEXP (operands[0], 0)));
5651
5652 if (GET_CODE (operands[1]) == MEM
5653 && !memory_address_p (GET_MODE (operands[1]),
5654 XEXP (operands[1], 0)))
5655 operands[1]
5656 = replace_equiv_address (operands[1],
5657 copy_to_reg (XEXP (operands[1], 0)));
5658
5659 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5660 {
5661 rtx reg = gen_reg_rtx (SImode);
5662
5663 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5664 operands[1] = gen_lowpart (HImode, reg);
5665 }
5666
5667 if (GET_CODE (operands[0]) == MEM)
5668 operands[1] = force_reg (HImode, operands[1]);
5669 }
5670 else if (GET_CODE (operands[1]) == CONST_INT
5671 && !satisfies_constraint_I (operands[1]))
5672 {
5673 /* Handle loading a large integer during reload. */
5674
5675 /* Writing a constant to memory needs a scratch, which should
5676 be handled with SECONDARY_RELOADs. */
5677 gcc_assert (GET_CODE (operands[0]) == REG);
5678
5679 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5680 emit_insn (gen_movsi (operands[0], operands[1]));
5681 DONE;
5682 }
5683 }
5684 "
5685 )
5686
5687 (define_insn "*thumb1_movhi_insn"
5688 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5689 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5690 "TARGET_THUMB1
5691 && ( register_operand (operands[0], HImode)
5692 || register_operand (operands[1], HImode))"
5693 "*
5694 switch (which_alternative)
5695 {
5696 case 0: return \"add %0, %1, #0\";
5697 case 2: return \"strh %1, %0\";
5698 case 3: return \"mov %0, %1\";
5699 case 4: return \"mov %0, %1\";
5700 case 5: return \"mov %0, %1\";
5701 default: gcc_unreachable ();
5702 case 1:
5703 /* The stack pointer can end up being taken as an index register.
5704 Catch this case here and deal with it. */
5705 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5706 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5707 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5708 {
5709 rtx ops[2];
5710 ops[0] = operands[0];
5711 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5712
5713 output_asm_insn (\"mov %0, %1\", ops);
5714
5715 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5716
5717 }
5718 return \"ldrh %0, %1\";
5719 }"
5720 [(set_attr "length" "2,4,2,2,2,2")
5721 (set_attr "type" "*,load1,store1,*,*,*")
5722 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5723
5724
5725 (define_expand "movhi_bytes"
5726 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5727 (set (match_dup 3)
5728 (zero_extend:SI (match_dup 6)))
5729 (set (match_operand:SI 0 "" "")
5730 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5731 "TARGET_ARM"
5732 "
5733 {
5734 rtx mem1, mem2;
5735 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5736
5737 mem1 = change_address (operands[1], QImode, addr);
5738 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5739 operands[0] = gen_lowpart (SImode, operands[0]);
5740 operands[1] = mem1;
5741 operands[2] = gen_reg_rtx (SImode);
5742 operands[3] = gen_reg_rtx (SImode);
5743 operands[6] = mem2;
5744
5745 if (BYTES_BIG_ENDIAN)
5746 {
5747 operands[4] = operands[2];
5748 operands[5] = operands[3];
5749 }
5750 else
5751 {
5752 operands[4] = operands[3];
5753 operands[5] = operands[2];
5754 }
5755 }"
5756 )
5757
5758 (define_expand "movhi_bigend"
5759 [(set (match_dup 2)
5760 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5761 (const_int 16)))
5762 (set (match_dup 3)
5763 (ashiftrt:SI (match_dup 2) (const_int 16)))
5764 (set (match_operand:HI 0 "s_register_operand" "")
5765 (match_dup 4))]
5766 "TARGET_ARM"
5767 "
5768 operands[2] = gen_reg_rtx (SImode);
5769 operands[3] = gen_reg_rtx (SImode);
5770 operands[4] = gen_lowpart (HImode, operands[3]);
5771 "
5772 )
5773
5774 ;; Pattern to recognize the insn generated by the default case above
5775 (define_insn "*movhi_insn_arch4"
5776 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5777 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5778 "TARGET_ARM
5779 && arm_arch4
5780 && (GET_CODE (operands[1]) != CONST_INT
5781 || const_ok_for_arm (INTVAL (operands[1]))
5782 || const_ok_for_arm (~INTVAL (operands[1])))"
5783 "@
5784 mov%?\\t%0, %1\\t%@ movhi
5785 mvn%?\\t%0, #%B1\\t%@ movhi
5786 str%(h%)\\t%1, %0\\t%@ movhi
5787 ldr%(h%)\\t%0, %1\\t%@ movhi"
5788 [(set_attr "type" "*,*,store1,load1")
5789 (set_attr "predicable" "yes")
5790 (set_attr "pool_range" "*,*,*,256")
5791 (set_attr "neg_pool_range" "*,*,*,244")]
5792 )
5793
5794 (define_insn "*movhi_bytes"
5795 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5796 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5797 "TARGET_ARM"
5798 "@
5799 mov%?\\t%0, %1\\t%@ movhi
5800 mvn%?\\t%0, #%B1\\t%@ movhi"
5801 [(set_attr "predicable" "yes")]
5802 )
5803
5804 (define_expand "thumb_movhi_clobber"
5805 [(set (match_operand:HI 0 "memory_operand" "")
5806 (match_operand:HI 1 "register_operand" ""))
5807 (clobber (match_operand:DI 2 "register_operand" ""))]
5808 "TARGET_THUMB1"
5809 "
5810 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5811 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5812 {
5813 emit_insn (gen_movhi (operands[0], operands[1]));
5814 DONE;
5815 }
5816 /* XXX Fixme, need to handle other cases here as well. */
5817 gcc_unreachable ();
5818 "
5819 )
5820
5821 ;; We use a DImode scratch because we may occasionally need an additional
5822 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5823 ;; to take any notice of the "o" constraint on the reload memory operand.
5824 (define_expand "reload_outhi"
5825 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5826 (match_operand:HI 1 "s_register_operand" "r")
5827 (match_operand:DI 2 "s_register_operand" "=&l")])]
5828 "TARGET_EITHER"
5829 "if (TARGET_ARM)
5830 arm_reload_out_hi (operands);
5831 else
5832 thumb_reload_out_hi (operands);
5833 DONE;
5834 "
5835 )
5836
5837 (define_expand "reload_inhi"
5838 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5839 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5840 (match_operand:DI 2 "s_register_operand" "=&r")])]
5841 "TARGET_EITHER"
5842 "
5843 if (TARGET_ARM)
5844 arm_reload_in_hi (operands);
5845 else
5846 thumb_reload_out_hi (operands);
5847 DONE;
5848 ")
5849
5850 (define_expand "movqi"
5851 [(set (match_operand:QI 0 "general_operand" "")
5852 (match_operand:QI 1 "general_operand" ""))]
5853 "TARGET_EITHER"
5854 "
5855 /* Everything except mem = const or mem = mem can be done easily */
5856
5857 if (can_create_pseudo_p ())
5858 {
5859 if (GET_CODE (operands[1]) == CONST_INT)
5860 {
5861 rtx reg = gen_reg_rtx (SImode);
5862
5863 /* For thumb we want an unsigned immediate, then we are more likely
5864 to be able to use a movs insn. */
5865 if (TARGET_THUMB)
5866 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5867
5868 emit_insn (gen_movsi (reg, operands[1]));
5869 operands[1] = gen_lowpart (QImode, reg);
5870 }
5871
5872 if (TARGET_THUMB)
5873 {
5874 /* ??? We shouldn't really get invalid addresses here, but this can
5875 happen if we are passed a SP (never OK for HImode/QImode) or
5876 virtual register (also rejected as illegitimate for HImode/QImode)
5877 relative address. */
5878 /* ??? This should perhaps be fixed elsewhere, for instance, in
5879 fixup_stack_1, by checking for other kinds of invalid addresses,
5880 e.g. a bare reference to a virtual register. This may confuse the
5881 alpha though, which must handle this case differently. */
5882 if (GET_CODE (operands[0]) == MEM
5883 && !memory_address_p (GET_MODE (operands[0]),
5884 XEXP (operands[0], 0)))
5885 operands[0]
5886 = replace_equiv_address (operands[0],
5887 copy_to_reg (XEXP (operands[0], 0)));
5888 if (GET_CODE (operands[1]) == MEM
5889 && !memory_address_p (GET_MODE (operands[1]),
5890 XEXP (operands[1], 0)))
5891 operands[1]
5892 = replace_equiv_address (operands[1],
5893 copy_to_reg (XEXP (operands[1], 0)));
5894 }
5895
5896 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5897 {
5898 rtx reg = gen_reg_rtx (SImode);
5899
5900 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5901 operands[1] = gen_lowpart (QImode, reg);
5902 }
5903
5904 if (GET_CODE (operands[0]) == MEM)
5905 operands[1] = force_reg (QImode, operands[1]);
5906 }
5907 else if (TARGET_THUMB
5908 && GET_CODE (operands[1]) == CONST_INT
5909 && !satisfies_constraint_I (operands[1]))
5910 {
5911 /* Handle loading a large integer during reload. */
5912
5913 /* Writing a constant to memory needs a scratch, which should
5914 be handled with SECONDARY_RELOADs. */
5915 gcc_assert (GET_CODE (operands[0]) == REG);
5916
5917 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5918 emit_insn (gen_movsi (operands[0], operands[1]));
5919 DONE;
5920 }
5921 "
5922 )
5923
5924
5925 (define_insn "*arm_movqi_insn"
5926 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5927 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5928 "TARGET_32BIT
5929 && ( register_operand (operands[0], QImode)
5930 || register_operand (operands[1], QImode))"
5931 "@
5932 mov%?\\t%0, %1
5933 mvn%?\\t%0, #%B1
5934 ldr%(b%)\\t%0, %1
5935 str%(b%)\\t%1, %0"
5936 [(set_attr "type" "*,*,load1,store1")
5937 (set_attr "predicable" "yes")]
5938 )
5939
5940 (define_insn "*thumb1_movqi_insn"
5941 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5942 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5943 "TARGET_THUMB1
5944 && ( register_operand (operands[0], QImode)
5945 || register_operand (operands[1], QImode))"
5946 "@
5947 add\\t%0, %1, #0
5948 ldrb\\t%0, %1
5949 strb\\t%1, %0
5950 mov\\t%0, %1
5951 mov\\t%0, %1
5952 mov\\t%0, %1"
5953 [(set_attr "length" "2")
5954 (set_attr "type" "*,load1,store1,*,*,*")
5955 (set_attr "pool_range" "*,32,*,*,*,*")
5956 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
5957
5958 ;; HFmode moves
5959 (define_expand "movhf"
5960 [(set (match_operand:HF 0 "general_operand" "")
5961 (match_operand:HF 1 "general_operand" ""))]
5962 "TARGET_EITHER"
5963 "
5964 if (TARGET_32BIT)
5965 {
5966 if (GET_CODE (operands[0]) == MEM)
5967 operands[1] = force_reg (HFmode, operands[1]);
5968 }
5969 else /* TARGET_THUMB1 */
5970 {
5971 if (can_create_pseudo_p ())
5972 {
5973 if (GET_CODE (operands[0]) != REG)
5974 operands[1] = force_reg (HFmode, operands[1]);
5975 }
5976 }
5977 "
5978 )
5979
5980 (define_insn "*arm32_movhf"
5981 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5982 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5983 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5984 && ( s_register_operand (operands[0], HFmode)
5985 || s_register_operand (operands[1], HFmode))"
5986 "*
5987 switch (which_alternative)
5988 {
5989 case 0: /* ARM register from memory */
5990 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5991 case 1: /* memory from ARM register */
5992 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5993 case 2: /* ARM register from ARM register */
5994 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5995 case 3: /* ARM register from constant */
5996 {
5997 REAL_VALUE_TYPE r;
5998 long bits;
5999 rtx ops[4];
6000
6001 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6002 bits = real_to_target (NULL, &r, HFmode);
6003 ops[0] = operands[0];
6004 ops[1] = GEN_INT (bits);
6005 ops[2] = GEN_INT (bits & 0xff00);
6006 ops[3] = GEN_INT (bits & 0x00ff);
6007
6008 if (arm_arch_thumb2)
6009 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6010 else
6011 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6012 return \"\";
6013 }
6014 default:
6015 gcc_unreachable ();
6016 }
6017 "
6018 [(set_attr "conds" "unconditional")
6019 (set_attr "type" "load1,store1,*,*")
6020 (set_attr "length" "4,4,4,8")
6021 (set_attr "predicable" "yes")
6022 ]
6023 )
6024
6025 (define_insn "*thumb1_movhf"
6026 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6027 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6028 "TARGET_THUMB1
6029 && ( s_register_operand (operands[0], HFmode)
6030 || s_register_operand (operands[1], HFmode))"
6031 "*
6032 switch (which_alternative)
6033 {
6034 case 1:
6035 {
6036 rtx addr;
6037 gcc_assert (GET_CODE(operands[1]) == MEM);
6038 addr = XEXP (operands[1], 0);
6039 if (GET_CODE (addr) == LABEL_REF
6040 || (GET_CODE (addr) == CONST
6041 && GET_CODE (XEXP (addr, 0)) == PLUS
6042 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6043 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6044 {
6045 /* Constant pool entry. */
6046 return \"ldr\\t%0, %1\";
6047 }
6048 return \"ldrh\\t%0, %1\";
6049 }
6050 case 2: return \"strh\\t%1, %0\";
6051 default: return \"mov\\t%0, %1\";
6052 }
6053 "
6054 [(set_attr "length" "2")
6055 (set_attr "type" "*,load1,store1,*,*")
6056 (set_attr "pool_range" "*,1020,*,*,*")
6057 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6058
6059 (define_expand "movsf"
6060 [(set (match_operand:SF 0 "general_operand" "")
6061 (match_operand:SF 1 "general_operand" ""))]
6062 "TARGET_EITHER"
6063 "
6064 if (TARGET_32BIT)
6065 {
6066 if (GET_CODE (operands[0]) == MEM)
6067 operands[1] = force_reg (SFmode, operands[1]);
6068 }
6069 else /* TARGET_THUMB1 */
6070 {
6071 if (can_create_pseudo_p ())
6072 {
6073 if (GET_CODE (operands[0]) != REG)
6074 operands[1] = force_reg (SFmode, operands[1]);
6075 }
6076 }
6077 "
6078 )
6079
6080 ;; When moving a floating-point constant into a core register, transform
6081 ;; the move into an SImode operation.
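;;
;; A hedged illustration: with software floating point, returning a literal
;; such as the one below moves the 32-bit image of the constant into a core
;; register, which this split turns into a plain SImode constant move.
;;
;;   float half (void) { return 0.5f; }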
6082 (define_split
6083 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6084 (match_operand:SF 1 "immediate_operand" ""))]
6085 "TARGET_EITHER
6086 && reload_completed
6087 && GET_CODE (operands[1]) == CONST_DOUBLE"
6088 [(set (match_dup 2) (match_dup 3))]
6089 "
6090 operands[2] = gen_lowpart (SImode, operands[0]);
6091 operands[3] = gen_lowpart (SImode, operands[1]);
6092 if (operands[2] == 0 || operands[3] == 0)
6093 FAIL;
6094 "
6095 )
6096
6097 (define_insn "*arm_movsf_soft_insn"
6098 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6099 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6100 "TARGET_32BIT
6101 && TARGET_SOFT_FLOAT
6102 && (GET_CODE (operands[0]) != MEM
6103 || register_operand (operands[1], SFmode))"
6104 "@
6105 mov%?\\t%0, %1
6106 ldr%?\\t%0, %1\\t%@ float
6107 str%?\\t%1, %0\\t%@ float"
6108 [(set_attr "predicable" "yes")
6109 (set_attr "type" "*,load1,store1")
6110 (set_attr "pool_range" "*,4096,*")
6111 (set_attr "arm_neg_pool_range" "*,4084,*")
6112 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6113 )
6114
6115 ;;; ??? This should have alternatives for constants.
6116 (define_insn "*thumb1_movsf_insn"
6117 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6118 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6119 "TARGET_THUMB1
6120 && ( register_operand (operands[0], SFmode)
6121 || register_operand (operands[1], SFmode))"
6122 "@
6123 add\\t%0, %1, #0
6124 ldmia\\t%1, {%0}
6125 stmia\\t%0, {%1}
6126 ldr\\t%0, %1
6127 str\\t%1, %0
6128 mov\\t%0, %1
6129 mov\\t%0, %1"
6130 [(set_attr "length" "2")
6131 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6132 (set_attr "pool_range" "*,*,*,1020,*,*,*")
6133 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6134 )
6135
6136 (define_expand "movdf"
6137 [(set (match_operand:DF 0 "general_operand" "")
6138 (match_operand:DF 1 "general_operand" ""))]
6139 "TARGET_EITHER"
6140 "
6141 if (TARGET_32BIT)
6142 {
6143 if (GET_CODE (operands[0]) == MEM)
6144 operands[1] = force_reg (DFmode, operands[1]);
6145 }
6146 else /* TARGET_THUMB */
6147 {
6148 if (can_create_pseudo_p ())
6149 {
6150 if (GET_CODE (operands[0]) != REG)
6151 operands[1] = force_reg (DFmode, operands[1]);
6152 }
6153 }
6154 "
6155 )
6156
6157 ;; Reloading a DFmode value stored in integer regs to memory can require a
6158 ;; scratch reg.
6159 (define_expand "reload_outdf"
6160 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6161 (match_operand:DF 1 "s_register_operand" "r")
6162 (match_operand:SI 2 "s_register_operand" "=&r")]
6163 "TARGET_32BIT"
6164 "
6165 {
6166 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6167
6168 if (code == REG)
6169 operands[2] = XEXP (operands[0], 0);
6170 else if (code == POST_INC || code == PRE_DEC)
6171 {
6172 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6173 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6174 emit_insn (gen_movdi (operands[0], operands[1]));
6175 DONE;
6176 }
6177 else if (code == PRE_INC)
6178 {
6179 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6180
6181 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6182 operands[2] = reg;
6183 }
6184 else if (code == POST_DEC)
6185 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6186 else
6187 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6188 XEXP (XEXP (operands[0], 0), 1)));
6189
6190 emit_insn (gen_rtx_SET (VOIDmode,
6191 replace_equiv_address (operands[0], operands[2]),
6192 operands[1]));
6193
6194 if (code == POST_DEC)
6195 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6196
6197 DONE;
6198 }"
6199 )
6200
6201 (define_insn "*movdf_soft_insn"
6202 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6203 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6204 "TARGET_32BIT && TARGET_SOFT_FLOAT
6205 && ( register_operand (operands[0], DFmode)
6206 || register_operand (operands[1], DFmode))"
6207 "*
6208 switch (which_alternative)
6209 {
6210 case 0:
6211 case 1:
6212 case 2:
6213 return \"#\";
6214 default:
6215 return output_move_double (operands);
6216 }
6217 "
6218 [(set_attr "length" "8,12,16,8,8")
6219 (set_attr "type" "*,*,*,load2,store2")
6220 (set_attr "pool_range" "*,*,*,1020,*")
6221 (set_attr "arm_neg_pool_range" "*,*,*,1008,*")
6222 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6223 )
6224
6225 ;;; ??? This should have alternatives for constants.
6226 ;;; ??? This was originally identical to the movdi_insn pattern.
6227 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6228 ;;; thumb_reorg with a memory reference.
6229 (define_insn "*thumb_movdf_insn"
6230 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6231 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6232 "TARGET_THUMB1
6233 && ( register_operand (operands[0], DFmode)
6234 || register_operand (operands[1], DFmode))"
6235 "*
6236 switch (which_alternative)
6237 {
6238 default:
6239 case 0:
6240 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6241 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6242 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6243 case 1:
6244 return \"ldmia\\t%1, {%0, %H0}\";
6245 case 2:
6246 return \"stmia\\t%0, {%1, %H1}\";
6247 case 3:
6248 return thumb_load_double_from_address (operands);
6249 case 4:
6250 operands[2] = gen_rtx_MEM (SImode,
6251 plus_constant (XEXP (operands[0], 0), 4));
6252 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6253 return \"\";
6254 case 5:
6255 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6256 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6257 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6258 }
6259 "
6260 [(set_attr "length" "4,2,2,6,4,4")
6261 (set_attr "type" "*,load2,store2,load2,store2,*")
6262 (set_attr "pool_range" "*,*,*,1020,*,*")]
6263 )
6264
6265 (define_expand "movxf"
6266 [(set (match_operand:XF 0 "general_operand" "")
6267 (match_operand:XF 1 "general_operand" ""))]
6268 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6269 "
6270 if (GET_CODE (operands[0]) == MEM)
6271 operands[1] = force_reg (XFmode, operands[1]);
6272 "
6273 )
6274
6275 \f
6276
6277 ;; load- and store-multiple insns
6278 ;; The ARM can load/store any set of registers, provided that they are in
6279 ;; ascending order, but these expanders assume a contiguous set.
6280
6281 (define_expand "load_multiple"
6282 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6283 (match_operand:SI 1 "" ""))
6284 (use (match_operand:SI 2 "" ""))])]
6285 "TARGET_32BIT"
6286 {
6287 HOST_WIDE_INT offset = 0;
6288
6289 /* Support only fixed point registers. */
6290 if (GET_CODE (operands[2]) != CONST_INT
6291 || INTVAL (operands[2]) > 14
6292 || INTVAL (operands[2]) < 2
6293 || GET_CODE (operands[1]) != MEM
6294 || GET_CODE (operands[0]) != REG
6295 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6296 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6297 FAIL;
6298
6299 operands[3]
6300 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6301 INTVAL (operands[2]),
6302 force_reg (SImode, XEXP (operands[1], 0)),
6303 FALSE, operands[1], &offset);
6304 })
6305
6306 (define_expand "store_multiple"
6307 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6308 (match_operand:SI 1 "" ""))
6309 (use (match_operand:SI 2 "" ""))])]
6310 "TARGET_32BIT"
6311 {
6312 HOST_WIDE_INT offset = 0;
6313
6314 /* Support only fixed point registers. */
6315 if (GET_CODE (operands[2]) != CONST_INT
6316 || INTVAL (operands[2]) > 14
6317 || INTVAL (operands[2]) < 2
6318 || GET_CODE (operands[1]) != REG
6319 || GET_CODE (operands[0]) != MEM
6320 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6321 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6322 FAIL;
6323
6324 operands[3]
6325 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6326 INTVAL (operands[2]),
6327 force_reg (SImode, XEXP (operands[0], 0)),
6328 FALSE, operands[0], &offset);
6329 })
6330
6331
6332 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6333 ;; We could let this apply for blocks of less than this, but it clobbers so
6334 ;; many registers that there is then probably a better way.
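;;
;; A hedged example (the struct is invented) of a copy that qualifies: word
;; aligned and more than two words long.
;;
;;   struct blk { int w[4]; };
;;   void copy_blk (struct blk *d, const struct blk *s) { *d = *s; }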
6335
6336 (define_expand "movmemqi"
6337 [(match_operand:BLK 0 "general_operand" "")
6338 (match_operand:BLK 1 "general_operand" "")
6339 (match_operand:SI 2 "const_int_operand" "")
6340 (match_operand:SI 3 "const_int_operand" "")]
6341 "TARGET_EITHER"
6342 "
6343 if (TARGET_32BIT)
6344 {
6345 if (arm_gen_movmemqi (operands))
6346 DONE;
6347 FAIL;
6348 }
6349 else /* TARGET_THUMB1 */
6350 {
6351 if ( INTVAL (operands[3]) != 4
6352 || INTVAL (operands[2]) > 48)
6353 FAIL;
6354
6355 thumb_expand_movmemqi (operands);
6356 DONE;
6357 }
6358 "
6359 )
6360
6361 ;; Thumb block-move insns
6362
6363 (define_insn "movmem12b"
6364 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6365 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6366 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6367 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6368 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6369 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6370 (set (match_operand:SI 0 "register_operand" "=l")
6371 (plus:SI (match_dup 2) (const_int 12)))
6372 (set (match_operand:SI 1 "register_operand" "=l")
6373 (plus:SI (match_dup 3) (const_int 12)))
6374 (clobber (match_scratch:SI 4 "=&l"))
6375 (clobber (match_scratch:SI 5 "=&l"))
6376 (clobber (match_scratch:SI 6 "=&l"))]
6377 "TARGET_THUMB1"
6378 "* return thumb_output_move_mem_multiple (3, operands);"
6379 [(set_attr "length" "4")
6380 ; This isn't entirely accurate... It loads as well, but in terms of
6381 ; scheduling the following insn it is better to consider it as a store
6382 (set_attr "type" "store3")]
6383 )
6384
6385 (define_insn "movmem8b"
6386 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6387 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6388 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6389 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6390 (set (match_operand:SI 0 "register_operand" "=l")
6391 (plus:SI (match_dup 2) (const_int 8)))
6392 (set (match_operand:SI 1 "register_operand" "=l")
6393 (plus:SI (match_dup 3) (const_int 8)))
6394 (clobber (match_scratch:SI 4 "=&l"))
6395 (clobber (match_scratch:SI 5 "=&l"))]
6396 "TARGET_THUMB1"
6397 "* return thumb_output_move_mem_multiple (2, operands);"
6398 [(set_attr "length" "4")
6399 ; This isn't entirely accurate... It loads as well, but in terms of
6400 ; scheduling the following insn it is better to consider it as a store
6401 (set_attr "type" "store2")]
6402 )
6403
6404 \f
6405
6406 ;; Compare & branch insns
6407 ;; The range calculations are as follows:
6408 ;; For forward branches, the address calculation returns the address of
6409 ;; the next instruction. This is 2 beyond the branch instruction.
6410 ;; For backward branches, the address calculation returns the address of
6411 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6412 ;; instruction for the shortest sequence, and 4 before the branch instruction
6413 ;; if we have to jump around an unconditional branch.
6414 ;; To the basic branch range the PC offset must be added (this is +4).
6415 ;; So for forward branches we have
6416 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6417 ;; And for backward branches we have
6418 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6419 ;;
6420 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6421 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250->256).
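;;
;; Working one of these through: for a backward 'b<cond>' the limit is
;; neg_range - neg_base_offs + pc_offs = -256 - (-2) + 4 = -250, and for a
;; backward 'b' it is -2048 - (-4) + 4 = -2040, matching the figures above.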
6422
6423 (define_expand "cbranchsi4"
6424 [(set (pc) (if_then_else
6425 (match_operator 0 "arm_comparison_operator"
6426 [(match_operand:SI 1 "s_register_operand" "")
6427 (match_operand:SI 2 "nonmemory_operand" "")])
6428 (label_ref (match_operand 3 "" ""))
6429 (pc)))]
6430 "TARGET_THUMB1 || TARGET_32BIT"
6431 "
6432 if (!TARGET_THUMB1)
6433 {
6434 if (!arm_add_operand (operands[2], SImode))
6435 operands[2] = force_reg (SImode, operands[2]);
6436 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6437 operands[3]));
6438 DONE;
6439 }
6440 if (thumb1_cmpneg_operand (operands[2], SImode))
6441 {
6442 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6443 operands[3], operands[0]));
6444 DONE;
6445 }
6446 if (!thumb1_cmp_operand (operands[2], SImode))
6447 operands[2] = force_reg (SImode, operands[2]);
6448 ")
6449
6450 ;; A pattern to recognize a special situation and optimize for it.
6451 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6452 ;; due to the available addressing modes. Hence, convert a signed comparison
6453 ;; with zero into an unsigned comparison with 127 if possible.
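;;
;; A hedged illustration on C source (names invented): in
;;
;;   extern void do_something (void);
;;   void check (signed char flag)
;;   {
;;     if (flag >= 0)
;;       do_something ();
;;   }
;;
;; the byte is loaded zero-extended with ldrb, and "flag >= 0" becomes the
;; unsigned test "value <= 127", since only the values 128..255 have the
;; sign bit set.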
6454 (define_expand "cbranchqi4"
6455 [(set (pc) (if_then_else
6456 (match_operator 0 "lt_ge_comparison_operator"
6457 [(match_operand:QI 1 "memory_operand" "")
6458 (match_operand:QI 2 "const0_operand" "")])
6459 (label_ref (match_operand 3 "" ""))
6460 (pc)))]
6461 "TARGET_THUMB1"
6462 {
6463 rtx xops[4];
6464 xops[1] = gen_reg_rtx (SImode);
6465 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6466 xops[2] = GEN_INT (127);
6467 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6468 VOIDmode, xops[1], xops[2]);
6469 xops[3] = operands[3];
6470 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6471 DONE;
6472 })
6473
6474 (define_expand "cbranchsf4"
6475 [(set (pc) (if_then_else
6476 (match_operator 0 "arm_comparison_operator"
6477 [(match_operand:SF 1 "s_register_operand" "")
6478 (match_operand:SF 2 "arm_float_compare_operand" "")])
6479 (label_ref (match_operand 3 "" ""))
6480 (pc)))]
6481 "TARGET_32BIT && TARGET_HARD_FLOAT"
6482 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6483 operands[3])); DONE;"
6484 )
6485
6486 (define_expand "cbranchdf4"
6487 [(set (pc) (if_then_else
6488 (match_operator 0 "arm_comparison_operator"
6489 [(match_operand:DF 1 "s_register_operand" "")
6490 (match_operand:DF 2 "arm_float_compare_operand" "")])
6491 (label_ref (match_operand 3 "" ""))
6492 (pc)))]
6493 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6494 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6495 operands[3])); DONE;"
6496 )
6497
6498 (define_expand "cbranchdi4"
6499 [(set (pc) (if_then_else
6500 (match_operator 0 "arm_comparison_operator"
6501 [(match_operand:DI 1 "cmpdi_operand" "")
6502 (match_operand:DI 2 "cmpdi_operand" "")])
6503 (label_ref (match_operand 3 "" ""))
6504 (pc)))]
6505 "TARGET_32BIT"
6506 "{
6507 rtx swap = NULL_RTX;
6508 enum rtx_code code = GET_CODE (operands[0]);
6509
6510 /* We should not have two constants. */
6511 gcc_assert (GET_MODE (operands[1]) == DImode
6512 || GET_MODE (operands[2]) == DImode);
6513
6514 /* Flip unimplemented DImode comparisons to a form that
6515 arm_gen_compare_reg can handle. */
6516 switch (code)
6517 {
6518 case GT:
6519 swap = gen_rtx_LT (VOIDmode, operands[2], operands[1]); break;
6520 case LE:
6521 swap = gen_rtx_GE (VOIDmode, operands[2], operands[1]); break;
6522 case GTU:
6523 swap = gen_rtx_LTU (VOIDmode, operands[2], operands[1]); break;
6524 case LEU:
6525 swap = gen_rtx_GEU (VOIDmode, operands[2], operands[1]); break;
6526 default:
6527 break;
6528 }
6529 if (swap)
6530 emit_jump_insn (gen_cbranch_cc (swap, operands[2], operands[1],
6531 operands[3]));
6532 else
6533 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6534 operands[3]));
6535 DONE;
6536 }"
6537 )
6538
6539 (define_insn "cbranchsi4_insn"
6540 [(set (pc) (if_then_else
6541 (match_operator 0 "arm_comparison_operator"
6542 [(match_operand:SI 1 "s_register_operand" "l,l*h")
6543 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6544 (label_ref (match_operand 3 "" ""))
6545 (pc)))]
6546 "TARGET_THUMB1"
6547 {
6548 rtx t = cfun->machine->thumb1_cc_insn;
6549 if (t != NULL_RTX)
6550 {
6551 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
6552 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
6553 t = NULL_RTX;
6554 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
6555 {
6556 if (!noov_comparison_operator (operands[0], VOIDmode))
6557 t = NULL_RTX;
6558 }
6559 else if (cfun->machine->thumb1_cc_mode != CCmode)
6560 t = NULL_RTX;
6561 }
6562 if (t == NULL_RTX)
6563 {
6564 output_asm_insn ("cmp\t%1, %2", operands);
6565 cfun->machine->thumb1_cc_insn = insn;
6566 cfun->machine->thumb1_cc_op0 = operands[1];
6567 cfun->machine->thumb1_cc_op1 = operands[2];
6568 cfun->machine->thumb1_cc_mode = CCmode;
6569 }
6570 else
6571 /* Ensure we emit the right type of condition code on the jump. */
6572 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
6573 CC_REGNUM);
6574
6575 switch (get_attr_length (insn))
6576 {
6577 case 4: return \"b%d0\\t%l3\";
6578 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6579 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6580 }
6581 }
6582 [(set (attr "far_jump")
6583 (if_then_else
6584 (eq_attr "length" "8")
6585 (const_string "yes")
6586 (const_string "no")))
6587 (set (attr "length")
6588 (if_then_else
6589 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6590 (le (minus (match_dup 3) (pc)) (const_int 256)))
6591 (const_int 4)
6592 (if_then_else
6593 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6594 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6595 (const_int 6)
6596 (const_int 8))))]
6597 )
6598
6599 (define_insn "cbranchsi4_scratch"
6600 [(set (pc) (if_then_else
6601 (match_operator 4 "arm_comparison_operator"
6602 [(match_operand:SI 1 "s_register_operand" "l,0")
6603 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6604 (label_ref (match_operand 3 "" ""))
6605 (pc)))
6606 (clobber (match_scratch:SI 0 "=l,l"))]
6607 "TARGET_THUMB1"
6608 "*
6609 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6610
6611 switch (get_attr_length (insn))
6612 {
6613 case 4: return \"b%d4\\t%l3\";
6614 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6615 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6616 }
6617 "
6618 [(set (attr "far_jump")
6619 (if_then_else
6620 (eq_attr "length" "8")
6621 (const_string "yes")
6622 (const_string "no")))
6623 (set (attr "length")
6624 (if_then_else
6625 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6626 (le (minus (match_dup 3) (pc)) (const_int 256)))
6627 (const_int 4)
6628 (if_then_else
6629 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6630 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6631 (const_int 6)
6632 (const_int 8))))]
6633 )
6634
6635 ;; Two peepholes to generate a subtract of 0 instead of a move if the
6636 ;; condition codes will be useful.
6637 (define_peephole2
6638 [(set (match_operand:SI 0 "low_register_operand" "")
6639 (match_operand:SI 1 "low_register_operand" ""))
6640 (set (pc)
6641 (if_then_else (match_operator 2 "arm_comparison_operator"
6642 [(match_dup 1) (const_int 0)])
6643 (label_ref (match_operand 3 "" ""))
6644 (pc)))]
6645 "TARGET_THUMB1"
6646 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6647 (set (pc)
6648 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6649 (label_ref (match_dup 3))
6650 (pc)))]
6651 "")
6652
6653 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6654 ;; merge cases like this because the op1 is a hard register in
6655 ;; CLASS_LIKELY_SPILLED_P.
6656 (define_peephole2
6657 [(set (match_operand:SI 0 "low_register_operand" "")
6658 (match_operand:SI 1 "low_register_operand" ""))
6659 (set (pc)
6660 (if_then_else (match_operator 2 "arm_comparison_operator"
6661 [(match_dup 0) (const_int 0)])
6662 (label_ref (match_operand 3 "" ""))
6663 (pc)))]
6664 "TARGET_THUMB1"
6665 [(set (match_dup 0) (minus:SI (match_dup 1) (const_int 0)))
6666 (set (pc)
6667 (if_then_else (match_op_dup 2 [(match_dup 0) (const_int 0)])
6668 (label_ref (match_dup 3))
6669 (pc)))]
6670 "")
6671
6672 (define_insn "*negated_cbranchsi4"
6673 [(set (pc)
6674 (if_then_else
6675 (match_operator 0 "equality_operator"
6676 [(match_operand:SI 1 "s_register_operand" "l")
6677 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6678 (label_ref (match_operand 3 "" ""))
6679 (pc)))]
6680 "TARGET_THUMB1"
6681 "*
6682 output_asm_insn (\"cmn\\t%1, %2\", operands);
6683 switch (get_attr_length (insn))
6684 {
6685 case 4: return \"b%d0\\t%l3\";
6686 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6687 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6688 }
6689 "
6690 [(set (attr "far_jump")
6691 (if_then_else
6692 (eq_attr "length" "8")
6693 (const_string "yes")
6694 (const_string "no")))
6695 (set (attr "length")
6696 (if_then_else
6697 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6698 (le (minus (match_dup 3) (pc)) (const_int 256)))
6699 (const_int 4)
6700 (if_then_else
6701 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6702 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6703 (const_int 6)
6704 (const_int 8))))]
6705 )
6706
6707 (define_insn "*tbit_cbranch"
6708 [(set (pc)
6709 (if_then_else
6710 (match_operator 0 "equality_operator"
6711 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6712 (const_int 1)
6713 (match_operand:SI 2 "const_int_operand" "i"))
6714 (const_int 0)])
6715 (label_ref (match_operand 3 "" ""))
6716 (pc)))
6717 (clobber (match_scratch:SI 4 "=l"))]
6718 "TARGET_THUMB1"
6719 "*
6720 {
6721 rtx op[3];
6722 op[0] = operands[4];
6723 op[1] = operands[1];
6724 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6725
6726 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6727 switch (get_attr_length (insn))
6728 {
6729 case 4: return \"b%d0\\t%l3\";
6730 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6731 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6732 }
6733 }"
6734 [(set (attr "far_jump")
6735 (if_then_else
6736 (eq_attr "length" "8")
6737 (const_string "yes")
6738 (const_string "no")))
6739 (set (attr "length")
6740 (if_then_else
6741 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6742 (le (minus (match_dup 3) (pc)) (const_int 256)))
6743 (const_int 4)
6744 (if_then_else
6745 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6746 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6747 (const_int 6)
6748 (const_int 8))))]
6749 )
6750
6751 (define_insn "*tlobits_cbranch"
6752 [(set (pc)
6753 (if_then_else
6754 (match_operator 0 "equality_operator"
6755 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6756 (match_operand:SI 2 "const_int_operand" "i")
6757 (const_int 0))
6758 (const_int 0)])
6759 (label_ref (match_operand 3 "" ""))
6760 (pc)))
6761 (clobber (match_scratch:SI 4 "=l"))]
6762 "TARGET_THUMB1"
6763 "*
6764 {
6765 rtx op[3];
6766 op[0] = operands[4];
6767 op[1] = operands[1];
6768 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6769
6770 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6771 switch (get_attr_length (insn))
6772 {
6773 case 4: return \"b%d0\\t%l3\";
6774 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6775 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6776 }
6777 }"
6778 [(set (attr "far_jump")
6779 (if_then_else
6780 (eq_attr "length" "8")
6781 (const_string "yes")
6782 (const_string "no")))
6783 (set (attr "length")
6784 (if_then_else
6785 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6786 (le (minus (match_dup 3) (pc)) (const_int 256)))
6787 (const_int 4)
6788 (if_then_else
6789 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6790 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6791 (const_int 6)
6792 (const_int 8))))]
6793 )
6794
6795 (define_insn "*tstsi3_cbranch"
6796 [(set (pc)
6797 (if_then_else
6798 (match_operator 3 "equality_operator"
6799 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6800 (match_operand:SI 1 "s_register_operand" "l"))
6801 (const_int 0)])
6802 (label_ref (match_operand 2 "" ""))
6803 (pc)))]
6804 "TARGET_THUMB1"
6805 "*
6806 {
6807 output_asm_insn (\"tst\\t%0, %1\", operands);
6808 switch (get_attr_length (insn))
6809 {
6810 case 4: return \"b%d3\\t%l2\";
6811 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6812 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6813 }
6814 }"
6815 [(set (attr "far_jump")
6816 (if_then_else
6817 (eq_attr "length" "8")
6818 (const_string "yes")
6819 (const_string "no")))
6820 (set (attr "length")
6821 (if_then_else
6822 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6823 (le (minus (match_dup 2) (pc)) (const_int 256)))
6824 (const_int 4)
6825 (if_then_else
6826 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6827 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6828 (const_int 6)
6829 (const_int 8))))]
6830 )
6831
6832 (define_insn "*cbranchne_decr1"
6833 [(set (pc)
6834 (if_then_else (match_operator 3 "equality_operator"
6835 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6836 (const_int 0)])
6837 (label_ref (match_operand 4 "" ""))
6838 (pc)))
6839 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6840 (plus:SI (match_dup 2) (const_int -1)))
6841 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6842 "TARGET_THUMB1"
6843 "*
6844 {
6845 rtx cond[2];
6846 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6847 ? GEU : LTU),
6848 VOIDmode, operands[2], const1_rtx);
6849 cond[1] = operands[4];
6850
6851 if (which_alternative == 0)
6852 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6853 else if (which_alternative == 1)
6854 {
6855 /* We must provide an alternative for a hi reg because reload
6856 cannot handle output reloads on a jump instruction, but we
6857 can't subtract into that. Fortunately a mov from lo to hi
6858 does not clobber the condition codes. */
6859 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6860 output_asm_insn (\"mov\\t%0, %1\", operands);
6861 }
6862 else
6863 {
6864 /* Similarly, but the target is memory. */
6865 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6866 output_asm_insn (\"str\\t%1, %0\", operands);
6867 }
6868
6869 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6870 {
6871 case 4:
6872 output_asm_insn (\"b%d0\\t%l1\", cond);
6873 return \"\";
6874 case 6:
6875 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6876 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
6877 default:
6878 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
6879 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6880 }
6881 }
6882 "
6883 [(set (attr "far_jump")
6884 (if_then_else
6885 (ior (and (eq (symbol_ref ("which_alternative"))
6886 (const_int 0))
6887 (eq_attr "length" "8"))
6888 (eq_attr "length" "10"))
6889 (const_string "yes")
6890 (const_string "no")))
6891 (set_attr_alternative "length"
6892 [
6893 ;; Alternative 0
6894 (if_then_else
6895 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6896 (le (minus (match_dup 4) (pc)) (const_int 256)))
6897 (const_int 4)
6898 (if_then_else
6899 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6900 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6901 (const_int 6)
6902 (const_int 8)))
6903 ;; Alternative 1
6904 (if_then_else
6905 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6906 (le (minus (match_dup 4) (pc)) (const_int 256)))
6907 (const_int 6)
6908 (if_then_else
6909 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6910 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6911 (const_int 8)
6912 (const_int 10)))
6913 ;; Alternative 2
6914 (if_then_else
6915 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6916 (le (minus (match_dup 4) (pc)) (const_int 256)))
6917 (const_int 6)
6918 (if_then_else
6919 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6920 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6921 (const_int 8)
6922 (const_int 10)))
6923 ;; Alternative 3
6924 (if_then_else
6925 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6926 (le (minus (match_dup 4) (pc)) (const_int 256)))
6927 (const_int 6)
6928 (if_then_else
6929 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6930 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6931 (const_int 8)
6932 (const_int 10)))])]
6933 )
6934
6935 (define_insn "*addsi3_cbranch"
6936 [(set (pc)
6937 (if_then_else
6938 (match_operator 4 "arm_comparison_operator"
6939 [(plus:SI
6940 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
6941 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
6942 (const_int 0)])
6943 (label_ref (match_operand 5 "" ""))
6944 (pc)))
6945 (set
6946 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
6947 (plus:SI (match_dup 2) (match_dup 3)))
6948 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
6949 "TARGET_THUMB1
6950 && (GET_CODE (operands[4]) == EQ
6951 || GET_CODE (operands[4]) == NE
6952 || GET_CODE (operands[4]) == GE
6953 || GET_CODE (operands[4]) == LT)"
6954 "*
6955 {
6956 rtx cond[3];
6957
6958 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
6959 cond[1] = operands[2];
6960 cond[2] = operands[3];
6961
6962 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
6963 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
6964 else
6965 output_asm_insn (\"add\\t%0, %1, %2\", cond);
6966
6967 if (which_alternative >= 2
6968 && which_alternative < 4)
6969 output_asm_insn (\"mov\\t%0, %1\", operands);
6970 else if (which_alternative >= 4)
6971 output_asm_insn (\"str\\t%1, %0\", operands);
6972
6973 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
6974 {
6975 case 4:
6976 return \"b%d4\\t%l5\";
6977 case 6:
6978 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
6979 default:
6980 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
6981 }
6982 }
6983 "
6984 [(set (attr "far_jump")
6985 (if_then_else
6986 (ior (and (lt (symbol_ref ("which_alternative"))
6987 (const_int 2))
6988 (eq_attr "length" "8"))
6989 (eq_attr "length" "10"))
6990 (const_string "yes")
6991 (const_string "no")))
6992 (set (attr "length")
6993 (if_then_else
6994 (lt (symbol_ref ("which_alternative"))
6995 (const_int 2))
6996 (if_then_else
6997 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
6998 (le (minus (match_dup 5) (pc)) (const_int 256)))
6999 (const_int 4)
7000 (if_then_else
7001 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7002 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7003 (const_int 6)
7004 (const_int 8)))
7005 (if_then_else
7006 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7007 (le (minus (match_dup 5) (pc)) (const_int 256)))
7008 (const_int 6)
7009 (if_then_else
7010 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7011 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7012 (const_int 8)
7013 (const_int 10)))))]
7014 )
7015
7016 (define_insn "*addsi3_cbranch_scratch"
7017 [(set (pc)
7018 (if_then_else
7019 (match_operator 3 "arm_comparison_operator"
7020 [(plus:SI
7021 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7022 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7023 (const_int 0)])
7024 (label_ref (match_operand 4 "" ""))
7025 (pc)))
7026 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7027 "TARGET_THUMB1
7028 && (GET_CODE (operands[3]) == EQ
7029 || GET_CODE (operands[3]) == NE
7030 || GET_CODE (operands[3]) == GE
7031 || GET_CODE (operands[3]) == LT)"
7032 "*
7033 {
7034 switch (which_alternative)
7035 {
7036 case 0:
7037 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7038 break;
7039 case 1:
7040 output_asm_insn (\"cmn\t%1, %2\", operands);
7041 break;
7042 case 2:
7043 if (INTVAL (operands[2]) < 0)
7044 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7045 else
7046 output_asm_insn (\"add\t%0, %1, %2\", operands);
7047 break;
7048 case 3:
7049 if (INTVAL (operands[2]) < 0)
7050 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7051 else
7052 output_asm_insn (\"add\t%0, %0, %2\", operands);
7053 break;
7054 }
7055
7056 switch (get_attr_length (insn))
7057 {
7058 case 4:
7059 return \"b%d3\\t%l4\";
7060 case 6:
7061 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7062 default:
7063 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7064 }
7065 }
7066 "
7067 [(set (attr "far_jump")
7068 (if_then_else
7069 (eq_attr "length" "8")
7070 (const_string "yes")
7071 (const_string "no")))
7072 (set (attr "length")
7073 (if_then_else
7074 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7075 (le (minus (match_dup 4) (pc)) (const_int 256)))
7076 (const_int 4)
7077 (if_then_else
7078 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7079 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7080 (const_int 6)
7081 (const_int 8))))]
7082 )
7083
7084
7085 ;; Comparison and test insns
7086
7087 (define_insn "*arm_cmpsi_insn"
7088 [(set (reg:CC CC_REGNUM)
7089 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7090 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7091 "TARGET_32BIT"
7092 "@
7093 cmp%?\\t%0, %1
7094 cmn%?\\t%0, #%n1"
7095 [(set_attr "conds" "set")]
7096 )
7097
7098 (define_insn "*cmpsi_shiftsi"
7099 [(set (reg:CC CC_REGNUM)
7100 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7101 (match_operator:SI 3 "shift_operator"
7102 [(match_operand:SI 1 "s_register_operand" "r,r")
7103 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7104 "TARGET_32BIT"
7105 "cmp%?\\t%0, %1%S3"
7106 [(set_attr "conds" "set")
7107 (set_attr "shift" "1")
7108 (set_attr "arch" "32,a")
7109 (set_attr "type" "alu_shift,alu_shift_reg")])
7110
7111 (define_insn "*cmpsi_shiftsi_swp"
7112 [(set (reg:CC_SWP CC_REGNUM)
7113 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7114 [(match_operand:SI 1 "s_register_operand" "r,r")
7115 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7116 (match_operand:SI 0 "s_register_operand" "r,r")))]
7117 "TARGET_32BIT"
7118 "cmp%?\\t%0, %1%S3"
7119 [(set_attr "conds" "set")
7120 (set_attr "shift" "1")
7121 (set_attr "arch" "32,a")
7122 (set_attr "type" "alu_shift,alu_shift_reg")])
7123
7124 (define_insn "*arm_cmpsi_negshiftsi_si"
7125 [(set (reg:CC_Z CC_REGNUM)
7126 (compare:CC_Z
7127 (neg:SI (match_operator:SI 1 "shift_operator"
7128 [(match_operand:SI 2 "s_register_operand" "r")
7129 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7130 (match_operand:SI 0 "s_register_operand" "r")))]
7131 "TARGET_ARM"
7132 "cmn%?\\t%0, %2%S1"
7133 [(set_attr "conds" "set")
7134 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7135 (const_string "alu_shift")
7136 (const_string "alu_shift_reg")))]
7137 )
7138
7139 ;; DImode comparisons. The generic code generates branches that
7140 ;; if-conversion cannot reduce to a conditional compare, so we do
7141 ;; that directly.
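;; For illustration only (register choice is arbitrary): with the low
;; words in r0/r2, the high words in r1/r3 and ip as the scratch,
;; *arm_cmpdi_insn below emits
;;	cmp	r0, r2
;;	sbcs	ip, r1, r3
;; so one conditional branch on the resulting flags replaces the branch
;; tree the generic expanders would otherwise build.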
7142
7143 (define_insn "*arm_cmpdi_insn"
7144 [(set (reg:CC_NCV CC_REGNUM)
7145 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7146 (match_operand:DI 1 "arm_di_operand" "rDi")))
7147 (clobber (match_scratch:SI 2 "=r"))]
7148 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
7149 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7150 [(set_attr "conds" "set")
7151 (set_attr "length" "8")]
7152 )
7153
7154 (define_insn "*arm_cmpdi_unsigned"
7155 [(set (reg:CC_CZ CC_REGNUM)
7156 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
7157 (match_operand:DI 1 "arm_di_operand" "rDi")))]
7158 "TARGET_ARM"
7159 "cmp%?\\t%R0, %R1\;cmpeq\\t%Q0, %Q1"
7160 [(set_attr "conds" "set")
7161 (set_attr "length" "8")]
7162 )
7163
7164 (define_insn "*arm_cmpdi_zero"
7165 [(set (reg:CC_Z CC_REGNUM)
7166 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7167 (const_int 0)))
7168 (clobber (match_scratch:SI 1 "=r"))]
7169 "TARGET_32BIT"
7170 "orr%.\\t%1, %Q0, %R0"
7171 [(set_attr "conds" "set")]
7172 )
7173
7174 (define_insn "*thumb_cmpdi_zero"
7175 [(set (reg:CC_Z CC_REGNUM)
7176 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7177 (const_int 0)))
7178 (clobber (match_scratch:SI 1 "=l"))]
7179 "TARGET_THUMB1"
7180 "orr\\t%1, %Q0, %R0"
7181 [(set_attr "conds" "set")
7182 (set_attr "length" "2")]
7183 )
7184
7185 ;; Cirrus SF compare instruction
7186 (define_insn "*cirrus_cmpsf"
7187 [(set (reg:CCFP CC_REGNUM)
7188 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7189 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7190 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7191 "cfcmps%?\\tr15, %V0, %V1"
7192 [(set_attr "type" "mav_farith")
7193 (set_attr "cirrus" "compare")]
7194 )
7195
7196 ;; Cirrus DF compare instruction
7197 (define_insn "*cirrus_cmpdf"
7198 [(set (reg:CCFP CC_REGNUM)
7199 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7200 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7201 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7202 "cfcmpd%?\\tr15, %V0, %V1"
7203 [(set_attr "type" "mav_farith")
7204 (set_attr "cirrus" "compare")]
7205 )
7206
7207 (define_insn "*cirrus_cmpdi"
7208 [(set (reg:CC CC_REGNUM)
7209 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7210 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7211 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7212 "cfcmp64%?\\tr15, %V0, %V1"
7213 [(set_attr "type" "mav_farith")
7214 (set_attr "cirrus" "compare")]
7215 )
7216
7217 ; This insn allows redundant compares to be removed by cse; nothing should
7218 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7219 ; is deleted later on. The match_dup will match the mode here, so that
7220 ; mode changes of the condition codes aren't lost by this even though we don't
7221 ; specify what they are.
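;
; For instance (an illustrative sketch): if two identical "cmp r0, #0"
; compares reach cse, the second collapses into a set of the CC register
; to itself; that self-set matches this pattern and is normally deleted
; before anything is written out.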
7222
7223 (define_insn "*deleted_compare"
7224 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7225 "TARGET_32BIT"
7226 "\\t%@ deleted compare"
7227 [(set_attr "conds" "set")
7228 (set_attr "length" "0")]
7229 )
7230
7231 \f
7232 ;; Conditional branch insns
7233
7234 (define_expand "cbranch_cc"
7235 [(set (pc)
7236 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7237 (match_operand 2 "" "")])
7238 (label_ref (match_operand 3 "" ""))
7239 (pc)))]
7240 "TARGET_32BIT"
7241 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7242 operands[1], operands[2]);
7243 operands[2] = const0_rtx;"
7244 )
7245
7246 ;;
7247 ;; Patterns to match conditional branch insns.
7248 ;;
7249
7250 (define_insn "*arm_cond_branch"
7251 [(set (pc)
7252 (if_then_else (match_operator 1 "arm_comparison_operator"
7253 [(match_operand 2 "cc_register" "") (const_int 0)])
7254 (label_ref (match_operand 0 "" ""))
7255 (pc)))]
7256 "TARGET_32BIT"
7257 "*
7258 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7259 {
7260 arm_ccfsm_state += 2;
7261 return \"\";
7262 }
7263 return \"b%d1\\t%l0\";
7264 "
7265 [(set_attr "conds" "use")
7266 (set_attr "type" "branch")]
7267 )
7268
7269 (define_insn "*arm_cond_branch_reversed"
7270 [(set (pc)
7271 (if_then_else (match_operator 1 "arm_comparison_operator"
7272 [(match_operand 2 "cc_register" "") (const_int 0)])
7273 (pc)
7274 (label_ref (match_operand 0 "" ""))))]
7275 "TARGET_32BIT"
7276 "*
7277 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7278 {
7279 arm_ccfsm_state += 2;
7280 return \"\";
7281 }
7282 return \"b%D1\\t%l0\";
7283 "
7284 [(set_attr "conds" "use")
7285 (set_attr "type" "branch")]
7286 )
7287
7288 \f
7289
7290 ; scc insns
7291
7292 (define_expand "cstore_cc"
7293 [(set (match_operand:SI 0 "s_register_operand" "")
7294 (match_operator:SI 1 "" [(match_operand 2 "" "")
7295 (match_operand 3 "" "")]))]
7296 "TARGET_32BIT"
7297 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7298 operands[2], operands[3]);
7299 operands[3] = const0_rtx;"
7300 )
7301
7302 (define_insn "*mov_scc"
7303 [(set (match_operand:SI 0 "s_register_operand" "=r")
7304 (match_operator:SI 1 "arm_comparison_operator"
7305 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7306 "TARGET_ARM"
7307 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7308 [(set_attr "conds" "use")
7309 (set_attr "length" "8")]
7310 )
7311
7312 (define_insn "*mov_negscc"
7313 [(set (match_operand:SI 0 "s_register_operand" "=r")
7314 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7315 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7316 "TARGET_ARM"
7317 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7318 [(set_attr "conds" "use")
7319 (set_attr "length" "8")]
7320 )
7321
7322 (define_insn "*mov_notscc"
7323 [(set (match_operand:SI 0 "s_register_operand" "=r")
7324 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7325 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7326 "TARGET_ARM"
7327 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7328 [(set_attr "conds" "use")
7329 (set_attr "length" "8")]
7330 )
7331
7332 (define_expand "cstoresi4"
7333 [(set (match_operand:SI 0 "s_register_operand" "")
7334 (match_operator:SI 1 "arm_comparison_operator"
7335 [(match_operand:SI 2 "s_register_operand" "")
7336 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7337 "TARGET_32BIT || TARGET_THUMB1"
7338 "{
7339 rtx op3, scratch, scratch2;
7340
7341 if (!TARGET_THUMB1)
7342 {
7343 if (!arm_add_operand (operands[3], SImode))
7344 operands[3] = force_reg (SImode, operands[3]);
7345 emit_insn (gen_cstore_cc (operands[0], operands[1],
7346 operands[2], operands[3]));
7347 DONE;
7348 }
7349
7350 if (operands[3] == const0_rtx)
7351 {
7352 switch (GET_CODE (operands[1]))
7353 {
7354 case EQ:
7355 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7356 break;
7357
7358 case NE:
7359 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7360 break;
7361
7362 case LE:
7363 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7364 NULL_RTX, 0, OPTAB_WIDEN);
7365 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7366 NULL_RTX, 0, OPTAB_WIDEN);
7367 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7368 operands[0], 1, OPTAB_WIDEN);
7369 break;
7370
7371 case GE:
7372 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7373 NULL_RTX, 1);
7374 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7375 			 operands[0], 1, OPTAB_WIDEN);
7376 break;
7377
7378 case GT:
7379 scratch = expand_binop (SImode, ashr_optab, operands[2],
7380 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7381 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7382 NULL_RTX, 0, OPTAB_WIDEN);
7383 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7384 0, OPTAB_WIDEN);
7385 break;
7386
7387 /* LT is handled by generic code. No need for unsigned with 0. */
7388 default:
7389 FAIL;
7390 }
7391 DONE;
7392 }
7393
7394 switch (GET_CODE (operands[1]))
7395 {
7396 case EQ:
7397 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7398 NULL_RTX, 0, OPTAB_WIDEN);
7399 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7400 break;
7401
7402 case NE:
7403 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7404 NULL_RTX, 0, OPTAB_WIDEN);
7405 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7406 break;
7407
7408 case LE:
7409 op3 = force_reg (SImode, operands[3]);
7410
7411 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7412 NULL_RTX, 1, OPTAB_WIDEN);
7413 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7414 NULL_RTX, 0, OPTAB_WIDEN);
7415 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7416 op3, operands[2]));
7417 break;
7418
7419 case GE:
7420 op3 = operands[3];
7421 if (!thumb1_cmp_operand (op3, SImode))
7422 op3 = force_reg (SImode, op3);
7423 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7424 NULL_RTX, 0, OPTAB_WIDEN);
7425 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7426 NULL_RTX, 1, OPTAB_WIDEN);
7427 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7428 operands[2], op3));
7429 break;
7430
7431 case LEU:
7432 op3 = force_reg (SImode, operands[3]);
7433 scratch = force_reg (SImode, const0_rtx);
7434 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7435 op3, operands[2]));
7436 break;
7437
7438 case GEU:
7439 op3 = operands[3];
7440 if (!thumb1_cmp_operand (op3, SImode))
7441 op3 = force_reg (SImode, op3);
7442 scratch = force_reg (SImode, const0_rtx);
7443 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7444 operands[2], op3));
7445 break;
7446
7447 case LTU:
7448 op3 = operands[3];
7449 if (!thumb1_cmp_operand (op3, SImode))
7450 op3 = force_reg (SImode, op3);
7451 scratch = gen_reg_rtx (SImode);
7452 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7453 break;
7454
7455 case GTU:
7456 op3 = force_reg (SImode, operands[3]);
7457 scratch = gen_reg_rtx (SImode);
7458 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7459 break;
7460
7461 /* No good sequences for GT, LT. */
7462 default:
7463 FAIL;
7464 }
7465 DONE;
7466 }")
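
;; A hedged example of the Thumb-1 expansion above (registers are
;; illustrative): for (x == 0) with x in r1 and the result in r0,
;; cstoresi_eq0_thumb1 produces
;;	neg	r0, r1
;;	adc	r0, r0, r1
;; i.e. (0 - x) + x + carry-from-the-negate, which is 1 exactly when x
;; is zero and 0 otherwise.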
7467
7468 (define_expand "cstoresf4"
7469 [(set (match_operand:SI 0 "s_register_operand" "")
7470 (match_operator:SI 1 "arm_comparison_operator"
7471 [(match_operand:SF 2 "s_register_operand" "")
7472 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7473 "TARGET_32BIT && TARGET_HARD_FLOAT"
7474 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7475 operands[2], operands[3])); DONE;"
7476 )
7477
7478 (define_expand "cstoredf4"
7479 [(set (match_operand:SI 0 "s_register_operand" "")
7480 (match_operator:SI 1 "arm_comparison_operator"
7481 [(match_operand:DF 2 "s_register_operand" "")
7482 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7483 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7484 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7485 operands[2], operands[3])); DONE;"
7486 )
7487
7488 (define_expand "cstoredi4"
7489 [(set (match_operand:SI 0 "s_register_operand" "")
7490 (match_operator:SI 1 "arm_comparison_operator"
7491 [(match_operand:DI 2 "cmpdi_operand" "")
7492 (match_operand:DI 3 "cmpdi_operand" "")]))]
7493 "TARGET_32BIT"
7494 "{
7495 rtx swap = NULL_RTX;
7496 enum rtx_code code = GET_CODE (operands[1]);
7497
7498 /* We should not have two constants. */
7499 gcc_assert (GET_MODE (operands[2]) == DImode
7500 || GET_MODE (operands[3]) == DImode);
7501
7502 /* Flip unimplemented DImode comparisons to a form that
7503 arm_gen_compare_reg can handle. */
7504 switch (code)
7505 {
7506 case GT:
7507 swap = gen_rtx_LT (VOIDmode, operands[3], operands[2]); break;
7508 case LE:
7509 swap = gen_rtx_GE (VOIDmode, operands[3], operands[2]); break;
7510 case GTU:
7511 swap = gen_rtx_LTU (VOIDmode, operands[3], operands[2]); break;
7512 case LEU:
7513 swap = gen_rtx_GEU (VOIDmode, operands[3], operands[2]); break;
7514 default:
7515 break;
7516 }
7517 if (swap)
7518 emit_insn (gen_cstore_cc (operands[0], swap, operands[3],
7519 operands[2]));
7520 else
7521 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7522 operands[3]));
7523 DONE;
7524 }"
7525 )
7526
7527 (define_expand "cstoresi_eq0_thumb1"
7528 [(parallel
7529 [(set (match_operand:SI 0 "s_register_operand" "")
7530 (eq:SI (match_operand:SI 1 "s_register_operand" "")
7531 (const_int 0)))
7532 (clobber (match_dup:SI 2))])]
7533 "TARGET_THUMB1"
7534 "operands[2] = gen_reg_rtx (SImode);"
7535 )
7536
7537 (define_expand "cstoresi_ne0_thumb1"
7538 [(parallel
7539 [(set (match_operand:SI 0 "s_register_operand" "")
7540 (ne:SI (match_operand:SI 1 "s_register_operand" "")
7541 (const_int 0)))
7542 (clobber (match_dup:SI 2))])]
7543 "TARGET_THUMB1"
7544 "operands[2] = gen_reg_rtx (SImode);"
7545 )
7546
7547 (define_insn "*cstoresi_eq0_thumb1_insn"
7548 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7549 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7550 (const_int 0)))
7551 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7552 "TARGET_THUMB1"
7553 "@
7554 neg\\t%0, %1\;adc\\t%0, %0, %1
7555 neg\\t%2, %1\;adc\\t%0, %1, %2"
7556 [(set_attr "length" "4")]
7557 )
7558
7559 (define_insn "*cstoresi_ne0_thumb1_insn"
7560 [(set (match_operand:SI 0 "s_register_operand" "=l")
7561 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
7562 (const_int 0)))
7563 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
7564 "TARGET_THUMB1"
7565 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
7566 [(set_attr "length" "4")]
7567 )
7568
7569 ;; Used as part of the expansion of thumb ltu and gtu sequences
7570 (define_insn "cstoresi_nltu_thumb1"
7571 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7572 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7573 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
7574 "TARGET_THUMB1"
7575 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
7576 [(set_attr "length" "4")]
7577 )
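
;; Sketch only (not from the original comments): with x in r1 and y in
;; r2 the pattern above prints
;;	cmp	r1, r2
;;	sbc	r0, r0, r0
;; leaving r0 = -1 when x < y (unsigned) and 0 otherwise; the
;; cstoresi_ltu_thumb1 split below then negates that to get a 0/1 value.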
7578
7579 (define_insn_and_split "cstoresi_ltu_thumb1"
7580 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
7581 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
7582 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
7583 "TARGET_THUMB1"
7584 "#"
7585 "TARGET_THUMB1"
7586 [(set (match_dup 3)
7587 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
7588 (set (match_dup 0) (neg:SI (match_dup 3)))]
7589 "operands[3] = gen_reg_rtx (SImode);"
7590 [(set_attr "length" "4")]
7591 )
7592
7593 ;; Used as part of the expansion of the thumb le/ge (and leu/geu) cstore sequences.
7594 (define_insn "thumb1_addsi3_addgeu"
7595 [(set (match_operand:SI 0 "s_register_operand" "=l")
7596 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
7597 (match_operand:SI 2 "s_register_operand" "l"))
7598 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
7599 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
7600 "TARGET_THUMB1"
7601 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
7602 [(set_attr "length" "4")]
7603 )
7604
7605 \f
7606 ;; Conditional move insns
7607
7608 (define_expand "movsicc"
7609 [(set (match_operand:SI 0 "s_register_operand" "")
7610 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
7611 (match_operand:SI 2 "arm_not_operand" "")
7612 (match_operand:SI 3 "arm_not_operand" "")))]
7613 "TARGET_32BIT"
7614 "
7615 {
7616 enum rtx_code code = GET_CODE (operands[1]);
7617 rtx ccreg;
7618
7619 if (code == UNEQ || code == LTGT)
7620 FAIL;
7621
7622 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7623 XEXP (operands[1], 1));
7624 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7625 }"
7626 )
7627
7628 (define_expand "movsfcc"
7629 [(set (match_operand:SF 0 "s_register_operand" "")
7630 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
7631 (match_operand:SF 2 "s_register_operand" "")
7632 (match_operand:SF 3 "nonmemory_operand" "")))]
7633 "TARGET_32BIT && TARGET_HARD_FLOAT"
7634 "
7635 {
7636 enum rtx_code code = GET_CODE (operands[1]);
7637 rtx ccreg;
7638
7639 if (code == UNEQ || code == LTGT)
7640 FAIL;
7641
7642 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
7643      Otherwise, ensure it is a valid FP add operand.  */
7644 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
7645 || (!arm_float_add_operand (operands[3], SFmode)))
7646 operands[3] = force_reg (SFmode, operands[3]);
7647
7648 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7649 XEXP (operands[1], 1));
7650 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7651 }"
7652 )
7653
7654 (define_expand "movdfcc"
7655 [(set (match_operand:DF 0 "s_register_operand" "")
7656 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
7657 (match_operand:DF 2 "s_register_operand" "")
7658 (match_operand:DF 3 "arm_float_add_operand" "")))]
7659 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
7660 "
7661 {
7662 enum rtx_code code = GET_CODE (operands[1]);
7663 rtx ccreg;
7664
7665 if (code == UNEQ || code == LTGT)
7666 FAIL;
7667
7668 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7669 XEXP (operands[1], 1));
7670 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7671 }"
7672 )
7673
7674 (define_insn "*movsicc_insn"
7675 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7676 (if_then_else:SI
7677 (match_operator 3 "arm_comparison_operator"
7678 [(match_operand 4 "cc_register" "") (const_int 0)])
7679 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7680 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7681 "TARGET_ARM"
7682 "@
7683 mov%D3\\t%0, %2
7684 mvn%D3\\t%0, #%B2
7685 mov%d3\\t%0, %1
7686 mvn%d3\\t%0, #%B1
7687 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7688 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7689 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7690 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7691 [(set_attr "length" "4,4,4,4,8,8,8,8")
7692 (set_attr "conds" "use")]
7693 )
7694
7695 (define_insn "*movsfcc_soft_insn"
7696 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7697 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7698 [(match_operand 4 "cc_register" "") (const_int 0)])
7699 (match_operand:SF 1 "s_register_operand" "0,r")
7700 (match_operand:SF 2 "s_register_operand" "r,0")))]
7701 "TARGET_ARM && TARGET_SOFT_FLOAT"
7702 "@
7703 mov%D3\\t%0, %2
7704 mov%d3\\t%0, %1"
7705 [(set_attr "conds" "use")]
7706 )
7707
7708 \f
7709 ;; Jump and linkage insns
7710
7711 (define_expand "jump"
7712 [(set (pc)
7713 (label_ref (match_operand 0 "" "")))]
7714 "TARGET_EITHER"
7715 ""
7716 )
7717
7718 (define_insn "*arm_jump"
7719 [(set (pc)
7720 (label_ref (match_operand 0 "" "")))]
7721 "TARGET_32BIT"
7722 "*
7723 {
7724 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7725 {
7726 arm_ccfsm_state += 2;
7727 return \"\";
7728 }
7729 return \"b%?\\t%l0\";
7730 }
7731 "
7732 [(set_attr "predicable" "yes")]
7733 )
7734
7735 (define_insn "*thumb_jump"
7736 [(set (pc)
7737 (label_ref (match_operand 0 "" "")))]
7738 "TARGET_THUMB1"
7739 "*
7740 if (get_attr_length (insn) == 2)
7741 return \"b\\t%l0\";
7742 return \"bl\\t%l0\\t%@ far jump\";
7743 "
7744 [(set (attr "far_jump")
7745 (if_then_else
7746 (eq_attr "length" "4")
7747 (const_string "yes")
7748 (const_string "no")))
7749 (set (attr "length")
7750 (if_then_else
7751 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7752 (le (minus (match_dup 0) (pc)) (const_int 2048)))
7753 (const_int 2)
7754 (const_int 4)))]
7755 )
7756
7757 (define_expand "call"
7758 [(parallel [(call (match_operand 0 "memory_operand" "")
7759 (match_operand 1 "general_operand" ""))
7760 (use (match_operand 2 "" ""))
7761 (clobber (reg:SI LR_REGNUM))])]
7762 "TARGET_EITHER"
7763 "
7764 {
7765 rtx callee, pat;
7766
7767 /* In an untyped call, we can get NULL for operand 2. */
7768 if (operands[2] == NULL_RTX)
7769 operands[2] = const0_rtx;
7770
7771 /* Decide if we should generate indirect calls by loading the
7772 32-bit address of the callee into a register before performing the
7773 branch and link. */
7774 callee = XEXP (operands[0], 0);
7775 if (GET_CODE (callee) == SYMBOL_REF
7776 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7777 : !REG_P (callee))
7778 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7779
7780 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7781 arm_emit_call_insn (pat, XEXP (operands[0], 0));
7782 DONE;
7783 }"
7784 )
7785
7786 (define_expand "call_internal"
7787 [(parallel [(call (match_operand 0 "memory_operand" "")
7788 (match_operand 1 "general_operand" ""))
7789 (use (match_operand 2 "" ""))
7790 (clobber (reg:SI LR_REGNUM))])])
7791
7792 (define_insn "*call_reg_armv5"
7793 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7794 (match_operand 1 "" ""))
7795 (use (match_operand 2 "" ""))
7796 (clobber (reg:SI LR_REGNUM))]
7797 "TARGET_ARM && arm_arch5"
7798 "blx%?\\t%0"
7799 [(set_attr "type" "call")]
7800 )
7801
7802 (define_insn "*call_reg_arm"
7803 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7804 (match_operand 1 "" ""))
7805 (use (match_operand 2 "" ""))
7806 (clobber (reg:SI LR_REGNUM))]
7807 "TARGET_ARM && !arm_arch5"
7808 "*
7809 return output_call (operands);
7810 "
7811   ;; The length is the worst case; normally only two instructions are needed.
7812 [(set_attr "length" "12")
7813 (set_attr "type" "call")]
7814 )
7815
7816
7817 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
7818 ;; considered a function call by the branch predictor of some cores (PR40887).
7819 ;; Falls back to blx rN (*call_reg_armv5).
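;; (A rough sketch of the difference, not from the original sources:
;; without blx the call here comes out as
;;	mov	lr, pc
;;	ldr	pc, <callee address>
;; and it is this loaded-PC form that the predictors in question do not
;; treat as a call, whereas armv5+ uses a single blx.)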
7820
7821 (define_insn "*call_mem"
7822 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
7823 (match_operand 1 "" ""))
7824 (use (match_operand 2 "" ""))
7825 (clobber (reg:SI LR_REGNUM))]
7826 "TARGET_ARM && !arm_arch5"
7827 "*
7828 return output_call_mem (operands);
7829 "
7830 [(set_attr "length" "12")
7831 (set_attr "type" "call")]
7832 )
7833
7834 (define_insn "*call_reg_thumb1_v5"
7835 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7836 (match_operand 1 "" ""))
7837 (use (match_operand 2 "" ""))
7838 (clobber (reg:SI LR_REGNUM))]
7839 "TARGET_THUMB1 && arm_arch5"
7840 "blx\\t%0"
7841 [(set_attr "length" "2")
7842 (set_attr "type" "call")]
7843 )
7844
7845 (define_insn "*call_reg_thumb1"
7846 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
7847 (match_operand 1 "" ""))
7848 (use (match_operand 2 "" ""))
7849 (clobber (reg:SI LR_REGNUM))]
7850 "TARGET_THUMB1 && !arm_arch5"
7851 "*
7852 {
7853 if (!TARGET_CALLER_INTERWORKING)
7854 return thumb_call_via_reg (operands[0]);
7855 else if (operands[1] == const0_rtx)
7856 return \"bl\\t%__interwork_call_via_%0\";
7857 else if (frame_pointer_needed)
7858 return \"bl\\t%__interwork_r7_call_via_%0\";
7859 else
7860 return \"bl\\t%__interwork_r11_call_via_%0\";
7861 }"
7862 [(set_attr "type" "call")]
7863 )
7864
7865 (define_expand "call_value"
7866 [(parallel [(set (match_operand 0 "" "")
7867 (call (match_operand 1 "memory_operand" "")
7868 (match_operand 2 "general_operand" "")))
7869 (use (match_operand 3 "" ""))
7870 (clobber (reg:SI LR_REGNUM))])]
7871 "TARGET_EITHER"
7872 "
7873 {
7874 rtx pat, callee;
7875
7876     /* In an untyped call, we can get NULL for operand 3.  */
7877 if (operands[3] == 0)
7878 operands[3] = const0_rtx;
7879
7880 /* Decide if we should generate indirect calls by loading the
7881 32-bit address of the callee into a register before performing the
7882 branch and link. */
7883 callee = XEXP (operands[1], 0);
7884 if (GET_CODE (callee) == SYMBOL_REF
7885 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7886 : !REG_P (callee))
7887 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7888
7889 pat = gen_call_value_internal (operands[0], operands[1],
7890 operands[2], operands[3]);
7891 arm_emit_call_insn (pat, XEXP (operands[1], 0));
7892 DONE;
7893 }"
7894 )
7895
7896 (define_expand "call_value_internal"
7897 [(parallel [(set (match_operand 0 "" "")
7898 (call (match_operand 1 "memory_operand" "")
7899 (match_operand 2 "general_operand" "")))
7900 (use (match_operand 3 "" ""))
7901 (clobber (reg:SI LR_REGNUM))])])
7902
7903 (define_insn "*call_value_reg_armv5"
7904 [(set (match_operand 0 "" "")
7905 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7906 (match_operand 2 "" "")))
7907 (use (match_operand 3 "" ""))
7908 (clobber (reg:SI LR_REGNUM))]
7909 "TARGET_ARM && arm_arch5"
7910 "blx%?\\t%1"
7911 [(set_attr "type" "call")]
7912 )
7913
7914 (define_insn "*call_value_reg_arm"
7915 [(set (match_operand 0 "" "")
7916 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7917 (match_operand 2 "" "")))
7918 (use (match_operand 3 "" ""))
7919 (clobber (reg:SI LR_REGNUM))]
7920 "TARGET_ARM && !arm_arch5"
7921 "*
7922 return output_call (&operands[1]);
7923 "
7924 [(set_attr "length" "12")
7925 (set_attr "type" "call")]
7926 )
7927
7928 ;; Note: see *call_mem
7929
7930 (define_insn "*call_value_mem"
7931 [(set (match_operand 0 "" "")
7932 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
7933 (match_operand 2 "" "")))
7934 (use (match_operand 3 "" ""))
7935 (clobber (reg:SI LR_REGNUM))]
7936 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
7937 "*
7938 return output_call_mem (&operands[1]);
7939 "
7940 [(set_attr "length" "12")
7941 (set_attr "type" "call")]
7942 )
7943
7944 (define_insn "*call_value_reg_thumb1_v5"
7945 [(set (match_operand 0 "" "")
7946 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7947 (match_operand 2 "" "")))
7948 (use (match_operand 3 "" ""))
7949 (clobber (reg:SI LR_REGNUM))]
7950 "TARGET_THUMB1 && arm_arch5"
7951 "blx\\t%1"
7952 [(set_attr "length" "2")
7953 (set_attr "type" "call")]
7954 )
7955
7956 (define_insn "*call_value_reg_thumb1"
7957 [(set (match_operand 0 "" "")
7958 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
7959 (match_operand 2 "" "")))
7960 (use (match_operand 3 "" ""))
7961 (clobber (reg:SI LR_REGNUM))]
7962 "TARGET_THUMB1 && !arm_arch5"
7963 "*
7964 {
7965 if (!TARGET_CALLER_INTERWORKING)
7966 return thumb_call_via_reg (operands[1]);
7967 else if (operands[2] == const0_rtx)
7968 return \"bl\\t%__interwork_call_via_%1\";
7969 else if (frame_pointer_needed)
7970 return \"bl\\t%__interwork_r7_call_via_%1\";
7971 else
7972 return \"bl\\t%__interwork_r11_call_via_%1\";
7973 }"
7974 [(set_attr "type" "call")]
7975 )
7976
7977 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7978 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7979
7980 (define_insn "*call_symbol"
7981 [(call (mem:SI (match_operand:SI 0 "" ""))
7982 (match_operand 1 "" ""))
7983 (use (match_operand 2 "" ""))
7984 (clobber (reg:SI LR_REGNUM))]
7985 "TARGET_32BIT
7986 && (GET_CODE (operands[0]) == SYMBOL_REF)
7987 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7988 "*
7989 {
7990 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7991 }"
7992 [(set_attr "type" "call")]
7993 )
7994
7995 (define_insn "*call_value_symbol"
7996 [(set (match_operand 0 "" "")
7997 (call (mem:SI (match_operand:SI 1 "" ""))
7998 (match_operand:SI 2 "" "")))
7999 (use (match_operand 3 "" ""))
8000 (clobber (reg:SI LR_REGNUM))]
8001 "TARGET_32BIT
8002 && (GET_CODE (operands[1]) == SYMBOL_REF)
8003 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8004 "*
8005 {
8006 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8007 }"
8008 [(set_attr "type" "call")]
8009 )
8010
8011 (define_insn "*call_insn"
8012 [(call (mem:SI (match_operand:SI 0 "" ""))
8013 (match_operand:SI 1 "" ""))
8014 (use (match_operand 2 "" ""))
8015 (clobber (reg:SI LR_REGNUM))]
8016 "TARGET_THUMB1
8017 && GET_CODE (operands[0]) == SYMBOL_REF
8018 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8019 "bl\\t%a0"
8020 [(set_attr "length" "4")
8021 (set_attr "type" "call")]
8022 )
8023
8024 (define_insn "*call_value_insn"
8025 [(set (match_operand 0 "" "")
8026 (call (mem:SI (match_operand 1 "" ""))
8027 (match_operand 2 "" "")))
8028 (use (match_operand 3 "" ""))
8029 (clobber (reg:SI LR_REGNUM))]
8030 "TARGET_THUMB1
8031 && GET_CODE (operands[1]) == SYMBOL_REF
8032 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8033 "bl\\t%a1"
8034 [(set_attr "length" "4")
8035 (set_attr "type" "call")]
8036 )
8037
8038 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8039 (define_expand "sibcall"
8040 [(parallel [(call (match_operand 0 "memory_operand" "")
8041 (match_operand 1 "general_operand" ""))
8042 (return)
8043 (use (match_operand 2 "" ""))])]
8044 "TARGET_32BIT"
8045 "
8046 {
8047 if (operands[2] == NULL_RTX)
8048 operands[2] = const0_rtx;
8049 }"
8050 )
8051
8052 (define_expand "sibcall_value"
8053 [(parallel [(set (match_operand 0 "" "")
8054 (call (match_operand 1 "memory_operand" "")
8055 (match_operand 2 "general_operand" "")))
8056 (return)
8057 (use (match_operand 3 "" ""))])]
8058 "TARGET_32BIT"
8059 "
8060 {
8061 if (operands[3] == NULL_RTX)
8062 operands[3] = const0_rtx;
8063 }"
8064 )
8065
8066 (define_insn "*sibcall_insn"
8067 [(call (mem:SI (match_operand:SI 0 "" "X"))
8068 (match_operand 1 "" ""))
8069 (return)
8070 (use (match_operand 2 "" ""))]
8071 "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8072 "*
8073 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8074 "
8075 [(set_attr "type" "call")]
8076 )
8077
8078 (define_insn "*sibcall_value_insn"
8079 [(set (match_operand 0 "" "")
8080 (call (mem:SI (match_operand:SI 1 "" "X"))
8081 (match_operand 2 "" "")))
8082 (return)
8083 (use (match_operand 3 "" ""))]
8084 "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8085 "*
8086 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8087 "
8088 [(set_attr "type" "call")]
8089 )
8090
8091 (define_expand "return"
8092 [(return)]
8093 "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
8094 "")
8095
8096 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8097 (define_insn "*arm_return"
8098 [(return)]
8099 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8100 "*
8101 {
8102 if (arm_ccfsm_state == 2)
8103 {
8104 arm_ccfsm_state += 2;
8105 return \"\";
8106 }
8107 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8108 }"
8109 [(set_attr "type" "load1")
8110 (set_attr "length" "12")
8111 (set_attr "predicable" "yes")]
8112 )
8113
8114 (define_insn "*cond_return"
8115 [(set (pc)
8116 (if_then_else (match_operator 0 "arm_comparison_operator"
8117 [(match_operand 1 "cc_register" "") (const_int 0)])
8118 (return)
8119 (pc)))]
8120 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8121 "*
8122 {
8123 if (arm_ccfsm_state == 2)
8124 {
8125 arm_ccfsm_state += 2;
8126 return \"\";
8127 }
8128 return output_return_instruction (operands[0], TRUE, FALSE);
8129 }"
8130 [(set_attr "conds" "use")
8131 (set_attr "length" "12")
8132 (set_attr "type" "load1")]
8133 )
8134
8135 (define_insn "*cond_return_inverted"
8136 [(set (pc)
8137 (if_then_else (match_operator 0 "arm_comparison_operator"
8138 [(match_operand 1 "cc_register" "") (const_int 0)])
8139 (pc)
8140 (return)))]
8141 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8142 "*
8143 {
8144 if (arm_ccfsm_state == 2)
8145 {
8146 arm_ccfsm_state += 2;
8147 return \"\";
8148 }
8149 return output_return_instruction (operands[0], TRUE, TRUE);
8150 }"
8151 [(set_attr "conds" "use")
8152 (set_attr "length" "12")
8153 (set_attr "type" "load1")]
8154 )
8155
8156 ;; Generate a sequence of instructions to determine if the processor is
8157 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8158 ;; mask.
8159
8160 (define_expand "return_addr_mask"
8161 [(set (match_dup 1)
8162 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8163 (const_int 0)))
8164 (set (match_operand:SI 0 "s_register_operand" "")
8165 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8166 (const_int -1)
8167 (const_int 67108860)))] ; 0x03fffffc
8168 "TARGET_ARM"
8169 "
8170 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8171 ")
8172
8173 (define_insn "*check_arch2"
8174 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8175 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8176 (const_int 0)))]
8177 "TARGET_ARM"
8178 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8179 [(set_attr "length" "8")
8180 (set_attr "conds" "set")]
8181 )
8182
8183 ;; Call subroutine returning any type.
8184
8185 (define_expand "untyped_call"
8186 [(parallel [(call (match_operand 0 "" "")
8187 (const_int 0))
8188 (match_operand 1 "" "")
8189 (match_operand 2 "" "")])]
8190 "TARGET_EITHER"
8191 "
8192 {
8193 int i;
8194 rtx par = gen_rtx_PARALLEL (VOIDmode,
8195 rtvec_alloc (XVECLEN (operands[2], 0)));
8196 rtx addr = gen_reg_rtx (Pmode);
8197 rtx mem;
8198 int size = 0;
8199
8200 emit_move_insn (addr, XEXP (operands[1], 0));
8201 mem = change_address (operands[1], BLKmode, addr);
8202
8203 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8204 {
8205 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8206
8207 /* Default code only uses r0 as a return value, but we could
8208 be using anything up to 4 registers. */
8209 if (REGNO (src) == R0_REGNUM)
8210 src = gen_rtx_REG (TImode, R0_REGNUM);
8211
8212 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8213 GEN_INT (size));
8214 size += GET_MODE_SIZE (GET_MODE (src));
8215 }
8216
8217 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8218 const0_rtx));
8219
8220 size = 0;
8221
8222 for (i = 0; i < XVECLEN (par, 0); i++)
8223 {
8224 HOST_WIDE_INT offset = 0;
8225 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8226
8227 if (size != 0)
8228 emit_move_insn (addr, plus_constant (addr, size));
8229
8230 mem = change_address (mem, GET_MODE (reg), NULL);
8231 if (REGNO (reg) == R0_REGNUM)
8232 {
8233 /* On thumb we have to use a write-back instruction. */
8234 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8235 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8236 size = TARGET_ARM ? 16 : 0;
8237 }
8238 else
8239 {
8240 emit_move_insn (mem, reg);
8241 size = GET_MODE_SIZE (GET_MODE (reg));
8242 }
8243 }
8244
8245 /* The optimizer does not know that the call sets the function value
8246 registers we stored in the result block. We avoid problems by
8247 claiming that all hard registers are used and clobbered at this
8248 point. */
8249 emit_insn (gen_blockage ());
8250
8251 DONE;
8252 }"
8253 )
8254
8255 (define_expand "untyped_return"
8256 [(match_operand:BLK 0 "memory_operand" "")
8257 (match_operand 1 "" "")]
8258 "TARGET_EITHER"
8259 "
8260 {
8261 int i;
8262 rtx addr = gen_reg_rtx (Pmode);
8263 rtx mem;
8264 int size = 0;
8265
8266 emit_move_insn (addr, XEXP (operands[0], 0));
8267 mem = change_address (operands[0], BLKmode, addr);
8268
8269 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8270 {
8271 HOST_WIDE_INT offset = 0;
8272 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8273
8274 if (size != 0)
8275 emit_move_insn (addr, plus_constant (addr, size));
8276
8277 mem = change_address (mem, GET_MODE (reg), NULL);
8278 if (REGNO (reg) == R0_REGNUM)
8279 {
8280 /* On thumb we have to use a write-back instruction. */
8281 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8282 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8283 size = TARGET_ARM ? 16 : 0;
8284 }
8285 else
8286 {
8287 emit_move_insn (reg, mem);
8288 size = GET_MODE_SIZE (GET_MODE (reg));
8289 }
8290 }
8291
8292 /* Emit USE insns before the return. */
8293 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8294 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8295
8296 /* Construct the return. */
8297 expand_naked_return ();
8298
8299 DONE;
8300 }"
8301 )
8302
8303 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8304 ;; all of memory. This blocks insns from being moved across this point.
8305
8306 (define_insn "blockage"
8307 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8308 "TARGET_EITHER"
8309 ""
8310 [(set_attr "length" "0")
8311 (set_attr "type" "block")]
8312 )
8313
8314 (define_expand "casesi"
8315 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8316 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8317 (match_operand:SI 2 "const_int_operand" "") ; total range
8318 (match_operand:SI 3 "" "") ; table label
8319 (match_operand:SI 4 "" "")] ; Out of range label
8320 "TARGET_32BIT || optimize_size || flag_pic"
8321 "
8322 {
8323 enum insn_code code;
8324 if (operands[1] != const0_rtx)
8325 {
8326 rtx reg = gen_reg_rtx (SImode);
8327
8328 emit_insn (gen_addsi3 (reg, operands[0],
8329 GEN_INT (-INTVAL (operands[1]))));
8330 operands[0] = reg;
8331 }
8332
8333 if (TARGET_ARM)
8334 code = CODE_FOR_arm_casesi_internal;
8335 else if (TARGET_THUMB1)
8336 code = CODE_FOR_thumb1_casesi_internal_pic;
8337 else if (flag_pic)
8338 code = CODE_FOR_thumb2_casesi_internal_pic;
8339 else
8340 code = CODE_FOR_thumb2_casesi_internal;
8341
8342 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8343 operands[2] = force_reg (SImode, operands[2]);
8344
8345 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8346 operands[3], operands[4]));
8347 DONE;
8348 }"
8349 )
8350
8351 ;; The USE in this pattern is needed to tell flow analysis that this is
8352 ;; a CASESI insn. It has no other purpose.
8353 (define_insn "arm_casesi_internal"
8354 [(parallel [(set (pc)
8355 (if_then_else
8356 (leu (match_operand:SI 0 "s_register_operand" "r")
8357 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8358 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8359 (label_ref (match_operand 2 "" ""))))
8360 (label_ref (match_operand 3 "" ""))))
8361 (clobber (reg:CC CC_REGNUM))
8362 (use (label_ref (match_dup 2)))])]
8363 "TARGET_ARM"
8364 "*
8365 if (flag_pic)
8366 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8367 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8368 "
8369 [(set_attr "conds" "clob")
8370 (set_attr "length" "12")]
8371 )
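
;; E.g. (an illustrative sketch): a 10-way non-PIC switch on r0 becomes
;;	cmp	r0, #9
;;	ldrls	pc, [pc, r0, asl #2]
;;	b	.Ldefault
;; with the table of label addresses placed directly after the branch;
;; reading pc yields the address of the current insn plus 8, which is
;; exactly where the table starts.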
8372
8373 (define_expand "thumb1_casesi_internal_pic"
8374 [(match_operand:SI 0 "s_register_operand" "")
8375 (match_operand:SI 1 "thumb1_cmp_operand" "")
8376 (match_operand 2 "" "")
8377 (match_operand 3 "" "")]
8378 "TARGET_THUMB1"
8379 {
8380 rtx reg0;
8381 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8382 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8383 operands[3]));
8384 reg0 = gen_rtx_REG (SImode, 0);
8385 emit_move_insn (reg0, operands[0]);
8386 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8387 DONE;
8388 }
8389 )
8390
8391 (define_insn "thumb1_casesi_dispatch"
8392 [(parallel [(set (pc) (unspec [(reg:SI 0)
8393 (label_ref (match_operand 0 "" ""))
8394 ;; (label_ref (match_operand 1 "" ""))
8395 ]
8396 UNSPEC_THUMB1_CASESI))
8397 (clobber (reg:SI IP_REGNUM))
8398 (clobber (reg:SI LR_REGNUM))])]
8399 "TARGET_THUMB1"
8400 "* return thumb1_output_casesi(operands);"
8401 [(set_attr "length" "4")]
8402 )
8403
8404 (define_expand "indirect_jump"
8405 [(set (pc)
8406 (match_operand:SI 0 "s_register_operand" ""))]
8407 "TARGET_EITHER"
8408 "
8409 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8410 address and use bx. */
8411 if (TARGET_THUMB2)
8412 {
8413 rtx tmp;
8414 tmp = gen_reg_rtx (SImode);
8415 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8416 operands[0] = tmp;
8417 }
8418 "
8419 )
8420
8421 ;; NB Never uses BX.
8422 (define_insn "*arm_indirect_jump"
8423 [(set (pc)
8424 (match_operand:SI 0 "s_register_operand" "r"))]
8425 "TARGET_ARM"
8426 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8427 [(set_attr "predicable" "yes")]
8428 )
8429
8430 (define_insn "*load_indirect_jump"
8431 [(set (pc)
8432 (match_operand:SI 0 "memory_operand" "m"))]
8433 "TARGET_ARM"
8434 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8435 [(set_attr "type" "load1")
8436 (set_attr "pool_range" "4096")
8437 (set_attr "neg_pool_range" "4084")
8438 (set_attr "predicable" "yes")]
8439 )
8440
8441 ;; NB Never uses BX.
8442 (define_insn "*thumb1_indirect_jump"
8443 [(set (pc)
8444 (match_operand:SI 0 "register_operand" "l*r"))]
8445 "TARGET_THUMB1"
8446 "mov\\tpc, %0"
8447 [(set_attr "conds" "clob")
8448 (set_attr "length" "2")]
8449 )
8450
8451 \f
8452 ;; Misc insns
8453
8454 (define_insn "nop"
8455 [(const_int 0)]
8456 "TARGET_EITHER"
8457 "*
8458 if (TARGET_UNIFIED_ASM)
8459 return \"nop\";
8460 if (TARGET_ARM)
8461 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8462 return \"mov\\tr8, r8\";
8463 "
8464 [(set (attr "length")
8465 (if_then_else (eq_attr "is_thumb" "yes")
8466 (const_int 2)
8467 (const_int 4)))]
8468 )
8469
8470 \f
8471 ;; Patterns to allow combination of arithmetic, cond code and shifts
8472
8473 (define_insn "*arith_shiftsi"
8474 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8475 (match_operator:SI 1 "shiftable_operator"
8476 [(match_operator:SI 3 "shift_operator"
8477 [(match_operand:SI 4 "s_register_operand" "r,r")
8478 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8479 (match_operand:SI 2 "s_register_operand" "rk,rk")]))]
8480 "TARGET_32BIT"
8481 "%i1%?\\t%0, %2, %4%S3"
8482 [(set_attr "predicable" "yes")
8483 (set_attr "shift" "4")
8484 (set_attr "arch" "32,a")
8485 ;; We have to make sure to disable the second alternative if
8486 ;; the shift_operator is MULT, since otherwise the insn will
8487 ;; also match a multiply_accumulate pattern and validate_change
8488 ;; will allow a replacement of the constant with a register
8489 ;; despite the checks done in shift_operator.
8490 (set_attr_alternative "insn_enabled"
8491 [(const_string "yes")
8492 (if_then_else
8493 (match_operand:SI 3 "mult_operator" "")
8494 (const_string "no") (const_string "yes"))])
8495 (set_attr "type" "alu_shift,alu_shift_reg")])
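
;; For example (operands purely illustrative): with PLUS as operator 1
;; and an LSL as operator 3 this prints something like
;;	add	r0, r3, r1, lsl #2
;; folding the shift into the data-processing instruction itself.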
8496
8497 (define_split
8498 [(set (match_operand:SI 0 "s_register_operand" "")
8499 (match_operator:SI 1 "shiftable_operator"
8500 [(match_operator:SI 2 "shiftable_operator"
8501 [(match_operator:SI 3 "shift_operator"
8502 [(match_operand:SI 4 "s_register_operand" "")
8503 (match_operand:SI 5 "reg_or_int_operand" "")])
8504 (match_operand:SI 6 "s_register_operand" "")])
8505 (match_operand:SI 7 "arm_rhs_operand" "")]))
8506 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8507 "TARGET_32BIT"
8508 [(set (match_dup 8)
8509 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8510 (match_dup 6)]))
8511 (set (match_dup 0)
8512 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8513 "")
8514
8515 (define_insn "*arith_shiftsi_compare0"
8516 [(set (reg:CC_NOOV CC_REGNUM)
8517 (compare:CC_NOOV
8518 (match_operator:SI 1 "shiftable_operator"
8519 [(match_operator:SI 3 "shift_operator"
8520 [(match_operand:SI 4 "s_register_operand" "r,r")
8521 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8522 (match_operand:SI 2 "s_register_operand" "r,r")])
8523 (const_int 0)))
8524 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8525 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8526 (match_dup 2)]))]
8527 "TARGET_32BIT"
8528 "%i1%.\\t%0, %2, %4%S3"
8529 [(set_attr "conds" "set")
8530 (set_attr "shift" "4")
8531 (set_attr "arch" "32,a")
8532 (set_attr "type" "alu_shift,alu_shift_reg")])
8533
8534 (define_insn "*arith_shiftsi_compare0_scratch"
8535 [(set (reg:CC_NOOV CC_REGNUM)
8536 (compare:CC_NOOV
8537 (match_operator:SI 1 "shiftable_operator"
8538 [(match_operator:SI 3 "shift_operator"
8539 [(match_operand:SI 4 "s_register_operand" "r,r")
8540 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8541 (match_operand:SI 2 "s_register_operand" "r,r")])
8542 (const_int 0)))
8543 (clobber (match_scratch:SI 0 "=r,r"))]
8544 "TARGET_32BIT"
8545 "%i1%.\\t%0, %2, %4%S3"
8546 [(set_attr "conds" "set")
8547 (set_attr "shift" "4")
8548 (set_attr "arch" "32,a")
8549 (set_attr "type" "alu_shift,alu_shift_reg")])
8550
8551 (define_insn "*sub_shiftsi"
8552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8553 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8554 (match_operator:SI 2 "shift_operator"
8555 [(match_operand:SI 3 "s_register_operand" "r,r")
8556 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8557 "TARGET_32BIT"
8558 "sub%?\\t%0, %1, %3%S2"
8559 [(set_attr "predicable" "yes")
8560 (set_attr "shift" "3")
8561 (set_attr "arch" "32,a")
8562 (set_attr "type" "alu_shift,alu_shift_reg")])
8563
8564 (define_insn "*sub_shiftsi_compare0"
8565 [(set (reg:CC_NOOV CC_REGNUM)
8566 (compare:CC_NOOV
8567 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8568 (match_operator:SI 2 "shift_operator"
8569 [(match_operand:SI 3 "s_register_operand" "r,r")
8570 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8571 (const_int 0)))
8572 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8573 (minus:SI (match_dup 1)
8574 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8575 "TARGET_32BIT"
8576 "sub%.\\t%0, %1, %3%S2"
8577 [(set_attr "conds" "set")
8578 (set_attr "shift" "3")
8579 (set_attr "arch" "32,a")
8580 (set_attr "type" "alu_shift,alu_shift_reg")])
8581
8582 (define_insn "*sub_shiftsi_compare0_scratch"
8583 [(set (reg:CC_NOOV CC_REGNUM)
8584 (compare:CC_NOOV
8585 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8586 (match_operator:SI 2 "shift_operator"
8587 [(match_operand:SI 3 "s_register_operand" "r,r")
8588 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
8589 (const_int 0)))
8590 (clobber (match_scratch:SI 0 "=r,r"))]
8591 "TARGET_32BIT"
8592 "sub%.\\t%0, %1, %3%S2"
8593 [(set_attr "conds" "set")
8594 (set_attr "shift" "3")
8595 (set_attr "arch" "32,a")
8596 (set_attr "type" "alu_shift,alu_shift_reg")])
8597 \f
8598
8599 (define_insn "*and_scc"
8600 [(set (match_operand:SI 0 "s_register_operand" "=r")
8601 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8602 [(match_operand 3 "cc_register" "") (const_int 0)])
8603 (match_operand:SI 2 "s_register_operand" "r")))]
8604 "TARGET_ARM"
8605 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
8606 [(set_attr "conds" "use")
8607 (set_attr "length" "8")]
8608 )
8609
8610 (define_insn "*ior_scc"
8611 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8612 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
8613 [(match_operand 3 "cc_register" "") (const_int 0)])
8614 (match_operand:SI 1 "s_register_operand" "0,?r")))]
8615 "TARGET_ARM"
8616 "@
8617 orr%d2\\t%0, %1, #1
8618 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
8619 [(set_attr "conds" "use")
8620 (set_attr "length" "4,8")]
8621 )
8622
8623 ; A series of splitters for the compare_scc pattern below. Note that
8624 ; order is important.
8625 (define_split
8626 [(set (match_operand:SI 0 "s_register_operand" "")
8627 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8628 (const_int 0)))
8629 (clobber (reg:CC CC_REGNUM))]
8630 "TARGET_32BIT && reload_completed"
8631 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8632
8633 (define_split
8634 [(set (match_operand:SI 0 "s_register_operand" "")
8635 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8636 (const_int 0)))
8637 (clobber (reg:CC CC_REGNUM))]
8638 "TARGET_32BIT && reload_completed"
8639 [(set (match_dup 0) (not:SI (match_dup 1)))
8640 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8641
8642 (define_split
8643 [(set (match_operand:SI 0 "s_register_operand" "")
8644 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8645 (const_int 0)))
8646 (clobber (reg:CC CC_REGNUM))]
8647 "TARGET_32BIT && reload_completed"
8648 [(parallel
8649 [(set (reg:CC CC_REGNUM)
8650 (compare:CC (const_int 1) (match_dup 1)))
8651 (set (match_dup 0)
8652 (minus:SI (const_int 1) (match_dup 1)))])
8653 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8654 (set (match_dup 0) (const_int 0)))])
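;; As a rough illustration, the EQ-against-zero split above ends up as
;; something like
;;   rsbs  r0, r1, #1
;;   movcc r0, #0
;; 1 - r1 already gives the right answer when r1 is 0 or 1, and the
;; conditional move clears the result whenever the subtraction borrowed,
;; i.e. for any larger value of r1.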
8655
8656 (define_split
8657 [(set (match_operand:SI 0 "s_register_operand" "")
8658 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8659 (match_operand:SI 2 "const_int_operand" "")))
8660 (clobber (reg:CC CC_REGNUM))]
8661 "TARGET_32BIT && reload_completed"
8662 [(parallel
8663 [(set (reg:CC CC_REGNUM)
8664 (compare:CC (match_dup 1) (match_dup 2)))
8665 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8666 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8667 (set (match_dup 0) (const_int 1)))]
8668 {
8669 operands[3] = GEN_INT (-INTVAL (operands[2]));
8670 })
8671
8672 (define_split
8673 [(set (match_operand:SI 0 "s_register_operand" "")
8674 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8675 (match_operand:SI 2 "arm_add_operand" "")))
8676 (clobber (reg:CC CC_REGNUM))]
8677 "TARGET_32BIT && reload_completed"
8678 [(parallel
8679 [(set (reg:CC_NOOV CC_REGNUM)
8680 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8681 (const_int 0)))
8682 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8683 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8684 (set (match_dup 0) (const_int 1)))])
8685
8686 (define_insn_and_split "*compare_scc"
8687 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8688 (match_operator:SI 1 "arm_comparison_operator"
8689 [(match_operand:SI 2 "s_register_operand" "r,r")
8690 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8691 (clobber (reg:CC CC_REGNUM))]
8692 "TARGET_32BIT"
8693 "#"
8694 "&& reload_completed"
8695 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8696 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8697 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8698 {
8699 rtx tmp1;
8700 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8701 operands[2], operands[3]);
8702 enum rtx_code rc = GET_CODE (operands[1]);
8703
8704 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8705
8706 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8707 if (mode == CCFPmode || mode == CCFPEmode)
8708 rc = reverse_condition_maybe_unordered (rc);
8709 else
8710 rc = reverse_condition (rc);
8711 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8712 })
8713
8714 ;; Attempt to improve the sequence generated by the compare_scc splitters
8715 ;; not to use conditional execution.
8716 (define_peephole2
8717 [(set (reg:CC CC_REGNUM)
8718 (compare:CC (match_operand:SI 1 "register_operand" "")
8719 (match_operand:SI 2 "arm_rhs_operand" "")))
8720 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8721 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8722 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8723 (set (match_dup 0) (const_int 1)))
8724 (match_scratch:SI 3 "r")]
8725 "TARGET_32BIT"
8726 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
8727 (parallel
8728 [(set (reg:CC CC_REGNUM)
8729 (compare:CC (const_int 0) (match_dup 3)))
8730 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8731 (set (match_dup 0)
8732 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8733 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))])
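;; A rough illustration of the transformation above, materialising
;; (r1 == r2) into r0 with r3 as the scratch:
;;   before:  cmp   r1, r2         after:  sub   r3, r1, r2
;;            movne r0, #0                 rsbs  r0, r3, #0
;;            moveq r0, #1                 adc   r0, r0, r3
;; The replacement sequence avoids conditional execution entirely.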
8734
8735 (define_insn "*cond_move"
8736 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8737 (if_then_else:SI (match_operator 3 "equality_operator"
8738 [(match_operator 4 "arm_comparison_operator"
8739 [(match_operand 5 "cc_register" "") (const_int 0)])
8740 (const_int 0)])
8741 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8742 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8743 "TARGET_ARM"
8744 "*
8745 if (GET_CODE (operands[3]) == NE)
8746 {
8747 if (which_alternative != 1)
8748 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8749 if (which_alternative != 0)
8750 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8751 return \"\";
8752 }
8753 if (which_alternative != 0)
8754 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8755 if (which_alternative != 1)
8756 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8757 return \"\";
8758 "
8759 [(set_attr "conds" "use")
8760 (set_attr "length" "4,4,8")]
8761 )
8762
8763 (define_insn "*cond_arith"
8764 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8765 (match_operator:SI 5 "shiftable_operator"
8766 [(match_operator:SI 4 "arm_comparison_operator"
8767 [(match_operand:SI 2 "s_register_operand" "r,r")
8768 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8769 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8770 (clobber (reg:CC CC_REGNUM))]
8771 "TARGET_ARM"
8772 "*
8773 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8774 return \"%i5\\t%0, %1, %2, lsr #31\";
8775
8776 output_asm_insn (\"cmp\\t%2, %3\", operands);
8777 if (GET_CODE (operands[5]) == AND)
8778 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8779 else if (GET_CODE (operands[5]) == MINUS)
8780 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8781 else if (which_alternative != 0)
8782 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8783 return \"%i5%d4\\t%0, %1, #1\";
8784 "
8785 [(set_attr "conds" "clob")
8786 (set_attr "length" "12")]
8787 )
8788
8789 (define_insn "*cond_sub"
8790 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8791 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8792 (match_operator:SI 4 "arm_comparison_operator"
8793 [(match_operand:SI 2 "s_register_operand" "r,r")
8794 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8795 (clobber (reg:CC CC_REGNUM))]
8796 "TARGET_ARM"
8797 "*
8798 output_asm_insn (\"cmp\\t%2, %3\", operands);
8799 if (which_alternative != 0)
8800 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8801 return \"sub%d4\\t%0, %1, #1\";
8802 "
8803 [(set_attr "conds" "clob")
8804 (set_attr "length" "8,12")]
8805 )
8806
8807 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
8808 (define_insn "*cmp_ite0"
8809 [(set (match_operand 6 "dominant_cc_register" "")
8810 (compare
8811 (if_then_else:SI
8812 (match_operator 4 "arm_comparison_operator"
8813 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8814 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8815 (match_operator:SI 5 "arm_comparison_operator"
8816 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8817 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8818 (const_int 0))
8819 (const_int 0)))]
8820 "TARGET_ARM"
8821 "*
8822 {
8823 static const char * const opcodes[4][2] =
8824 {
8825 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8826 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8827 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8828 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8829 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8830 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8831 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8832 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8833 };
8834 int swap =
8835 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8836
8837 return opcodes[which_alternative][swap];
8838 }"
8839 [(set_attr "conds" "set")
8840 (set_attr "length" "8")]
8841 )
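;; Illustrative sketch only: a combined condition such as
;; (a == b) && (c == d) can be evaluated with a dominated pair of
;; compares, e.g.
;;   cmp    r2, r3
;;   cmpeq  r0, r1
;; leaving a single condition in the flags for the following branch.
;; Register numbers here are hypothetical.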
8842
8843 (define_insn "*cmp_ite1"
8844 [(set (match_operand 6 "dominant_cc_register" "")
8845 (compare
8846 (if_then_else:SI
8847 (match_operator 4 "arm_comparison_operator"
8848 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8849 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8850 (match_operator:SI 5 "arm_comparison_operator"
8851 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8852 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
8853 (const_int 1))
8854 (const_int 0)))]
8855 "TARGET_ARM"
8856 "*
8857 {
8858 static const char * const opcodes[4][2] =
8859 {
8860 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
8861 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8862 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
8863 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8864 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
8865 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8866 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
8867 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8868 };
8869 int swap =
8870 comparison_dominates_p (GET_CODE (operands[5]),
8871 reverse_condition (GET_CODE (operands[4])));
8872
8873 return opcodes[which_alternative][swap];
8874 }"
8875 [(set_attr "conds" "set")
8876 (set_attr "length" "8")]
8877 )
8878
8879 (define_insn "*cmp_and"
8880 [(set (match_operand 6 "dominant_cc_register" "")
8881 (compare
8882 (and:SI
8883 (match_operator 4 "arm_comparison_operator"
8884 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8885 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8886 (match_operator:SI 5 "arm_comparison_operator"
8887 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8888 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8889 (const_int 0)))]
8890 "TARGET_ARM"
8891 "*
8892 {
8893 static const char *const opcodes[4][2] =
8894 {
8895 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
8896 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
8897 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
8898 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
8899 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
8900 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
8901 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
8902 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
8903 };
8904 int swap =
8905 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8906
8907 return opcodes[which_alternative][swap];
8908 }"
8909 [(set_attr "conds" "set")
8910 (set_attr "predicable" "no")
8911 (set_attr "length" "8")]
8912 )
8913
8914 (define_insn "*cmp_ior"
8915 [(set (match_operand 6 "dominant_cc_register" "")
8916 (compare
8917 (ior:SI
8918 (match_operator 4 "arm_comparison_operator"
8919 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
8920 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
8921 (match_operator:SI 5 "arm_comparison_operator"
8922 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
8923 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
8924 (const_int 0)))]
8925 "TARGET_ARM"
8926 "*
8927 {
8928 static const char *const opcodes[4][2] =
8929 {
8930 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
8931 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
8932 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
8933 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
8934 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
8935 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
8936 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
8937 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
8938 };
8939 int swap =
8940 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8941
8942 return opcodes[which_alternative][swap];
8943 }
8944 "
8945 [(set_attr "conds" "set")
8946 (set_attr "length" "8")]
8947 )
8948
8949 (define_insn_and_split "*ior_scc_scc"
8950 [(set (match_operand:SI 0 "s_register_operand" "=r")
8951 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8952 [(match_operand:SI 1 "s_register_operand" "r")
8953 (match_operand:SI 2 "arm_add_operand" "rIL")])
8954 (match_operator:SI 6 "arm_comparison_operator"
8955 [(match_operand:SI 4 "s_register_operand" "r")
8956 (match_operand:SI 5 "arm_add_operand" "rIL")])))
8957 (clobber (reg:CC CC_REGNUM))]
8958 "TARGET_ARM
8959 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8960 != CCmode)"
8961 "#"
8962 "TARGET_ARM && reload_completed"
8963 [(set (match_dup 7)
8964 (compare
8965 (ior:SI
8966 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8967 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8968 (const_int 0)))
8969 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8970 "operands[7]
8971 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8972 DOM_CC_X_OR_Y),
8973 CC_REGNUM);"
8974 [(set_attr "conds" "clob")
8975 (set_attr "length" "16")])
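;; Illustrative sketch only: for r = (a == b) || (c == d) the split
;; above produces one dominant comparison sequence, roughly
;;   cmp    r0, r1
;;   cmpne  r2, r3
;; followed by a two-instruction scc that writes 1 or 0 into r, rather
;; than materialising each comparison separately and ORing the results.
;; Register choices are hypothetical.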
8976
8977 ; If the above pattern is followed by a CMP insn, then the compare is
8978 ; redundant, since we can rework the conditional instruction that follows.
8979 (define_insn_and_split "*ior_scc_scc_cmp"
8980 [(set (match_operand 0 "dominant_cc_register" "")
8981 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8982 [(match_operand:SI 1 "s_register_operand" "r")
8983 (match_operand:SI 2 "arm_add_operand" "rIL")])
8984 (match_operator:SI 6 "arm_comparison_operator"
8985 [(match_operand:SI 4 "s_register_operand" "r")
8986 (match_operand:SI 5 "arm_add_operand" "rIL")]))
8987 (const_int 0)))
8988 (set (match_operand:SI 7 "s_register_operand" "=r")
8989 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8990 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8991 "TARGET_ARM"
8992 "#"
8993 "TARGET_ARM && reload_completed"
8994 [(set (match_dup 0)
8995 (compare
8996 (ior:SI
8997 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8998 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8999 (const_int 0)))
9000 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9001 ""
9002 [(set_attr "conds" "set")
9003 (set_attr "length" "16")])
9004
9005 (define_insn_and_split "*and_scc_scc"
9006 [(set (match_operand:SI 0 "s_register_operand" "=r")
9007 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9008 [(match_operand:SI 1 "s_register_operand" "r")
9009 (match_operand:SI 2 "arm_add_operand" "rIL")])
9010 (match_operator:SI 6 "arm_comparison_operator"
9011 [(match_operand:SI 4 "s_register_operand" "r")
9012 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9013 (clobber (reg:CC CC_REGNUM))]
9014 "TARGET_ARM
9015 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9016 != CCmode)"
9017 "#"
9018 "TARGET_ARM && reload_completed
9019 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9020 != CCmode)"
9021 [(set (match_dup 7)
9022 (compare
9023 (and:SI
9024 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9025 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9026 (const_int 0)))
9027 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9028 "operands[7]
9029 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9030 DOM_CC_X_AND_Y),
9031 CC_REGNUM);"
9032 [(set_attr "conds" "clob")
9033 (set_attr "length" "16")])
9034
9035 ; If the above pattern is followed by a CMP insn, then the compare is
9036 ; redundant, since we can rework the conditional instruction that follows.
9037 (define_insn_and_split "*and_scc_scc_cmp"
9038 [(set (match_operand 0 "dominant_cc_register" "")
9039 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9040 [(match_operand:SI 1 "s_register_operand" "r")
9041 (match_operand:SI 2 "arm_add_operand" "rIL")])
9042 (match_operator:SI 6 "arm_comparison_operator"
9043 [(match_operand:SI 4 "s_register_operand" "r")
9044 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9045 (const_int 0)))
9046 (set (match_operand:SI 7 "s_register_operand" "=r")
9047 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9048 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9049 "TARGET_ARM"
9050 "#"
9051 "TARGET_ARM && reload_completed"
9052 [(set (match_dup 0)
9053 (compare
9054 (and:SI
9055 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9056 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9057 (const_int 0)))
9058 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9059 ""
9060 [(set_attr "conds" "set")
9061 (set_attr "length" "16")])
9062
9063 ;; If there is no dominance in the comparison, then we can still save an
9064 ;; instruction in the AND case, since we can know that the second compare
9065 ;; need only zero the value if false (if true, then the value is already
9066 ;; correct).
9067 (define_insn_and_split "*and_scc_scc_nodom"
9068 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9069 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9070 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9071 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9072 (match_operator:SI 6 "arm_comparison_operator"
9073 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9074 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9075 (clobber (reg:CC CC_REGNUM))]
9076 "TARGET_ARM
9077 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9078 == CCmode)"
9079 "#"
9080 "TARGET_ARM && reload_completed"
9081 [(parallel [(set (match_dup 0)
9082 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9083 (clobber (reg:CC CC_REGNUM))])
9084 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9085 (set (match_dup 0)
9086 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9087 (match_dup 0)
9088 (const_int 0)))]
9089 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9090 operands[4], operands[5]),
9091 CC_REGNUM);
9092 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9093 operands[5]);"
9094 [(set_attr "conds" "clob")
9095 (set_attr "length" "20")])
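;; Illustrative sketch only: for r = (a < b) & (c == d), where no single
;; CC mode covers both tests, the split above gives roughly
;;   cmp   r0, r1
;;   movge rD, #0
;;   movlt rD, #1
;;   cmp   r2, r3
;;   movne rD, #0
;; i.e. the second comparison only has to clear the result when it is
;; false.  Register choices are hypothetical.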
9096
9097 (define_split
9098 [(set (reg:CC_NOOV CC_REGNUM)
9099 (compare:CC_NOOV (ior:SI
9100 (and:SI (match_operand:SI 0 "s_register_operand" "")
9101 (const_int 1))
9102 (match_operator:SI 1 "arm_comparison_operator"
9103 [(match_operand:SI 2 "s_register_operand" "")
9104 (match_operand:SI 3 "arm_add_operand" "")]))
9105 (const_int 0)))
9106 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9107 "TARGET_ARM"
9108 [(set (match_dup 4)
9109 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9110 (match_dup 0)))
9111 (set (reg:CC_NOOV CC_REGNUM)
9112 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9113 (const_int 0)))]
9114 "")
9115
9116 (define_split
9117 [(set (reg:CC_NOOV CC_REGNUM)
9118 (compare:CC_NOOV (ior:SI
9119 (match_operator:SI 1 "arm_comparison_operator"
9120 [(match_operand:SI 2 "s_register_operand" "")
9121 (match_operand:SI 3 "arm_add_operand" "")])
9122 (and:SI (match_operand:SI 0 "s_register_operand" "")
9123 (const_int 1)))
9124 (const_int 0)))
9125 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9126 "TARGET_ARM"
9127 [(set (match_dup 4)
9128 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9129 (match_dup 0)))
9130 (set (reg:CC_NOOV CC_REGNUM)
9131 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9132 (const_int 0)))]
9133 "")
9134 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9135
9136 (define_insn "*negscc"
9137 [(set (match_operand:SI 0 "s_register_operand" "=r")
9138 (neg:SI (match_operator 3 "arm_comparison_operator"
9139 [(match_operand:SI 1 "s_register_operand" "r")
9140 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9141 (clobber (reg:CC CC_REGNUM))]
9142 "TARGET_ARM"
9143 "*
9144 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9145 return \"mov\\t%0, %1, asr #31\";
9146
9147 if (GET_CODE (operands[3]) == NE)
9148 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9149
9150 output_asm_insn (\"cmp\\t%1, %2\", operands);
9151 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9152 return \"mvn%d3\\t%0, #0\";
9153 "
9154 [(set_attr "conds" "clob")
9155 (set_attr "length" "12")]
9156 )
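;; For reference, the special cases handled above correspond to source
;; such as r = -(a < 0), which becomes a single
;;   mov   r0, r1, asr #31
;; and r = -(a != b), which becomes
;;   subs  r0, r1, r2
;;   mvnne r0, #0
;; (both taken directly from the output template).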
9157
9158 (define_insn "movcond"
9159 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9160 (if_then_else:SI
9161 (match_operator 5 "arm_comparison_operator"
9162 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9163 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9164 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9165 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9166 (clobber (reg:CC CC_REGNUM))]
9167 "TARGET_ARM"
9168 "*
9169 if (GET_CODE (operands[5]) == LT
9170 && (operands[4] == const0_rtx))
9171 {
9172 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9173 {
9174 if (operands[2] == const0_rtx)
9175 return \"and\\t%0, %1, %3, asr #31\";
9176 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9177 }
9178 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9179 {
9180 if (operands[1] == const0_rtx)
9181 return \"bic\\t%0, %2, %3, asr #31\";
9182 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9183 }
9184 /* The only case that falls through to here is when both ops 1 & 2
9185 are constants. */
9186 }
9187
9188 if (GET_CODE (operands[5]) == GE
9189 && (operands[4] == const0_rtx))
9190 {
9191 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9192 {
9193 if (operands[2] == const0_rtx)
9194 return \"bic\\t%0, %1, %3, asr #31\";
9195 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9196 }
9197 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9198 {
9199 if (operands[1] == const0_rtx)
9200 return \"and\\t%0, %2, %3, asr #31\";
9201 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9202 }
9203 /* The only case that falls through to here is when both ops 1 & 2
9204 are constants. */
9205 }
9206 if (GET_CODE (operands[4]) == CONST_INT
9207 && !const_ok_for_arm (INTVAL (operands[4])))
9208 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9209 else
9210 output_asm_insn (\"cmp\\t%3, %4\", operands);
9211 if (which_alternative != 0)
9212 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9213 if (which_alternative != 1)
9214 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9215 return \"\";
9216 "
9217 [(set_attr "conds" "clob")
9218 (set_attr "length" "8,8,12")]
9219 )
9220
9221 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9222
9223 (define_insn "*ifcompare_plus_move"
9224 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9225 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9226 [(match_operand:SI 4 "s_register_operand" "r,r")
9227 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9228 (plus:SI
9229 (match_operand:SI 2 "s_register_operand" "r,r")
9230 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9231 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9232 (clobber (reg:CC CC_REGNUM))]
9233 "TARGET_ARM"
9234 "#"
9235 [(set_attr "conds" "clob")
9236 (set_attr "length" "8,12")]
9237 )
9238
9239 (define_insn "*if_plus_move"
9240 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9241 (if_then_else:SI
9242 (match_operator 4 "arm_comparison_operator"
9243 [(match_operand 5 "cc_register" "") (const_int 0)])
9244 (plus:SI
9245 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9246 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9247 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9248 "TARGET_ARM"
9249 "@
9250 add%d4\\t%0, %2, %3
9251 sub%d4\\t%0, %2, #%n3
9252 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9253 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9254 [(set_attr "conds" "use")
9255 (set_attr "length" "4,4,8,8")
9256 (set_attr "type" "*,*,*,*")]
9257 )
9258
9259 (define_insn "*ifcompare_move_plus"
9260 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9261 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9262 [(match_operand:SI 4 "s_register_operand" "r,r")
9263 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9264 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9265 (plus:SI
9266 (match_operand:SI 2 "s_register_operand" "r,r")
9267 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9268 (clobber (reg:CC CC_REGNUM))]
9269 "TARGET_ARM"
9270 "#"
9271 [(set_attr "conds" "clob")
9272 (set_attr "length" "8,12")]
9273 )
9274
9275 (define_insn "*if_move_plus"
9276 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9277 (if_then_else:SI
9278 (match_operator 4 "arm_comparison_operator"
9279 [(match_operand 5 "cc_register" "") (const_int 0)])
9280 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9281 (plus:SI
9282 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9283 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9284 "TARGET_ARM"
9285 "@
9286 add%D4\\t%0, %2, %3
9287 sub%D4\\t%0, %2, #%n3
9288 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9289 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9290 [(set_attr "conds" "use")
9291 (set_attr "length" "4,4,8,8")
9292 (set_attr "type" "*,*,*,*")]
9293 )
9294
9295 (define_insn "*ifcompare_arith_arith"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r")
9297 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9298 [(match_operand:SI 5 "s_register_operand" "r")
9299 (match_operand:SI 6 "arm_add_operand" "rIL")])
9300 (match_operator:SI 8 "shiftable_operator"
9301 [(match_operand:SI 1 "s_register_operand" "r")
9302 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9303 (match_operator:SI 7 "shiftable_operator"
9304 [(match_operand:SI 3 "s_register_operand" "r")
9305 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9306 (clobber (reg:CC CC_REGNUM))]
9307 "TARGET_ARM"
9308 "#"
9309 [(set_attr "conds" "clob")
9310 (set_attr "length" "12")]
9311 )
9312
9313 (define_insn "*if_arith_arith"
9314 [(set (match_operand:SI 0 "s_register_operand" "=r")
9315 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9316 [(match_operand 8 "cc_register" "") (const_int 0)])
9317 (match_operator:SI 6 "shiftable_operator"
9318 [(match_operand:SI 1 "s_register_operand" "r")
9319 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9320 (match_operator:SI 7 "shiftable_operator"
9321 [(match_operand:SI 3 "s_register_operand" "r")
9322 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9323 "TARGET_ARM"
9324 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9325 [(set_attr "conds" "use")
9326 (set_attr "length" "8")]
9327 )
9328
9329 (define_insn "*ifcompare_arith_move"
9330 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9331 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9332 [(match_operand:SI 2 "s_register_operand" "r,r")
9333 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9334 (match_operator:SI 7 "shiftable_operator"
9335 [(match_operand:SI 4 "s_register_operand" "r,r")
9336 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9337 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9338 (clobber (reg:CC CC_REGNUM))]
9339 "TARGET_ARM"
9340 "*
9341 /* If we have an operation where (op x 0) is the identity operation,
9342 the conditional operator is LT or GE, we are comparing against zero,
9343 and everything is in registers, then we can do this in two instructions. */
9344 if (operands[3] == const0_rtx
9345 && GET_CODE (operands[7]) != AND
9346 && GET_CODE (operands[5]) == REG
9347 && GET_CODE (operands[1]) == REG
9348 && REGNO (operands[1]) == REGNO (operands[4])
9349 && REGNO (operands[4]) != REGNO (operands[0]))
9350 {
9351 if (GET_CODE (operands[6]) == LT)
9352 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9353 else if (GET_CODE (operands[6]) == GE)
9354 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9355 }
9356 if (GET_CODE (operands[3]) == CONST_INT
9357 && !const_ok_for_arm (INTVAL (operands[3])))
9358 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9359 else
9360 output_asm_insn (\"cmp\\t%2, %3\", operands);
9361 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9362 if (which_alternative != 0)
9363 return \"mov%D6\\t%0, %1\";
9364 return \"\";
9365 "
9366 [(set_attr "conds" "clob")
9367 (set_attr "length" "8,12")]
9368 )
9369
9370 (define_insn "*if_arith_move"
9371 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9372 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9373 [(match_operand 6 "cc_register" "") (const_int 0)])
9374 (match_operator:SI 5 "shiftable_operator"
9375 [(match_operand:SI 2 "s_register_operand" "r,r")
9376 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9377 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9378 "TARGET_ARM"
9379 "@
9380 %I5%d4\\t%0, %2, %3
9381 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9382 [(set_attr "conds" "use")
9383 (set_attr "length" "4,8")
9384 (set_attr "type" "*,*")]
9385 )
9386
9387 (define_insn "*ifcompare_move_arith"
9388 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9389 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9390 [(match_operand:SI 4 "s_register_operand" "r,r")
9391 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9392 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9393 (match_operator:SI 7 "shiftable_operator"
9394 [(match_operand:SI 2 "s_register_operand" "r,r")
9395 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9396 (clobber (reg:CC CC_REGNUM))]
9397 "TARGET_ARM"
9398 "*
9399 /* If we have an operation where (op x 0) is the identity operation,
9400 the conditional operator is LT or GE, we are comparing against zero,
9401 and everything is in registers, then we can do this in two instructions. */
9402 if (operands[5] == const0_rtx
9403 && GET_CODE (operands[7]) != AND
9404 && GET_CODE (operands[3]) == REG
9405 && GET_CODE (operands[1]) == REG
9406 && REGNO (operands[1]) == REGNO (operands[2])
9407 && REGNO (operands[2]) != REGNO (operands[0]))
9408 {
9409 if (GET_CODE (operands[6]) == GE)
9410 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9411 else if (GET_CODE (operands[6]) == LT)
9412 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9413 }
9414
9415 if (GET_CODE (operands[5]) == CONST_INT
9416 && !const_ok_for_arm (INTVAL (operands[5])))
9417 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9418 else
9419 output_asm_insn (\"cmp\\t%4, %5\", operands);
9420
9421 if (which_alternative != 0)
9422 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9423 return \"%I7%D6\\t%0, %2, %3\";
9424 "
9425 [(set_attr "conds" "clob")
9426 (set_attr "length" "8,12")]
9427 )
9428
9429 (define_insn "*if_move_arith"
9430 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9431 (if_then_else:SI
9432 (match_operator 4 "arm_comparison_operator"
9433 [(match_operand 6 "cc_register" "") (const_int 0)])
9434 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9435 (match_operator:SI 5 "shiftable_operator"
9436 [(match_operand:SI 2 "s_register_operand" "r,r")
9437 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9438 "TARGET_ARM"
9439 "@
9440 %I5%D4\\t%0, %2, %3
9441 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9442 [(set_attr "conds" "use")
9443 (set_attr "length" "4,8")
9444 (set_attr "type" "*,*")]
9445 )
9446
9447 (define_insn "*ifcompare_move_not"
9448 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9449 (if_then_else:SI
9450 (match_operator 5 "arm_comparison_operator"
9451 [(match_operand:SI 3 "s_register_operand" "r,r")
9452 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9453 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9454 (not:SI
9455 (match_operand:SI 2 "s_register_operand" "r,r"))))
9456 (clobber (reg:CC CC_REGNUM))]
9457 "TARGET_ARM"
9458 "#"
9459 [(set_attr "conds" "clob")
9460 (set_attr "length" "8,12")]
9461 )
9462
9463 (define_insn "*if_move_not"
9464 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9465 (if_then_else:SI
9466 (match_operator 4 "arm_comparison_operator"
9467 [(match_operand 3 "cc_register" "") (const_int 0)])
9468 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9469 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9470 "TARGET_ARM"
9471 "@
9472 mvn%D4\\t%0, %2
9473 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9474 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9475 [(set_attr "conds" "use")
9476 (set_attr "length" "4,8,8")]
9477 )
9478
9479 (define_insn "*ifcompare_not_move"
9480 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9481 (if_then_else:SI
9482 (match_operator 5 "arm_comparison_operator"
9483 [(match_operand:SI 3 "s_register_operand" "r,r")
9484 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9485 (not:SI
9486 (match_operand:SI 2 "s_register_operand" "r,r"))
9487 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9488 (clobber (reg:CC CC_REGNUM))]
9489 "TARGET_ARM"
9490 "#"
9491 [(set_attr "conds" "clob")
9492 (set_attr "length" "8,12")]
9493 )
9494
9495 (define_insn "*if_not_move"
9496 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9497 (if_then_else:SI
9498 (match_operator 4 "arm_comparison_operator"
9499 [(match_operand 3 "cc_register" "") (const_int 0)])
9500 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9501 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9502 "TARGET_ARM"
9503 "@
9504 mvn%d4\\t%0, %2
9505 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9506 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9507 [(set_attr "conds" "use")
9508 (set_attr "length" "4,8,8")]
9509 )
9510
9511 (define_insn "*ifcompare_shift_move"
9512 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9513 (if_then_else:SI
9514 (match_operator 6 "arm_comparison_operator"
9515 [(match_operand:SI 4 "s_register_operand" "r,r")
9516 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9517 (match_operator:SI 7 "shift_operator"
9518 [(match_operand:SI 2 "s_register_operand" "r,r")
9519 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9520 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9521 (clobber (reg:CC CC_REGNUM))]
9522 "TARGET_ARM"
9523 "#"
9524 [(set_attr "conds" "clob")
9525 (set_attr "length" "8,12")]
9526 )
9527
9528 (define_insn "*if_shift_move"
9529 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9530 (if_then_else:SI
9531 (match_operator 5 "arm_comparison_operator"
9532 [(match_operand 6 "cc_register" "") (const_int 0)])
9533 (match_operator:SI 4 "shift_operator"
9534 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9535 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9536 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9537 "TARGET_ARM"
9538 "@
9539 mov%d5\\t%0, %2%S4
9540 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9541 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9542 [(set_attr "conds" "use")
9543 (set_attr "shift" "2")
9544 (set_attr "length" "4,8,8")
9545 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9546 (const_string "alu_shift")
9547 (const_string "alu_shift_reg")))]
9548 )
9549
9550 (define_insn "*ifcompare_move_shift"
9551 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9552 (if_then_else:SI
9553 (match_operator 6 "arm_comparison_operator"
9554 [(match_operand:SI 4 "s_register_operand" "r,r")
9555 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9556 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9557 (match_operator:SI 7 "shift_operator"
9558 [(match_operand:SI 2 "s_register_operand" "r,r")
9559 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9560 (clobber (reg:CC CC_REGNUM))]
9561 "TARGET_ARM"
9562 "#"
9563 [(set_attr "conds" "clob")
9564 (set_attr "length" "8,12")]
9565 )
9566
9567 (define_insn "*if_move_shift"
9568 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9569 (if_then_else:SI
9570 (match_operator 5 "arm_comparison_operator"
9571 [(match_operand 6 "cc_register" "") (const_int 0)])
9572 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9573 (match_operator:SI 4 "shift_operator"
9574 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9575 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9576 "TARGET_ARM"
9577 "@
9578 mov%D5\\t%0, %2%S4
9579 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9580 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9581 [(set_attr "conds" "use")
9582 (set_attr "shift" "2")
9583 (set_attr "length" "4,8,8")
9584 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9585 (const_string "alu_shift")
9586 (const_string "alu_shift_reg")))]
9587 )
9588
9589 (define_insn "*ifcompare_shift_shift"
9590 [(set (match_operand:SI 0 "s_register_operand" "=r")
9591 (if_then_else:SI
9592 (match_operator 7 "arm_comparison_operator"
9593 [(match_operand:SI 5 "s_register_operand" "r")
9594 (match_operand:SI 6 "arm_add_operand" "rIL")])
9595 (match_operator:SI 8 "shift_operator"
9596 [(match_operand:SI 1 "s_register_operand" "r")
9597 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9598 (match_operator:SI 9 "shift_operator"
9599 [(match_operand:SI 3 "s_register_operand" "r")
9600 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9601 (clobber (reg:CC CC_REGNUM))]
9602 "TARGET_ARM"
9603 "#"
9604 [(set_attr "conds" "clob")
9605 (set_attr "length" "12")]
9606 )
9607
9608 (define_insn "*if_shift_shift"
9609 [(set (match_operand:SI 0 "s_register_operand" "=r")
9610 (if_then_else:SI
9611 (match_operator 5 "arm_comparison_operator"
9612 [(match_operand 8 "cc_register" "") (const_int 0)])
9613 (match_operator:SI 6 "shift_operator"
9614 [(match_operand:SI 1 "s_register_operand" "r")
9615 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9616 (match_operator:SI 7 "shift_operator"
9617 [(match_operand:SI 3 "s_register_operand" "r")
9618 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9619 "TARGET_ARM"
9620 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9621 [(set_attr "conds" "use")
9622 (set_attr "shift" "1")
9623 (set_attr "length" "8")
9624 (set (attr "type") (if_then_else
9625 (and (match_operand 2 "const_int_operand" "")
9626 (match_operand 4 "const_int_operand" ""))
9627 (const_string "alu_shift")
9628 (const_string "alu_shift_reg")))]
9629 )
9630
9631 (define_insn "*ifcompare_not_arith"
9632 [(set (match_operand:SI 0 "s_register_operand" "=r")
9633 (if_then_else:SI
9634 (match_operator 6 "arm_comparison_operator"
9635 [(match_operand:SI 4 "s_register_operand" "r")
9636 (match_operand:SI 5 "arm_add_operand" "rIL")])
9637 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9638 (match_operator:SI 7 "shiftable_operator"
9639 [(match_operand:SI 2 "s_register_operand" "r")
9640 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9641 (clobber (reg:CC CC_REGNUM))]
9642 "TARGET_ARM"
9643 "#"
9644 [(set_attr "conds" "clob")
9645 (set_attr "length" "12")]
9646 )
9647
9648 (define_insn "*if_not_arith"
9649 [(set (match_operand:SI 0 "s_register_operand" "=r")
9650 (if_then_else:SI
9651 (match_operator 5 "arm_comparison_operator"
9652 [(match_operand 4 "cc_register" "") (const_int 0)])
9653 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9654 (match_operator:SI 6 "shiftable_operator"
9655 [(match_operand:SI 2 "s_register_operand" "r")
9656 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9657 "TARGET_ARM"
9658 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9659 [(set_attr "conds" "use")
9660 (set_attr "length" "8")]
9661 )
9662
9663 (define_insn "*ifcompare_arith_not"
9664 [(set (match_operand:SI 0 "s_register_operand" "=r")
9665 (if_then_else:SI
9666 (match_operator 6 "arm_comparison_operator"
9667 [(match_operand:SI 4 "s_register_operand" "r")
9668 (match_operand:SI 5 "arm_add_operand" "rIL")])
9669 (match_operator:SI 7 "shiftable_operator"
9670 [(match_operand:SI 2 "s_register_operand" "r")
9671 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9672 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9673 (clobber (reg:CC CC_REGNUM))]
9674 "TARGET_ARM"
9675 "#"
9676 [(set_attr "conds" "clob")
9677 (set_attr "length" "12")]
9678 )
9679
9680 (define_insn "*if_arith_not"
9681 [(set (match_operand:SI 0 "s_register_operand" "=r")
9682 (if_then_else:SI
9683 (match_operator 5 "arm_comparison_operator"
9684 [(match_operand 4 "cc_register" "") (const_int 0)])
9685 (match_operator:SI 6 "shiftable_operator"
9686 [(match_operand:SI 2 "s_register_operand" "r")
9687 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9688 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9689 "TARGET_ARM"
9690 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9691 [(set_attr "conds" "use")
9692 (set_attr "length" "8")]
9693 )
9694
9695 (define_insn "*ifcompare_neg_move"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9697 (if_then_else:SI
9698 (match_operator 5 "arm_comparison_operator"
9699 [(match_operand:SI 3 "s_register_operand" "r,r")
9700 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9701 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9702 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9703 (clobber (reg:CC CC_REGNUM))]
9704 "TARGET_ARM"
9705 "#"
9706 [(set_attr "conds" "clob")
9707 (set_attr "length" "8,12")]
9708 )
9709
9710 (define_insn "*if_neg_move"
9711 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9712 (if_then_else:SI
9713 (match_operator 4 "arm_comparison_operator"
9714 [(match_operand 3 "cc_register" "") (const_int 0)])
9715 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9716 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9717 "TARGET_ARM"
9718 "@
9719 rsb%d4\\t%0, %2, #0
9720 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
9721 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
9722 [(set_attr "conds" "use")
9723 (set_attr "length" "4,8,8")]
9724 )
9725
9726 (define_insn "*ifcompare_move_neg"
9727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9728 (if_then_else:SI
9729 (match_operator 5 "arm_comparison_operator"
9730 [(match_operand:SI 3 "s_register_operand" "r,r")
9731 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9732 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9733 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9734 (clobber (reg:CC CC_REGNUM))]
9735 "TARGET_ARM"
9736 "#"
9737 [(set_attr "conds" "clob")
9738 (set_attr "length" "8,12")]
9739 )
9740
9741 (define_insn "*if_move_neg"
9742 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9743 (if_then_else:SI
9744 (match_operator 4 "arm_comparison_operator"
9745 [(match_operand 3 "cc_register" "") (const_int 0)])
9746 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9747 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9748 "TARGET_ARM"
9749 "@
9750 rsb%D4\\t%0, %2, #0
9751 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
9752 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
9753 [(set_attr "conds" "use")
9754 (set_attr "length" "4,8,8")]
9755 )
9756
9757 (define_insn "*arith_adjacentmem"
9758 [(set (match_operand:SI 0 "s_register_operand" "=r")
9759 (match_operator:SI 1 "shiftable_operator"
9760 [(match_operand:SI 2 "memory_operand" "m")
9761 (match_operand:SI 3 "memory_operand" "m")]))
9762 (clobber (match_scratch:SI 4 "=r"))]
9763 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9764 "*
9765 {
9766 rtx ldm[3];
9767 rtx arith[4];
9768 rtx base_reg;
9769 HOST_WIDE_INT val1 = 0, val2 = 0;
9770
9771 if (REGNO (operands[0]) > REGNO (operands[4]))
9772 {
9773 ldm[1] = operands[4];
9774 ldm[2] = operands[0];
9775 }
9776 else
9777 {
9778 ldm[1] = operands[0];
9779 ldm[2] = operands[4];
9780 }
9781
9782 base_reg = XEXP (operands[2], 0);
9783
9784 if (!REG_P (base_reg))
9785 {
9786 val1 = INTVAL (XEXP (base_reg, 1));
9787 base_reg = XEXP (base_reg, 0);
9788 }
9789
9790 if (!REG_P (XEXP (operands[3], 0)))
9791 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9792
9793 arith[0] = operands[0];
9794 arith[3] = operands[1];
9795
9796 if (val1 < val2)
9797 {
9798 arith[1] = ldm[1];
9799 arith[2] = ldm[2];
9800 }
9801 else
9802 {
9803 arith[1] = ldm[2];
9804 arith[2] = ldm[1];
9805 }
9806
9807 ldm[0] = base_reg;
9808   if (val1 != 0 && val2 != 0)
9809 {
9810 rtx ops[3];
9811
9812 if (val1 == 4 || val2 == 4)
9813 /* Other val must be 8, since we know they are adjacent and neither
9814 is zero. */
9815 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
9816 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9817 {
9818 ldm[0] = ops[0] = operands[4];
9819 ops[1] = base_reg;
9820 ops[2] = GEN_INT (val1);
9821 output_add_immediate (ops);
9822 if (val1 < val2)
9823 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9824 else
9825 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9826 }
9827 else
9828 {
9829           /* Offset is out of range for a single add, so use two ldr instructions. */
9830 ops[0] = ldm[1];
9831 ops[1] = base_reg;
9832 ops[2] = GEN_INT (val1);
9833 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9834 ops[0] = ldm[2];
9835 ops[2] = GEN_INT (val2);
9836 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9837 }
9838 }
9839 else if (val1 != 0)
9840 {
9841 if (val1 < val2)
9842 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9843 else
9844 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9845 }
9846 else
9847 {
9848 if (val1 < val2)
9849 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
9850 else
9851 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
9852 }
9853 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9854 return \"\";
9855 }"
9856 [(set_attr "length" "12")
9857 (set_attr "predicable" "yes")
9858 (set_attr "type" "load1")]
9859 )
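;; Illustrative sketch only: for source such as r = p[0] + p[1] this
;; pattern can load both words with a single load-multiple, roughly
;; (assuming the result register is r0 and the scratch is r3)
;;   ldmia r0, {r0, r3}
;;   add   r0, r0, r3
;; instead of two separate loads followed by the add.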
9860
9861 ; This pattern is never tried by combine, so do it as a peephole2.
9862
9863 (define_peephole2
9864 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9865 (match_operand:SI 1 "arm_general_register_operand" ""))
9866 (set (reg:CC CC_REGNUM)
9867 (compare:CC (match_dup 1) (const_int 0)))]
9868 "TARGET_ARM"
9869 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9870 (set (match_dup 0) (match_dup 1))])]
9871 ""
9872 )
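;; In other words, a move followed by a comparison of the source
;; against zero, e.g.
;;   mov  r0, r1
;;   cmp  r1, #0
;; can be collapsed into a single flag-setting instruction (for example
;; subs r0, r1, #0, depending on which combined pattern matches the
;; resulting parallel).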
9873
9874 (define_split
9875 [(set (match_operand:SI 0 "s_register_operand" "")
9876 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9877 (const_int 0))
9878 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9879 [(match_operand:SI 3 "s_register_operand" "")
9880 (match_operand:SI 4 "arm_rhs_operand" "")]))))
9881 (clobber (match_operand:SI 5 "s_register_operand" ""))]
9882 "TARGET_ARM"
9883 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
9884 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
9885 (match_dup 5)))]
9886 ""
9887 )
9888
9889 ;; This split can be used because CC_Z mode implies that the following
9890 ;; branch will be an equality, or an unsigned inequality, so the sign
9891 ;; extension is not needed.
9892
9893 (define_split
9894 [(set (reg:CC_Z CC_REGNUM)
9895 (compare:CC_Z
9896 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
9897 (const_int 24))
9898 (match_operand 1 "const_int_operand" "")))
9899 (clobber (match_scratch:SI 2 ""))]
9900 "TARGET_ARM
9901 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
9902 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
9903 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
9904 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
9905 "
9906 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
9907 "
9908 )
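;; For example, a test along the lines of (*p << 24) == (C << 24),
;; where the low 24 bits of the constant are zero, can instead
;; zero-extend the loaded byte and compare it directly, roughly
;;   ldrb  r2, [rp]
;;   cmp   r2, #C
;; (C and the registers here are hypothetical).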
9909 ;; ??? Check the patterns above for Thumb-2 usefulness
9910
9911 (define_expand "prologue"
9912 [(clobber (const_int 0))]
9913 "TARGET_EITHER"
9914 "if (TARGET_32BIT)
9915 arm_expand_prologue ();
9916 else
9917 thumb1_expand_prologue ();
9918 DONE;
9919 "
9920 )
9921
9922 (define_expand "epilogue"
9923 [(clobber (const_int 0))]
9924 "TARGET_EITHER"
9925 "
9926 if (crtl->calls_eh_return)
9927 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
9928 if (TARGET_THUMB1)
9929 thumb1_expand_epilogue ();
9930 else if (USE_RETURN_INSN (FALSE))
9931 {
9932 emit_jump_insn (gen_return ());
9933 DONE;
9934 }
9935 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
9936 gen_rtvec (1,
9937 gen_rtx_RETURN (VOIDmode)),
9938 VUNSPEC_EPILOGUE));
9939 DONE;
9940 "
9941 )
9942
9943 ;; Note - although unspec_volatiles USE all hard registers,
9944 ;; USEs are ignored after reload has completed. Thus we need
9945 ;; to add an unspec of the link register to ensure that flow
9946 ;; does not think that it is unused by the sibcall branch that
9947 ;; will replace the standard function epilogue.
9948 (define_insn "sibcall_epilogue"
9949 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
9950 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
9951 "TARGET_32BIT"
9952 "*
9953 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
9954 return output_return_instruction (const_true_rtx, FALSE, FALSE);
9955 return arm_output_epilogue (next_nonnote_insn (insn));
9956 "
9957 ;; Length is absolute worst case
9958 [(set_attr "length" "44")
9959 (set_attr "type" "block")
9960 ;; We don't clobber the conditions, but the potential length of this
9961 ;; operation is sufficient to make conditionalizing the sequence
9962 ;; unlikely to be profitable.
9963 (set_attr "conds" "clob")]
9964 )
9965
9966 (define_insn "*epilogue_insns"
9967 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
9968 "TARGET_EITHER"
9969 "*
9970 if (TARGET_32BIT)
9971 return arm_output_epilogue (NULL);
9972 else /* TARGET_THUMB1 */
9973 return thumb_unexpanded_epilogue ();
9974 "
9975 ;; Length is absolute worst case
9976 [(set_attr "length" "44")
9977 (set_attr "type" "block")
9978 ;; We don't clobber the conditions, but the potential length of this
9979 ;; operation is sufficient to make conditionalizing the sequence
9980 ;; unlikely to be profitable.
9981 (set_attr "conds" "clob")]
9982 )
9983
9984 (define_expand "eh_epilogue"
9985 [(use (match_operand:SI 0 "register_operand" ""))
9986 (use (match_operand:SI 1 "register_operand" ""))
9987 (use (match_operand:SI 2 "register_operand" ""))]
9988 "TARGET_EITHER"
9989 "
9990 {
9991 cfun->machine->eh_epilogue_sp_ofs = operands[1];
9992 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
9993 {
9994 rtx ra = gen_rtx_REG (Pmode, 2);
9995
9996 emit_move_insn (ra, operands[2]);
9997 operands[2] = ra;
9998 }
9999   /* This is a hack -- we may have crystallized the function type too
10000 early. */
10001 cfun->machine->func_type = 0;
10002 }"
10003 )
10004
10005 ;; This split is only used during output to reduce the number of patterns
10006 ;; that need assembler instructions added to them. We allowed the setting
10007 ;; of the conditions to be implicit during rtl generation so that
10008 ;; the conditional compare patterns would work. However, this conflicts to
10009 ;; some extent with the conditional data operations, so we have to split them
10010 ;; up again here.
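;; As a rough illustration, r = (a < b) ? c : d with the condition
;; codes clobbered is split along the lines of
;;   cmp   ra, rb
;;   movlt r0, rc
;;   movge r0, rd
;; once the comparison has been made explicit.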
10011
10012 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10013 ;; conditional execution sufficient?
10014
10015 (define_split
10016 [(set (match_operand:SI 0 "s_register_operand" "")
10017 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10018 [(match_operand 2 "" "") (match_operand 3 "" "")])
10019 (match_dup 0)
10020 (match_operand 4 "" "")))
10021 (clobber (reg:CC CC_REGNUM))]
10022 "TARGET_ARM && reload_completed"
10023 [(set (match_dup 5) (match_dup 6))
10024 (cond_exec (match_dup 7)
10025 (set (match_dup 0) (match_dup 4)))]
10026 "
10027 {
10028 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10029 operands[2], operands[3]);
10030 enum rtx_code rc = GET_CODE (operands[1]);
10031
10032 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10033 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10034 if (mode == CCFPmode || mode == CCFPEmode)
10035 rc = reverse_condition_maybe_unordered (rc);
10036 else
10037 rc = reverse_condition (rc);
10038
10039 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10040 }"
10041 )
10042
10043 (define_split
10044 [(set (match_operand:SI 0 "s_register_operand" "")
10045 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10046 [(match_operand 2 "" "") (match_operand 3 "" "")])
10047 (match_operand 4 "" "")
10048 (match_dup 0)))
10049 (clobber (reg:CC CC_REGNUM))]
10050 "TARGET_ARM && reload_completed"
10051 [(set (match_dup 5) (match_dup 6))
10052 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10053 (set (match_dup 0) (match_dup 4)))]
10054 "
10055 {
10056 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10057 operands[2], operands[3]);
10058
10059 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10060 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10061 }"
10062 )
10063
10064 (define_split
10065 [(set (match_operand:SI 0 "s_register_operand" "")
10066 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10067 [(match_operand 2 "" "") (match_operand 3 "" "")])
10068 (match_operand 4 "" "")
10069 (match_operand 5 "" "")))
10070 (clobber (reg:CC CC_REGNUM))]
10071 "TARGET_ARM && reload_completed"
10072 [(set (match_dup 6) (match_dup 7))
10073 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10074 (set (match_dup 0) (match_dup 4)))
10075 (cond_exec (match_dup 8)
10076 (set (match_dup 0) (match_dup 5)))]
10077 "
10078 {
10079 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10080 operands[2], operands[3]);
10081 enum rtx_code rc = GET_CODE (operands[1]);
10082
10083 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10084 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10085 if (mode == CCFPmode || mode == CCFPEmode)
10086 rc = reverse_condition_maybe_unordered (rc);
10087 else
10088 rc = reverse_condition (rc);
10089
10090 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10091 }"
10092 )
10093
10094 (define_split
10095 [(set (match_operand:SI 0 "s_register_operand" "")
10096 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10097 [(match_operand:SI 2 "s_register_operand" "")
10098 (match_operand:SI 3 "arm_add_operand" "")])
10099 (match_operand:SI 4 "arm_rhs_operand" "")
10100 (not:SI
10101 (match_operand:SI 5 "s_register_operand" ""))))
10102 (clobber (reg:CC CC_REGNUM))]
10103 "TARGET_ARM && reload_completed"
10104 [(set (match_dup 6) (match_dup 7))
10105 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10106 (set (match_dup 0) (match_dup 4)))
10107 (cond_exec (match_dup 8)
10108 (set (match_dup 0) (not:SI (match_dup 5))))]
10109 "
10110 {
10111 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10112 operands[2], operands[3]);
10113 enum rtx_code rc = GET_CODE (operands[1]);
10114
10115 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10116 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10117 if (mode == CCFPmode || mode == CCFPEmode)
10118 rc = reverse_condition_maybe_unordered (rc);
10119 else
10120 rc = reverse_condition (rc);
10121
10122 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10123 }"
10124 )
10125
10126 (define_insn "*cond_move_not"
10127 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10128 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10129 [(match_operand 3 "cc_register" "") (const_int 0)])
10130 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10131 (not:SI
10132 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10133 "TARGET_ARM"
10134 "@
10135 mvn%D4\\t%0, %2
10136 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10137 [(set_attr "conds" "use")
10138 (set_attr "length" "4,8")]
10139 )
10140
10141 ;; The next two patterns occur when an AND operation is followed by a
10142 ;; scc insn sequence
10143
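;; A minimal C sketch (purely illustrative, function name hypothetical) of
;; source that can give rise to the two one-bit sign-extract patterns below:
;; an AND of a single bit whose result is widened into an all-ones/all-zeros
;; mask.
;;
;;   int bit3_mask (int x)
;;   {
;;     /* -1 if bit 3 of X is set, 0 otherwise; equivalent to a one-bit
;;        sign_extract at position 3.  */
;;     return -((x >> 3) & 1);
;;   }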
10144 (define_insn "*sign_extract_onebit"
10145 [(set (match_operand:SI 0 "s_register_operand" "=r")
10146 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10147 (const_int 1)
10148 (match_operand:SI 2 "const_int_operand" "n")))
10149 (clobber (reg:CC CC_REGNUM))]
10150 "TARGET_ARM"
10151 "*
10152 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10153 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10154 return \"mvnne\\t%0, #0\";
10155 "
10156 [(set_attr "conds" "clob")
10157 (set_attr "length" "8")]
10158 )
10159
10160 (define_insn "*not_signextract_onebit"
10161 [(set (match_operand:SI 0 "s_register_operand" "=r")
10162 (not:SI
10163 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10164 (const_int 1)
10165 (match_operand:SI 2 "const_int_operand" "n"))))
10166 (clobber (reg:CC CC_REGNUM))]
10167 "TARGET_ARM"
10168 "*
10169 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10170 output_asm_insn (\"tst\\t%1, %2\", operands);
10171 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10172 return \"movne\\t%0, #0\";
10173 "
10174 [(set_attr "conds" "clob")
10175 (set_attr "length" "12")]
10176 )
10177 ;; ??? The above patterns need auditing for Thumb-2
10178
10179 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10180 ;; expressions. For simplicity, the first register is also in the unspec
10181 ;; part.
10182 (define_insn "*push_multi"
10183 [(match_parallel 2 "multi_register_push"
10184 [(set (match_operand:BLK 0 "memory_operand" "=m")
10185 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10186 UNSPEC_PUSH_MULT))])]
10187 "TARGET_32BIT"
10188 "*
10189 {
10190 int num_saves = XVECLEN (operands[2], 0);
10191
10192 /* For the StrongARM at least it is faster to
10193 use STR to store only a single register.
10194 In Thumb mode always use push, and the assembler will pick
10195 something appropriate. */
10196 if (num_saves == 1 && TARGET_ARM)
10197 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10198 else
10199 {
10200 int i;
10201 char pattern[100];
10202
10203 if (TARGET_ARM)
10204 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10205 else
10206 strcpy (pattern, \"push\\t{%1\");
10207
10208 for (i = 1; i < num_saves; i++)
10209 {
10210 strcat (pattern, \", %|\");
10211 strcat (pattern,
10212 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10213 }
10214
10215 strcat (pattern, \"}\");
10216 output_asm_insn (pattern, operands);
10217 }
10218
10219 return \"\";
10220 }"
10221 [(set_attr "type" "store4")]
10222 )
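;; For example, a prologue that saves three registers would typically come
;; out as "stmfd sp!, {r4, r5, lr}" in ARM state and as "push {r4, r5, lr}"
;; in Thumb state; the register choice here is illustrative only.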
10223
10224 (define_insn "stack_tie"
10225 [(set (mem:BLK (scratch))
10226 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10227 (match_operand:SI 1 "s_register_operand" "rk")]
10228 UNSPEC_PRLG_STK))]
10229 ""
10230 ""
10231 [(set_attr "length" "0")]
10232 )
10233
10234 ;; Similarly for the floating point registers
10235 (define_insn "*push_fp_multi"
10236 [(match_parallel 2 "multi_register_push"
10237 [(set (match_operand:BLK 0 "memory_operand" "=m")
10238 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10239 UNSPEC_PUSH_MULT))])]
10240 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10241 "*
10242 {
10243 char pattern[100];
10244
10245 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10246 output_asm_insn (pattern, operands);
10247 return \"\";
10248 }"
10249 [(set_attr "type" "f_store")]
10250 )
10251
10252 ;; Special patterns for dealing with the constant pool
10253
10254 (define_insn "align_4"
10255 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10256 "TARGET_EITHER"
10257 "*
10258 assemble_align (32);
10259 return \"\";
10260 "
10261 )
10262
10263 (define_insn "align_8"
10264 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10265 "TARGET_EITHER"
10266 "*
10267 assemble_align (64);
10268 return \"\";
10269 "
10270 )
10271
10272 (define_insn "consttable_end"
10273 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10274 "TARGET_EITHER"
10275 "*
10276 making_const_table = FALSE;
10277 return \"\";
10278 "
10279 )
10280
10281 (define_insn "consttable_1"
10282 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10283 "TARGET_THUMB1"
10284 "*
10285 making_const_table = TRUE;
10286 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10287 assemble_zeros (3);
10288 return \"\";
10289 "
10290 [(set_attr "length" "4")]
10291 )
10292
10293 (define_insn "consttable_2"
10294 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10295 "TARGET_THUMB1"
10296 "*
10297 making_const_table = TRUE;
10298 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10299 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10300 assemble_zeros (2);
10301 return \"\";
10302 "
10303 [(set_attr "length" "4")]
10304 )
10305
10306 (define_insn "consttable_4"
10307 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10308 "TARGET_EITHER"
10309 "*
10310 {
10311 rtx x = operands[0];
10312 making_const_table = TRUE;
10313 switch (GET_MODE_CLASS (GET_MODE (x)))
10314 {
10315 case MODE_FLOAT:
10316 if (GET_MODE (x) == HFmode)
10317 arm_emit_fp16_const (x);
10318 else
10319 {
10320 REAL_VALUE_TYPE r;
10321 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10322 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10323 }
10324 break;
10325 default:
10326 /* XXX: Sometimes gcc does something really dumb and ends up with
10327 a HIGH in a constant pool entry, usually because it's trying to
10328 load into a VFP register. We know this will always be used in
10329 combination with a LO_SUM which ignores the high bits, so just
10330 strip off the HIGH. */
10331 if (GET_CODE (x) == HIGH)
10332 x = XEXP (x, 0);
10333 assemble_integer (x, 4, BITS_PER_WORD, 1);
10334 mark_symbol_refs_as_used (x);
10335 break;
10336 }
10337 return \"\";
10338 }"
10339 [(set_attr "length" "4")]
10340 )
10341
10342 (define_insn "consttable_8"
10343 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10344 "TARGET_EITHER"
10345 "*
10346 {
10347 making_const_table = TRUE;
10348 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10349 {
10350 case MODE_FLOAT:
10351 {
10352 REAL_VALUE_TYPE r;
10353 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10354 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10355 break;
10356 }
10357 default:
10358 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10359 break;
10360 }
10361 return \"\";
10362 }"
10363 [(set_attr "length" "8")]
10364 )
10365
10366 (define_insn "consttable_16"
10367 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10368 "TARGET_EITHER"
10369 "*
10370 {
10371 making_const_table = TRUE;
10372 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10373 {
10374 case MODE_FLOAT:
10375 {
10376 REAL_VALUE_TYPE r;
10377 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10378 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10379 break;
10380 }
10381 default:
10382 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10383 break;
10384 }
10385 return \"\";
10386 }"
10387 [(set_attr "length" "16")]
10388 )
10389
10390 ;; Miscellaneous Thumb patterns
10391
10392 (define_expand "tablejump"
10393 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10394 (use (label_ref (match_operand 1 "" "")))])]
10395 "TARGET_THUMB1"
10396 "
10397 if (flag_pic)
10398 {
10399 /* Hopefully, CSE will eliminate this copy. */
10400 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10401 rtx reg2 = gen_reg_rtx (SImode);
10402
10403 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10404 operands[0] = reg2;
10405 }
10406 "
10407 )
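;; A switch statement over a dense case range is the usual C-level source of
;; a table jump; a minimal sketch (function name hypothetical):
;;
;;   int dispatch (int i)
;;   {
;;     switch (i)
;;       {
;;       case 0: return 10;
;;       case 1: return 20;
;;       case 2: return 30;
;;       case 3: return 40;
;;       default: return -1;
;;       }
;;   }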
10408
10409 ;; NB never uses BX.
10410 (define_insn "*thumb1_tablejump"
10411 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10412 (use (label_ref (match_operand 1 "" "")))]
10413 "TARGET_THUMB1"
10414 "mov\\t%|pc, %0"
10415 [(set_attr "length" "2")]
10416 )
10417
10418 ;; V5 instructions.
10419
10420 (define_insn "clzsi2"
10421 [(set (match_operand:SI 0 "s_register_operand" "=r")
10422 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10423 "TARGET_32BIT && arm_arch5"
10424 "clz%?\\t%0, %1"
10425 [(set_attr "predicable" "yes")
10426 (set_attr "insn" "clz")])
10427
10428 (define_insn "rbitsi2"
10429 [(set (match_operand:SI 0 "s_register_operand" "=r")
10430 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10431 "TARGET_32BIT && arm_arch_thumb2"
10432 "rbit%?\\t%0, %1"
10433 [(set_attr "predicable" "yes")
10434 (set_attr "insn" "clz")])
10435
10436 (define_expand "ctzsi2"
10437 [(set (match_operand:SI 0 "s_register_operand" "")
10438 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
10439 "TARGET_32BIT && arm_arch_thumb2"
10440 "
10441 {
10442 rtx tmp = gen_reg_rtx (SImode);
10443 emit_insn (gen_rbitsi2 (tmp, operands[1]));
10444 emit_insn (gen_clzsi2 (operands[0], tmp));
10445 }
10446 DONE;
10447 "
10448 )
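;; Worked example of the expansion above: for x = 0x00000028 the trailing
;; zero count is 3; rbit reverses the 32 bits, giving 0x14000000, and clz of
;; that value is again 3.  The usual C-level entry point is __builtin_ctz,
;; which is typically expanded through this named pattern.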
10449
10450 ;; V5E instructions.
10451
10452 (define_insn "prefetch"
10453 [(prefetch (match_operand:SI 0 "address_operand" "p")
10454 (match_operand:SI 1 "" "")
10455 (match_operand:SI 2 "" ""))]
10456 "TARGET_32BIT && arm_arch5e"
10457 "pld\\t%a0")
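;; The named "prefetch" pattern above is normally reached via
;; __builtin_prefetch; a minimal C sketch (function name hypothetical):
;;
;;   void warm_cache (const int *p)
;;   {
;;     __builtin_prefetch (p, 0, 3);   /* read access, high temporal locality */
;;   }
;;
;; which can be emitted as a single "pld [rN]" on ARMv5E and later cores.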
10458
10459 ;; General predication pattern
10460
10461 (define_cond_exec
10462 [(match_operator 0 "arm_comparison_operator"
10463 [(match_operand 1 "cc_register" "")
10464 (const_int 0)])]
10465 "TARGET_32BIT"
10466 ""
10467 )
10468
10469 (define_insn "prologue_use"
10470 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10471 ""
10472 "%@ %0 needed for prologue"
10473 [(set_attr "length" "0")]
10474 )
10475
10476
10477 ;; Patterns for exception handling
10478
10479 (define_expand "eh_return"
10480 [(use (match_operand 0 "general_operand" ""))]
10481 "TARGET_EITHER"
10482 "
10483 {
10484 if (TARGET_32BIT)
10485 emit_insn (gen_arm_eh_return (operands[0]));
10486 else
10487 emit_insn (gen_thumb_eh_return (operands[0]));
10488 DONE;
10489 }"
10490 )
10491
10492 ;; We can't expand this before we know where the link register is stored.
10493 (define_insn_and_split "arm_eh_return"
10494 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10495 VUNSPEC_EH_RETURN)
10496 (clobber (match_scratch:SI 1 "=&r"))]
10497 "TARGET_ARM"
10498 "#"
10499 "&& reload_completed"
10500 [(const_int 0)]
10501 "
10502 {
10503 arm_set_return_address (operands[0], operands[1]);
10504 DONE;
10505 }"
10506 )
10507
10508 (define_insn_and_split "thumb_eh_return"
10509 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10510 VUNSPEC_EH_RETURN)
10511 (clobber (match_scratch:SI 1 "=&l"))]
10512 "TARGET_THUMB1"
10513 "#"
10514 "&& reload_completed"
10515 [(const_int 0)]
10516 "
10517 {
10518 thumb_set_return_address (operands[0], operands[1]);
10519 DONE;
10520 }"
10521 )
10522
10523 \f
10524 ;; TLS support
10525
10526 (define_insn "load_tp_hard"
10527 [(set (match_operand:SI 0 "register_operand" "=r")
10528 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10529 "TARGET_HARD_TP"
10530 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10531 [(set_attr "predicable" "yes")]
10532 )
10533
10534 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10535 (define_insn "load_tp_soft"
10536 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10537 (clobber (reg:SI LR_REGNUM))
10538 (clobber (reg:SI IP_REGNUM))
10539 (clobber (reg:CC CC_REGNUM))]
10540 "TARGET_SOFT_TP"
10541 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10542 [(set_attr "conds" "clob")]
10543 )
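;; Both forms above load the thread pointer used by TLS accesses.  A minimal
;; C sketch of code that needs it (variable and function names hypothetical):
;;
;;   __thread int counter;
;;
;;   int bump (void)
;;   {
;;     return ++counter;   /* address formed relative to the thread pointer */
;;   }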
10544
10545 (define_insn "*arm_movtas_ze"
10546 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
10547 (const_int 16)
10548 (const_int 16))
10549 (match_operand:SI 1 "const_int_operand" ""))]
10550   "arm_arch_thumb2"
10551   "movt%?\t%0, %L1"
10552 [(set_attr "predicable" "yes")
10553 (set_attr "length" "4")]
10554 )
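;; A hedged C sketch of the kind of assignment the zero_extract above
;; describes: replacing only the top 16 bits of a value while keeping the
;; bottom half intact (constant and function name are illustrative; whether
;; combine actually forms the insertion depends on the surrounding code):
;;
;;   unsigned int set_top (unsigned int x)
;;   {
;;     return (x & 0x0000ffffu) | (0x1234u << 16);   /* movt dst, #0x1234 */
;;   }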
10555
10556 (define_insn "*arm_rev"
10557 [(set (match_operand:SI 0 "s_register_operand" "=r")
10558 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10559 "TARGET_32BIT && arm_arch6"
10560 "rev%?\t%0, %1"
10561 [(set_attr "predicable" "yes")
10562 (set_attr "length" "4")]
10563 )
10564
10565 (define_insn "*thumb1_rev"
10566 [(set (match_operand:SI 0 "s_register_operand" "=l")
10567 (bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
10568 "TARGET_THUMB1 && arm_arch6"
10569 "rev\t%0, %1"
10570 [(set_attr "length" "2")]
10571 )
10572
10573 (define_expand "arm_legacy_rev"
10574 [(set (match_operand:SI 2 "s_register_operand" "")
10575 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
10576 (const_int 16))
10577 (match_dup 1)))
10578 (set (match_dup 2)
10579 (lshiftrt:SI (match_dup 2)
10580 (const_int 8)))
10581 (set (match_operand:SI 3 "s_register_operand" "")
10582 (rotatert:SI (match_dup 1)
10583 (const_int 8)))
10584 (set (match_dup 2)
10585 (and:SI (match_dup 2)
10586 (const_int -65281)))
10587 (set (match_operand:SI 0 "s_register_operand" "")
10588 (xor:SI (match_dup 3)
10589 (match_dup 2)))]
10590 "TARGET_32BIT"
10591 ""
10592 )
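;; The expansion above encodes the classic pre-ARMv6 byte-reversal idiom.
;; A C sketch of the same computation, assuming a 32-bit unsigned int
;; (function name hypothetical):
;;
;;   unsigned int legacy_rev (unsigned int x)
;;   {
;;     unsigned int t = x ^ ((x >> 16) | (x << 16));  /* x ^ ror (x, 16) */
;;     t >>= 8;
;;     t &= 0xffff00ffu;                   /* the -65281 mask: clear bits 8-15 */
;;     return ((x >> 8) | (x << 24)) ^ t;  /* ror (x, 8) ^ t = byte-swapped x */
;;   }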
10593
10594 ;; Reuse temporaries to keep register pressure down.
10595 (define_expand "thumb_legacy_rev"
10596 [(set (match_operand:SI 2 "s_register_operand" "")
10597 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
10598 (const_int 24)))
10599 (set (match_operand:SI 3 "s_register_operand" "")
10600 (lshiftrt:SI (match_dup 1)
10601 (const_int 24)))
10602 (set (match_dup 3)
10603 (ior:SI (match_dup 3)
10604 (match_dup 2)))
10605 (set (match_operand:SI 4 "s_register_operand" "")
10606 (const_int 16))
10607 (set (match_operand:SI 5 "s_register_operand" "")
10608 (rotatert:SI (match_dup 1)
10609 (match_dup 4)))
10610 (set (match_dup 2)
10611 (ashift:SI (match_dup 5)
10612 (const_int 24)))
10613 (set (match_dup 5)
10614 (lshiftrt:SI (match_dup 5)
10615 (const_int 24)))
10616 (set (match_dup 5)
10617 (ior:SI (match_dup 5)
10618 (match_dup 2)))
10619 (set (match_dup 5)
10620 (rotatert:SI (match_dup 5)
10621 (match_dup 4)))
10622 (set (match_operand:SI 0 "s_register_operand" "")
10623 (ior:SI (match_dup 5)
10624 (match_dup 3)))]
10625 "TARGET_THUMB"
10626 ""
10627 )
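;; Likewise, a C sketch of the Thumb sequence above, which recycles the same
;; temporaries to stay in the low registers (assumes 32-bit unsigned int;
;; function name hypothetical):
;;
;;   unsigned int thumb_legacy_rev (unsigned int x)
;;   {
;;     unsigned int outer = (x >> 24) | (x << 24);  /* bytes 0/3 swapped, middle clear */
;;     unsigned int mid   = (x >> 16) | (x << 16);  /* ror (x, 16) */
;;     mid = (mid >> 24) | (mid << 24);             /* swap its outer bytes */
;;     mid = (mid >> 16) | (mid << 16);             /* rotate them back to the middle */
;;     return mid | outer;
;;   }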
10628
10629 (define_expand "bswapsi2"
10630 [(set (match_operand:SI 0 "s_register_operand" "=r")
10631 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
10632 "TARGET_EITHER"
10633 "
10634 if (!arm_arch6)
10635 {
10636 if (!optimize_size)
10637 {
10638 rtx op2 = gen_reg_rtx (SImode);
10639 rtx op3 = gen_reg_rtx (SImode);
10640
10641 if (TARGET_THUMB)
10642 {
10643 rtx op4 = gen_reg_rtx (SImode);
10644 rtx op5 = gen_reg_rtx (SImode);
10645
10646 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10647 op2, op3, op4, op5));
10648 }
10649 else
10650 {
10651 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10652 op2, op3));
10653 }
10654
10655 DONE;
10656 }
10657 else
10658 FAIL;
10659 }
10660 "
10661 )
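;; The usual C-level entry point for the expander above (hedged example):
;;
;;   unsigned int swap32 (unsigned int x)
;;   {
;;     return __builtin_bswap32 (x);   /* single rev on ARMv6+; otherwise the
;;                                        legacy sequences above, or a generic
;;                                        fallback when the expander FAILs */
;;   }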
10662
10663 ;; Load the load/store multiple patterns
10664 (include "ldmstm.md")
10665 ;; Load the FPA co-processor patterns
10666 (include "fpa.md")
10667 ;; Load the Maverick co-processor patterns
10668 (include "cirrus.md")
10669 ;; Vector bits common to IWMMXT and Neon
10670 (include "vec-common.md")
10671 ;; Load the Intel Wireless Multimedia Extension patterns
10672 (include "iwmmxt.md")
10673 ;; Load the VFP co-processor patterns
10674 (include "vfp.md")
10675 ;; Thumb-2 patterns
10676 (include "thumb2.md")
10677 ;; Neon patterns
10678 (include "neon.md")
10679 ;; Synchronization Primitives
10680 (include "sync.md")