arm.md (negdi2): Remove redundant code to force values into a register.
1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Add PC and all but the last operand together.
71 ; The last operand is the number of a PIC_LABEL
72 ; that points at the containing instruction.
73 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
74 ; being scheduled before the stack adjustment insn.
75 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76 ; this unspec is used to prevent the deletion of
77 ; instructions setting registers for EH handling
78 ; and stack frame generation. Operand 0 is the
79 ; register to "use".
80 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
94 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
95 ; instruction stream.
96 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
97 ; generate correct unwind information.
98 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99 ; correctly for PIC usage.
100 (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from a
101 ; given symbolic address.
102 (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103 (UNSPEC_RBIT 26) ; rbit operation.
104 (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
105 ; another symbolic address.
106 ]
107 )
108
109 ;; UNSPEC_VOLATILE Usage:
110
111 (define_constants
112 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
113 ; insn in the code.
114 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
115 ; instruction epilogue sequence that isn't expanded
116 ; into normal RTL. Used for both normal and sibcall
117 ; epilogues.
118 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
119 ; for inlined constants.
120 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
121 ; table.
122 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
123 ; an 8-bit object.
124 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
125 ; a 16-bit object.
126 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
127 ; a 32-bit object.
128 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
129 ; a 64-bit object.
130 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
131 ; a 128-bit object.
132 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
133 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
134 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
135 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
136 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
137 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
138 (VUNSPEC_EH_RETURN 20); Used to override the return address for exception
139 ; handling.
140 ]
141 )
142 \f
143 ;;---------------------------------------------------------------------------
144 ;; Attributes
145
146 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
147 ; generating ARM code. This is used to control the length of some insn
148 ; patterns that share the same RTL in both ARM and Thumb code.
149 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
150
151 ;; Operand number of an input operand that is shifted. Zero if the
152 ;; given instruction does not shift one of its input operands.
153 (define_attr "shift" "" (const_int 0))
154
155 ; Floating Point Unit. If we only have floating point emulation, then there
156 ; is no point in scheduling the floating point insns. (Well, for best
157 ; performance we should try and group them together).
158 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
159 (const (symbol_ref "arm_fpu_attr")))
160
161 ; LENGTH of an instruction (in bytes)
162 (define_attr "length" "" (const_int 4))
163
164 ; POOL_RANGE is how far away from a constant pool entry that this insn
165 ; can be placed. If the distance is zero, then this insn will never
166 ; reference the pool.
167 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
168 ; before its address.
169 (define_attr "pool_range" "" (const_int 0))
170 (define_attr "neg_pool_range" "" (const_int 0))
171
172 ; An assembler sequence may clobber the condition codes without us knowing.
173 ; If such an insn references the pool, then we have no way of knowing how,
174 ; so use the most conservative value for pool_range.
175 (define_asm_attributes
176 [(set_attr "conds" "clob")
177 (set_attr "length" "4")
178 (set_attr "pool_range" "250")])
179
180 ;; The instruction used to implement a particular pattern. This
181 ;; information is used by pipeline descriptions to provide accurate
182 ;; scheduling information.
183
184 (define_attr "insn"
185 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
186 (const_string "other"))
187
188 ; TYPE attribute is used to detect floating point instructions which, if
189 ; running on a co-processor, can run in parallel with other basic instructions.
190 ; If write-buffer scheduling is enabled then it can also be used in the
191 ; scheduling of writes.
192
193 ; Classification of each insn
194 ; Note: vfp.md has different meanings for some of these, and some further
195 ; types as well. See that file for details.
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
223 ; branch a branch
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
237 ;
238
239 (define_attr "type"
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
241 (if_then_else
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
245
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
249
250 ;; Classification of NEON instructions for scheduling purposes.
251 ;; Do not set this attribute and the "type" attribute together in
252 ;; any one instruction pattern.
253 (define_attr "neon_type"
254 "neon_int_1,\
255 neon_int_2,\
256 neon_int_3,\
257 neon_int_4,\
258 neon_int_5,\
259 neon_vqneg_vqabs,\
260 neon_vmov,\
261 neon_vaba,\
262 neon_vsma,\
263 neon_vaba_qqq,\
264 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
265 neon_mul_qqq_8_16_32_ddd_32,\
266 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
267 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
268 neon_mla_qqq_8_16,\
269 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
270 neon_mla_qqq_32_qqd_32_scalar,\
271 neon_mul_ddd_16_scalar_32_16_long_scalar,\
272 neon_mul_qqd_32_scalar,\
273 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
274 neon_shift_1,\
275 neon_shift_2,\
276 neon_shift_3,\
277 neon_vshl_ddd,\
278 neon_vqshl_vrshl_vqrshl_qqq,\
279 neon_vsra_vrsra,\
280 neon_fp_vadd_ddd_vabs_dd,\
281 neon_fp_vadd_qqq_vabs_qq,\
282 neon_fp_vsum,\
283 neon_fp_vmul_ddd,\
284 neon_fp_vmul_qqd,\
285 neon_fp_vmla_ddd,\
286 neon_fp_vmla_qqq,\
287 neon_fp_vmla_ddd_scalar,\
288 neon_fp_vmla_qqq_scalar,\
289 neon_fp_vrecps_vrsqrts_ddd,\
290 neon_fp_vrecps_vrsqrts_qqq,\
291 neon_bp_simple,\
292 neon_bp_2cycle,\
293 neon_bp_3cycle,\
294 neon_ldr,\
295 neon_str,\
296 neon_vld1_1_2_regs,\
297 neon_vld1_3_4_regs,\
298 neon_vld2_2_regs_vld1_vld2_all_lanes,\
299 neon_vld2_4_regs,\
300 neon_vld3_vld4,\
301 neon_vst1_1_2_regs_vst2_2_regs,\
302 neon_vst1_3_4_regs,\
303 neon_vst2_4_regs_vst3_vst4,\
304 neon_vst3_vst4,\
305 neon_vld1_vld2_lane,\
306 neon_vld3_vld4_lane,\
307 neon_vst1_vst2_lane,\
308 neon_vst3_vst4_lane,\
309 neon_vld3_vld4_all_lanes,\
310 neon_mcr,\
311 neon_mcr_2_mcrr,\
312 neon_mrc,\
313 neon_mrrc,\
314 neon_ldm_2,\
315 neon_stm_2,\
316 none"
317 (const_string "none"))
318
319 ; condition codes: this one is used by final_prescan_insn to speed up
320 ; conditionalizing instructions. It saves having to scan the rtl to see if
321 ; it uses or alters the condition codes.
322 ;
323 ; USE means that the condition codes are used by the insn in the process of
324 ; outputting code; this means (at present) that we can't use the insn in
325 ; inlined branches
326 ;
327 ; SET means that the purpose of the insn is to set the condition codes in a
328 ; well defined manner.
329 ;
330 ; CLOB means that the condition codes are altered in an undefined manner, if
331 ; they are altered at all
332 ;
333 ; UNCONDITIONAL means the instruction cannot be conditionally executed.
334 ;
335 ; NOCOND means that the condition codes are neither altered by this insn
336 ; nor do they affect its output
337
338 (define_attr "conds" "use,set,clob,unconditional,nocond"
339 (if_then_else (eq_attr "type" "call")
340 (const_string "clob")
341 (if_then_else (eq_attr "neon_type" "none")
342 (const_string "nocond")
343 (const_string "unconditional"))))
344
345 ; Predicable means that the insn can be conditionally executed based on
346 ; an automatically added predicate (additional patterns are generated by
347 ; gen...). We default to 'no' because no Thumb patterns match this rule
348 ; and not all ARM patterns do.
349 (define_attr "predicable" "no,yes" (const_string "no"))
350
351 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
352 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
353 ; suffer blockages enough to warrant modelling this (and it can adversely
354 ; affect the schedule).
355 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
356
357 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
358 ; to stall the processor. Used with model_wbuf above.
359 (define_attr "write_conflict" "no,yes"
360 (if_then_else (eq_attr "type"
361 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
362 (const_string "yes")
363 (const_string "no")))
364
365 ; Classify the insns into those that take one cycle and those that take more
366 ; than one on the main cpu execution unit.
367 (define_attr "core_cycles" "single,multi"
368 (if_then_else (eq_attr "type"
369 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
370 (const_string "single")
371 (const_string "multi")))
372
373 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
374 ;; distant label. Only applicable to Thumb code.
375 (define_attr "far_jump" "yes,no" (const_string "no"))
376
377
378 ;; The number of machine instructions this pattern expands to.
379 ;; Used for Thumb-2 conditional execution.
380 (define_attr "ce_count" "" (const_int 1))
381
382 ;;---------------------------------------------------------------------------
383 ;; Mode iterators
384
385 ; A list of modes that are exactly 64 bits in size. We use this to expand
386 ; some splits that are the same for all modes when operating on ARM
387 ; registers.
388 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
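;; For illustration: a single pattern written with ANY64 is expanded by the
;; machine-description reader into one copy per listed mode, so a split using
;; (match_operand:ANY64 ...) stands for separate DI, DF, V8QI, V4HI, V2SI and
;; V2SF variants of the same pattern.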
389
390 ;; The integer modes up to word size
391 (define_mode_iterator QHSI [QI HI SI])
392
393 ;;---------------------------------------------------------------------------
394 ;; Predicates
395
396 (include "predicates.md")
397 (include "constraints.md")
398
399 ;;---------------------------------------------------------------------------
400 ;; Pipeline descriptions
401
402 ;; Processor type. This is created automatically from arm-cores.def.
403 (include "arm-tune.md")
404
405 (define_attr "tune_cortexr4" "yes,no"
406 (const (if_then_else
407 (eq_attr "tune" "cortexr4,cortexr4f")
408 (const_string "yes")
409 (const_string "no"))))
410
411 ;; True if the generic scheduling description should be used.
412
413 (define_attr "generic_sched" "yes,no"
414 (const (if_then_else
415 (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
416 (eq_attr "tune_cortexr4" "yes"))
417 (const_string "no")
418 (const_string "yes"))))
419
420 (define_attr "generic_vfp" "yes,no"
421 (const (if_then_else
422 (and (eq_attr "fpu" "vfp")
423 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
424 (eq_attr "tune_cortexr4" "no"))
425 (const_string "yes")
426 (const_string "no"))))
427
428 (include "arm-generic.md")
429 (include "arm926ejs.md")
430 (include "arm1020e.md")
431 (include "arm1026ejs.md")
432 (include "arm1136jfs.md")
433 (include "cortex-a8.md")
434 (include "cortex-a9.md")
435 (include "cortex-r4.md")
436 (include "cortex-r4f.md")
437 (include "vfp11.md")
438
439 \f
440 ;;---------------------------------------------------------------------------
441 ;; Insn patterns
442 ;;
443 ;; Addition insns.
444
445 ;; Note: For DImode insns, there is normally no reason why operands should
446 ;; not be in the same register; what we don't want is for something being
447 ;; written to partially overlap something that is an input.
448 ;; Cirrus 64-bit additions should not be split because we have a native
449 ;; 64-bit addition instruction.
450
451 (define_expand "adddi3"
452 [(parallel
453 [(set (match_operand:DI 0 "s_register_operand" "")
454 (plus:DI (match_operand:DI 1 "s_register_operand" "")
455 (match_operand:DI 2 "s_register_operand" "")))
456 (clobber (reg:CC CC_REGNUM))])]
457 "TARGET_EITHER"
458 "
459 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
460 {
461 if (!cirrus_fp_register (operands[0], DImode))
462 operands[0] = force_reg (DImode, operands[0]);
463 if (!cirrus_fp_register (operands[1], DImode))
464 operands[1] = force_reg (DImode, operands[1]);
465 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
466 DONE;
467 }
468
469 if (TARGET_THUMB1)
470 {
471 if (GET_CODE (operands[1]) != REG)
472 operands[1] = force_reg (DImode, operands[1]);
473 if (GET_CODE (operands[2]) != REG)
474 operands[2] = force_reg (DImode, operands[2]);
475 }
476 "
477 )
478
479 (define_insn "*thumb1_adddi3"
480 [(set (match_operand:DI 0 "register_operand" "=l")
481 (plus:DI (match_operand:DI 1 "register_operand" "%0")
482 (match_operand:DI 2 "register_operand" "l")))
483 (clobber (reg:CC CC_REGNUM))
484 ]
485 "TARGET_THUMB1"
486 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
487 [(set_attr "length" "4")]
488 )
489
490 (define_insn_and_split "*arm_adddi3"
491 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
492 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
493 (match_operand:DI 2 "s_register_operand" "r, 0")))
494 (clobber (reg:CC CC_REGNUM))]
495 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
496 "#"
497 "TARGET_32BIT && reload_completed"
498 [(parallel [(set (reg:CC_C CC_REGNUM)
499 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
500 (match_dup 1)))
501 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
502 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
503 (plus:SI (match_dup 4) (match_dup 5))))]
504 "
505 {
506 operands[3] = gen_highpart (SImode, operands[0]);
507 operands[0] = gen_lowpart (SImode, operands[0]);
508 operands[4] = gen_highpart (SImode, operands[1]);
509 operands[1] = gen_lowpart (SImode, operands[1]);
510 operands[5] = gen_highpart (SImode, operands[2]);
511 operands[2] = gen_lowpart (SImode, operands[2]);
512 }"
513 [(set_attr "conds" "clob")
514 (set_attr "length" "8")]
515 )
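;; As an illustration (register numbers chosen arbitrarily, not from the
;; original sources), the split above turns a 64-bit addition into an ADDS on
;; the low words followed by an ADC on the high words, e.g.:
;;   adds  r0, r0, r2   @ low word; sets the carry flag
;;   adc   r1, r1, r3   @ high word; consumes the carry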
516
517 (define_insn_and_split "*adddi_sesidi_di"
518 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
519 (plus:DI (sign_extend:DI
520 (match_operand:SI 2 "s_register_operand" "r,r"))
521 (match_operand:DI 1 "s_register_operand" "0,r")))
522 (clobber (reg:CC CC_REGNUM))]
523 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
524 "#"
525 "TARGET_32BIT && reload_completed"
526 [(parallel [(set (reg:CC_C CC_REGNUM)
527 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
528 (match_dup 1)))
529 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
530 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
531 (plus:SI (ashiftrt:SI (match_dup 2)
532 (const_int 31))
533 (match_dup 4))))]
534 "
535 {
536 operands[3] = gen_highpart (SImode, operands[0]);
537 operands[0] = gen_lowpart (SImode, operands[0]);
538 operands[4] = gen_highpart (SImode, operands[1]);
539 operands[1] = gen_lowpart (SImode, operands[1]);
540 operands[2] = gen_lowpart (SImode, operands[2]);
541 }"
542 [(set_attr "conds" "clob")
543 (set_attr "length" "8")]
544 )
545
546 (define_insn_and_split "*adddi_zesidi_di"
547 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
548 (plus:DI (zero_extend:DI
549 (match_operand:SI 2 "s_register_operand" "r,r"))
550 (match_operand:DI 1 "s_register_operand" "0,r")))
551 (clobber (reg:CC CC_REGNUM))]
552 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
553 "#"
554 "TARGET_32BIT && reload_completed"
555 [(parallel [(set (reg:CC_C CC_REGNUM)
556 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
557 (match_dup 1)))
558 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
559 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
560 (plus:SI (match_dup 4) (const_int 0))))]
561 "
562 {
563 operands[3] = gen_highpart (SImode, operands[0]);
564 operands[0] = gen_lowpart (SImode, operands[0]);
565 operands[4] = gen_highpart (SImode, operands[1]);
566 operands[1] = gen_lowpart (SImode, operands[1]);
567 operands[2] = gen_lowpart (SImode, operands[2]);
568 }"
569 [(set_attr "conds" "clob")
570 (set_attr "length" "8")]
571 )
572
573 (define_expand "addsi3"
574 [(set (match_operand:SI 0 "s_register_operand" "")
575 (plus:SI (match_operand:SI 1 "s_register_operand" "")
576 (match_operand:SI 2 "reg_or_int_operand" "")))]
577 "TARGET_EITHER"
578 "
579 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
580 {
581 arm_split_constant (PLUS, SImode, NULL_RTX,
582 INTVAL (operands[2]), operands[0], operands[1],
583 optimize && can_create_pseudo_p ());
584 DONE;
585 }
586 "
587 )
588
589 ; If there is a scratch available, this will be faster than synthesizing the
590 ; addition.
591 (define_peephole2
592 [(match_scratch:SI 3 "r")
593 (set (match_operand:SI 0 "arm_general_register_operand" "")
594 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
595 (match_operand:SI 2 "const_int_operand" "")))]
596 "TARGET_32BIT &&
597 !(const_ok_for_arm (INTVAL (operands[2]))
598 || const_ok_for_arm (-INTVAL (operands[2])))
599 && const_ok_for_arm (~INTVAL (operands[2]))"
600 [(set (match_dup 3) (match_dup 2))
601 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
602 ""
603 )
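;; A hedged example (constant chosen purely for illustration): #0x00FFFFFF is
;; not a valid ARM immediate, nor is its negation, but its complement
;; #0xFF000000 is, so when a scratch register is free this peephole allows
;; something like
;;   mvn   r3, #0xFF000000   @ r3 = 0x00FFFFFF
;;   add   r0, r1, r3
;; rather than synthesizing the constant inside the addition.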
604
605 ;; The r/r/k alternative is required when reloading the address
606 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
607 ;; put the duplicated register first, and not try the commutative version.
608 (define_insn_and_split "*arm_addsi3"
609 [(set (match_operand:SI 0 "s_register_operand" "=r, !k, r,r, !k,r")
610 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
611 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
612 "TARGET_32BIT"
613 "@
614 add%?\\t%0, %1, %2
615 add%?\\t%0, %1, %2
616 add%?\\t%0, %2, %1
617 sub%?\\t%0, %1, #%n2
618 sub%?\\t%0, %1, #%n2
619 #"
620 "TARGET_32BIT
621 && GET_CODE (operands[2]) == CONST_INT
622 && !(const_ok_for_arm (INTVAL (operands[2]))
623 || const_ok_for_arm (-INTVAL (operands[2])))
624 && (reload_completed || !arm_eliminable_register (operands[1]))"
625 [(clobber (const_int 0))]
626 "
627 arm_split_constant (PLUS, SImode, curr_insn,
628 INTVAL (operands[2]), operands[0],
629 operands[1], 0);
630 DONE;
631 "
632 [(set_attr "length" "4,4,4,4,4,16")
633 (set_attr "predicable" "yes")]
634 )
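;; Illustrative sketch only: a constant such as #0x10001 is not encodable as
;; an ARM immediate (nor is its negation), so the splitter defers to
;; arm_split_constant, which may synthesize the operation as, e.g.:
;;   add   r0, r1, #65536
;;   add   r0, r0, #1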
635
636 ;; Register group 'k' is a single register group containing only the stack
637 ;; register. Trying to reload it will always fail catastrophically,
638 ;; so never allow those alternatives to match if reloading is needed.
639
640 (define_insn_and_split "*thumb1_addsi3"
641 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
642 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
643 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
644 "TARGET_THUMB1"
645 "*
646 static const char * const asms[] =
647 {
648 \"add\\t%0, %0, %2\",
649 \"sub\\t%0, %0, #%n2\",
650 \"add\\t%0, %1, %2\",
651 \"add\\t%0, %0, %2\",
652 \"add\\t%0, %0, %2\",
653 \"add\\t%0, %1, %2\",
654 \"add\\t%0, %1, %2\",
655 \"#\",
656 \"#\"
657 };
658 if ((which_alternative == 2 || which_alternative == 6)
659 && GET_CODE (operands[2]) == CONST_INT
660 && INTVAL (operands[2]) < 0)
661 return \"sub\\t%0, %1, #%n2\";
662 return asms[which_alternative];
663 "
664 "&& reload_completed && CONST_INT_P (operands[2])
665 && operands[1] != stack_pointer_rtx
666 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
667 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
668 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
669 {
670 HOST_WIDE_INT offset = INTVAL (operands[2]);
671 if (offset > 255)
672 offset = 255;
673 else if (offset < -255)
674 offset = -255;
675
676 operands[3] = GEN_INT (offset);
677 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
678 }
679 [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
680 )
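;; Sketch of the split above (values illustrative): adding #300 is out of
;; range for a single Thumb-1 immediate, so it is broken into an add of #45
;; followed by an add of #255, each of which fits the 8-bit form, e.g.
;;   add   r0, r0, #45
;;   add   r0, r0, #255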
681
682 ;; Reloading and elimination of the frame pointer can
683 ;; sometimes cause this optimization to be missed.
684 (define_peephole2
685 [(set (match_operand:SI 0 "arm_general_register_operand" "")
686 (match_operand:SI 1 "const_int_operand" ""))
687 (set (match_dup 0)
688 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
689 "TARGET_THUMB1
690 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
691 && (INTVAL (operands[1]) & 3) == 0"
692 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
693 ""
694 )
695
696 ;; ??? Make Thumb-2 variants which prefer low regs
697 (define_insn "*addsi3_compare0"
698 [(set (reg:CC_NOOV CC_REGNUM)
699 (compare:CC_NOOV
700 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
701 (match_operand:SI 2 "arm_add_operand" "rI,L"))
702 (const_int 0)))
703 (set (match_operand:SI 0 "s_register_operand" "=r,r")
704 (plus:SI (match_dup 1) (match_dup 2)))]
705 "TARGET_32BIT"
706 "@
707 add%.\\t%0, %1, %2
708 sub%.\\t%0, %1, #%n2"
709 [(set_attr "conds" "set")]
710 )
711
712 (define_insn "*addsi3_compare0_scratch"
713 [(set (reg:CC_NOOV CC_REGNUM)
714 (compare:CC_NOOV
715 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
716 (match_operand:SI 1 "arm_add_operand" "rI,L"))
717 (const_int 0)))]
718 "TARGET_32BIT"
719 "@
720 cmn%?\\t%0, %1
721 cmp%?\\t%0, #%n1"
722 [(set_attr "conds" "set")]
723 )
724
725 (define_insn "*compare_negsi_si"
726 [(set (reg:CC_Z CC_REGNUM)
727 (compare:CC_Z
728 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
729 (match_operand:SI 1 "s_register_operand" "r")))]
730 "TARGET_32BIT"
731 "cmn%?\\t%1, %0"
732 [(set_attr "conds" "set")]
733 )
734
735 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
736 ;; addend is a constant.
737 (define_insn "*cmpsi2_addneg"
738 [(set (reg:CC CC_REGNUM)
739 (compare:CC
740 (match_operand:SI 1 "s_register_operand" "r,r")
741 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
742 (set (match_operand:SI 0 "s_register_operand" "=r,r")
743 (plus:SI (match_dup 1)
744 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
745 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
746 "@
747 sub%.\\t%0, %1, %2
748 add%.\\t%0, %1, #%n2"
749 [(set_attr "conds" "set")]
750 )
751
752 ;; Convert the sequence
753 ;; sub rd, rn, #1
754 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
755 ;; bne dest
756 ;; into
757 ;; subs rd, rn, #1
758 ;; bcs dest ((unsigned)rn >= 1)
759 ;; similarly for the beq variant using bcc.
760 ;; This is a common looping idiom (while (n--))
761 (define_peephole2
762 [(set (match_operand:SI 0 "arm_general_register_operand" "")
763 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
764 (const_int -1)))
765 (set (match_operand 2 "cc_register" "")
766 (compare (match_dup 0) (const_int -1)))
767 (set (pc)
768 (if_then_else (match_operator 3 "equality_operator"
769 [(match_dup 2) (const_int 0)])
770 (match_operand 4 "" "")
771 (match_operand 5 "" "")))]
772 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
773 [(parallel[
774 (set (match_dup 2)
775 (compare:CC
776 (match_dup 1) (const_int 1)))
777 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
778 (set (pc)
779 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
780 (match_dup 4)
781 (match_dup 5)))]
782 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
783 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
784 ? GEU : LTU),
785 VOIDmode,
786 operands[2], const0_rtx);"
787 )
788
789 ;; The next four insns work because they compare the result with one of
790 ;; the operands, and we know that the use of the condition code is
791 ;; either GEU or LTU, so we can use the carry flag from the addition
792 ;; instead of doing the compare a second time.
793 (define_insn "*addsi3_compare_op1"
794 [(set (reg:CC_C CC_REGNUM)
795 (compare:CC_C
796 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
797 (match_operand:SI 2 "arm_add_operand" "rI,L"))
798 (match_dup 1)))
799 (set (match_operand:SI 0 "s_register_operand" "=r,r")
800 (plus:SI (match_dup 1) (match_dup 2)))]
801 "TARGET_32BIT"
802 "@
803 add%.\\t%0, %1, %2
804 sub%.\\t%0, %1, #%n2"
805 [(set_attr "conds" "set")]
806 )
807
808 (define_insn "*addsi3_compare_op2"
809 [(set (reg:CC_C CC_REGNUM)
810 (compare:CC_C
811 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
812 (match_operand:SI 2 "arm_add_operand" "rI,L"))
813 (match_dup 2)))
814 (set (match_operand:SI 0 "s_register_operand" "=r,r")
815 (plus:SI (match_dup 1) (match_dup 2)))]
816 "TARGET_32BIT"
817 "@
818 add%.\\t%0, %1, %2
819 sub%.\\t%0, %1, #%n2"
820 [(set_attr "conds" "set")]
821 )
822
823 (define_insn "*compare_addsi2_op0"
824 [(set (reg:CC_C CC_REGNUM)
825 (compare:CC_C
826 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
827 (match_operand:SI 1 "arm_add_operand" "rI,L"))
828 (match_dup 0)))]
829 "TARGET_32BIT"
830 "@
831 cmn%?\\t%0, %1
832 cmp%?\\t%0, #%n1"
833 [(set_attr "conds" "set")]
834 )
835
836 (define_insn "*compare_addsi2_op1"
837 [(set (reg:CC_C CC_REGNUM)
838 (compare:CC_C
839 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
840 (match_operand:SI 1 "arm_add_operand" "rI,L"))
841 (match_dup 1)))]
842 "TARGET_32BIT"
843 "@
844 cmn%?\\t%0, %1
845 cmp%?\\t%0, #%n1"
846 [(set_attr "conds" "set")]
847 )
848
849 (define_insn "*addsi3_carryin"
850 [(set (match_operand:SI 0 "s_register_operand" "=r")
851 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
852 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
853 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
854 "TARGET_32BIT"
855 "adc%?\\t%0, %1, %2"
856 [(set_attr "conds" "use")]
857 )
858
859 (define_insn "*addsi3_carryin_shift"
860 [(set (match_operand:SI 0 "s_register_operand" "=r")
861 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
862 (plus:SI
863 (match_operator:SI 2 "shift_operator"
864 [(match_operand:SI 3 "s_register_operand" "r")
865 (match_operand:SI 4 "reg_or_int_operand" "rM")])
866 (match_operand:SI 1 "s_register_operand" "r"))))]
867 "TARGET_32BIT"
868 "adc%?\\t%0, %1, %3%S2"
869 [(set_attr "conds" "use")
870 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
871 (const_string "alu_shift")
872 (const_string "alu_shift_reg")))]
873 )
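;; Example output (illustrative): with a constant shift count this prints
;; something like
;;   adc   r0, r1, r2, lsl #3
;; where %S2 supplies the shift operator and amount applied to operand 3.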
874
875 (define_insn "*addsi3_carryin_alt1"
876 [(set (match_operand:SI 0 "s_register_operand" "=r")
877 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
878 (match_operand:SI 2 "arm_rhs_operand" "rI"))
879 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
880 "TARGET_32BIT"
881 "adc%?\\t%0, %1, %2"
882 [(set_attr "conds" "use")]
883 )
884
885 (define_insn "*addsi3_carryin_alt2"
886 [(set (match_operand:SI 0 "s_register_operand" "=r")
887 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
888 (match_operand:SI 1 "s_register_operand" "r"))
889 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
890 "TARGET_32BIT"
891 "adc%?\\t%0, %1, %2"
892 [(set_attr "conds" "use")]
893 )
894
895 (define_insn "*addsi3_carryin_alt3"
896 [(set (match_operand:SI 0 "s_register_operand" "=r")
897 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
898 (match_operand:SI 2 "arm_rhs_operand" "rI"))
899 (match_operand:SI 1 "s_register_operand" "r")))]
900 "TARGET_32BIT"
901 "adc%?\\t%0, %1, %2"
902 [(set_attr "conds" "use")]
903 )
904
905 (define_expand "incscc"
906 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
907 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
908 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
909 (match_operand:SI 1 "s_register_operand" "0,?r")))]
910 "TARGET_32BIT"
911 ""
912 )
913
914 (define_insn "*arm_incscc"
915 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
916 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
917 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
918 (match_operand:SI 1 "s_register_operand" "0,?r")))]
919 "TARGET_ARM"
920 "@
921 add%d2\\t%0, %1, #1
922 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
923 [(set_attr "conds" "use")
924 (set_attr "length" "4,8")]
925 )
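;; For instance (purely illustrative), when operand 2 is an EQ test the first
;; alternative prints  addeq r0, r1, #1  while the second prints
;;   movne r0, r1 ; addeq r0, r1, #1
;; using %D2/%d2 for the inverse and direct condition respectively.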
926
927 ; Transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
928 (define_split
929 [(set (match_operand:SI 0 "s_register_operand" "")
930 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
931 (match_operand:SI 2 "s_register_operand" ""))
932 (const_int -1)))
933 (clobber (match_operand:SI 3 "s_register_operand" ""))]
934 "TARGET_32BIT"
935 [(set (match_dup 3) (match_dup 1))
936 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
937 "
938 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
939 ")
940
941 (define_expand "addsf3"
942 [(set (match_operand:SF 0 "s_register_operand" "")
943 (plus:SF (match_operand:SF 1 "s_register_operand" "")
944 (match_operand:SF 2 "arm_float_add_operand" "")))]
945 "TARGET_32BIT && TARGET_HARD_FLOAT"
946 "
947 if (TARGET_MAVERICK
948 && !cirrus_fp_register (operands[2], SFmode))
949 operands[2] = force_reg (SFmode, operands[2]);
950 ")
951
952 (define_expand "adddf3"
953 [(set (match_operand:DF 0 "s_register_operand" "")
954 (plus:DF (match_operand:DF 1 "s_register_operand" "")
955 (match_operand:DF 2 "arm_float_add_operand" "")))]
956 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
957 "
958 if (TARGET_MAVERICK
959 && !cirrus_fp_register (operands[2], DFmode))
960 operands[2] = force_reg (DFmode, operands[2]);
961 ")
962
963 (define_expand "subdi3"
964 [(parallel
965 [(set (match_operand:DI 0 "s_register_operand" "")
966 (minus:DI (match_operand:DI 1 "s_register_operand" "")
967 (match_operand:DI 2 "s_register_operand" "")))
968 (clobber (reg:CC CC_REGNUM))])]
969 "TARGET_EITHER"
970 "
971 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
972 && TARGET_32BIT
973 && cirrus_fp_register (operands[0], DImode)
974 && cirrus_fp_register (operands[1], DImode))
975 {
976 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
977 DONE;
978 }
979
980 if (TARGET_THUMB1)
981 {
982 if (GET_CODE (operands[1]) != REG)
983 operands[1] = force_reg (DImode, operands[1]);
984 if (GET_CODE (operands[2]) != REG)
985 operands[2] = force_reg (DImode, operands[2]);
986 }
987 "
988 )
989
990 (define_insn "*arm_subdi3"
991 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
992 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
993 (match_operand:DI 2 "s_register_operand" "r,0,0")))
994 (clobber (reg:CC CC_REGNUM))]
995 "TARGET_32BIT"
996 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
997 [(set_attr "conds" "clob")
998 (set_attr "length" "8")]
999 )
1000
1001 (define_insn "*thumb_subdi3"
1002 [(set (match_operand:DI 0 "register_operand" "=l")
1003 (minus:DI (match_operand:DI 1 "register_operand" "0")
1004 (match_operand:DI 2 "register_operand" "l")))
1005 (clobber (reg:CC CC_REGNUM))]
1006 "TARGET_THUMB1"
1007 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1008 [(set_attr "length" "4")]
1009 )
1010
1011 (define_insn "*subdi_di_zesidi"
1012 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1013 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1014 (zero_extend:DI
1015 (match_operand:SI 2 "s_register_operand" "r,r"))))
1016 (clobber (reg:CC CC_REGNUM))]
1017 "TARGET_32BIT"
1018 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1019 [(set_attr "conds" "clob")
1020 (set_attr "length" "8")]
1021 )
1022
1023 (define_insn "*subdi_di_sesidi"
1024 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1025 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1026 (sign_extend:DI
1027 (match_operand:SI 2 "s_register_operand" "r,r"))))
1028 (clobber (reg:CC CC_REGNUM))]
1029 "TARGET_32BIT"
1030 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1031 [(set_attr "conds" "clob")
1032 (set_attr "length" "8")]
1033 )
1034
1035 (define_insn "*subdi_zesidi_di"
1036 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1037 (minus:DI (zero_extend:DI
1038 (match_operand:SI 2 "s_register_operand" "r,r"))
1039 (match_operand:DI 1 "s_register_operand" "0,r")))
1040 (clobber (reg:CC CC_REGNUM))]
1041 "TARGET_ARM"
1042 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1043 [(set_attr "conds" "clob")
1044 (set_attr "length" "8")]
1045 )
1046
1047 (define_insn "*subdi_sesidi_di"
1048 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1049 (minus:DI (sign_extend:DI
1050 (match_operand:SI 2 "s_register_operand" "r,r"))
1051 (match_operand:DI 1 "s_register_operand" "0,r")))
1052 (clobber (reg:CC CC_REGNUM))]
1053 "TARGET_ARM"
1054 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1055 [(set_attr "conds" "clob")
1056 (set_attr "length" "8")]
1057 )
1058
1059 (define_insn "*subdi_zesidi_zesidi"
1060 [(set (match_operand:DI 0 "s_register_operand" "=r")
1061 (minus:DI (zero_extend:DI
1062 (match_operand:SI 1 "s_register_operand" "r"))
1063 (zero_extend:DI
1064 (match_operand:SI 2 "s_register_operand" "r"))))
1065 (clobber (reg:CC CC_REGNUM))]
1066 "TARGET_32BIT"
1067 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1068 [(set_attr "conds" "clob")
1069 (set_attr "length" "8")]
1070 )
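;; Note: the high word here comes from  sbc %R0, %1, %1 , i.e. %1 - %1 - !C,
;; which is 0 when the low-word subtraction did not borrow and -1 when it did,
;; giving the correct upper word of the 64-bit difference.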
1071
1072 (define_expand "subsi3"
1073 [(set (match_operand:SI 0 "s_register_operand" "")
1074 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1075 (match_operand:SI 2 "s_register_operand" "")))]
1076 "TARGET_EITHER"
1077 "
1078 if (GET_CODE (operands[1]) == CONST_INT)
1079 {
1080 if (TARGET_32BIT)
1081 {
1082 arm_split_constant (MINUS, SImode, NULL_RTX,
1083 INTVAL (operands[1]), operands[0],
1084 operands[2], optimize && can_create_pseudo_p ());
1085 DONE;
1086 }
1087 else /* TARGET_THUMB1 */
1088 operands[1] = force_reg (SImode, operands[1]);
1089 }
1090 "
1091 )
1092
1093 (define_insn "*thumb1_subsi3_insn"
1094 [(set (match_operand:SI 0 "register_operand" "=l")
1095 (minus:SI (match_operand:SI 1 "register_operand" "l")
1096 (match_operand:SI 2 "register_operand" "l")))]
1097 "TARGET_THUMB1"
1098 "sub\\t%0, %1, %2"
1099 [(set_attr "length" "2")]
1100 )
1101
1102 ; ??? Check Thumb-2 split length
1103 (define_insn_and_split "*arm_subsi3_insn"
1104 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1105 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1106 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1107 "TARGET_32BIT"
1108 "@
1109 rsb%?\\t%0, %2, %1
1110 sub%?\\t%0, %1, %2
1111 #"
1112 "TARGET_32BIT
1113 && GET_CODE (operands[1]) == CONST_INT
1114 && !const_ok_for_arm (INTVAL (operands[1]))"
1115 [(clobber (const_int 0))]
1116 "
1117 arm_split_constant (MINUS, SImode, curr_insn,
1118 INTVAL (operands[1]), operands[0], operands[2], 0);
1119 DONE;
1120 "
1121 [(set_attr "length" "4,4,16")
1122 (set_attr "predicable" "yes")]
1123 )
1124
1125 (define_peephole2
1126 [(match_scratch:SI 3 "r")
1127 (set (match_operand:SI 0 "arm_general_register_operand" "")
1128 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1129 (match_operand:SI 2 "arm_general_register_operand" "")))]
1130 "TARGET_32BIT
1131 && !const_ok_for_arm (INTVAL (operands[1]))
1132 && const_ok_for_arm (~INTVAL (operands[1]))"
1133 [(set (match_dup 3) (match_dup 1))
1134 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1135 ""
1136 )
1137
1138 (define_insn "*subsi3_compare0"
1139 [(set (reg:CC_NOOV CC_REGNUM)
1140 (compare:CC_NOOV
1141 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1142 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1143 (const_int 0)))
1144 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1145 (minus:SI (match_dup 1) (match_dup 2)))]
1146 "TARGET_32BIT"
1147 "@
1148 sub%.\\t%0, %1, %2
1149 rsb%.\\t%0, %2, %1"
1150 [(set_attr "conds" "set")]
1151 )
1152
1153 (define_expand "decscc"
1154 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1155 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1156 (match_operator:SI 2 "arm_comparison_operator"
1157 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1158 "TARGET_32BIT"
1159 ""
1160 )
1161
1162 (define_insn "*arm_decscc"
1163 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1164 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1165 (match_operator:SI 2 "arm_comparison_operator"
1166 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1167 "TARGET_ARM"
1168 "@
1169 sub%d2\\t%0, %1, #1
1170 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1171 [(set_attr "conds" "use")
1172 (set_attr "length" "*,8")]
1173 )
1174
1175 (define_expand "subsf3"
1176 [(set (match_operand:SF 0 "s_register_operand" "")
1177 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1178 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT"
1180 "
1181 if (TARGET_MAVERICK)
1182 {
1183 if (!cirrus_fp_register (operands[1], SFmode))
1184 operands[1] = force_reg (SFmode, operands[1]);
1185 if (!cirrus_fp_register (operands[2], SFmode))
1186 operands[2] = force_reg (SFmode, operands[2]);
1187 }
1188 ")
1189
1190 (define_expand "subdf3"
1191 [(set (match_operand:DF 0 "s_register_operand" "")
1192 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1193 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1194 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1195 "
1196 if (TARGET_MAVERICK)
1197 {
1198 if (!cirrus_fp_register (operands[1], DFmode))
1199 operands[1] = force_reg (DFmode, operands[1]);
1200 if (!cirrus_fp_register (operands[2], DFmode))
1201 operands[2] = force_reg (DFmode, operands[2]);
1202 }
1203 ")
1204
1205 \f
1206 ;; Multiplication insns
1207
1208 (define_expand "mulsi3"
1209 [(set (match_operand:SI 0 "s_register_operand" "")
1210 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1211 (match_operand:SI 1 "s_register_operand" "")))]
1212 "TARGET_EITHER"
1213 ""
1214 )
1215
1216 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1217 (define_insn "*arm_mulsi3"
1218 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1219 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1220 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1221 "TARGET_32BIT && !arm_arch6"
1222 "mul%?\\t%0, %2, %1"
1223 [(set_attr "insn" "mul")
1224 (set_attr "predicable" "yes")]
1225 )
1226
1227 (define_insn "*arm_mulsi3_v6"
1228 [(set (match_operand:SI 0 "s_register_operand" "=r")
1229 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1230 (match_operand:SI 2 "s_register_operand" "r")))]
1231 "TARGET_32BIT && arm_arch6"
1232 "mul%?\\t%0, %1, %2"
1233 [(set_attr "insn" "mul")
1234 (set_attr "predicable" "yes")]
1235 )
1236
1237 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1238 ; 1 and 2 are the same, because reload will make operand 0 match
1239 ; operand 1 without realizing that this conflicts with operand 2. We fix
1240 ; this by adding another alternative to match this case, and then `reload'
1241 ; it ourselves. This alternative must come first.
1242 (define_insn "*thumb_mulsi3"
1243 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1244 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1245 (match_operand:SI 2 "register_operand" "l,l,l")))]
1246 "TARGET_THUMB1 && !arm_arch6"
1247 "*
1248 if (which_alternative < 2)
1249 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1250 else
1251 return \"mul\\t%0, %2\";
1252 "
1253 [(set_attr "length" "4,4,2")
1254 (set_attr "insn" "mul")]
1255 )
1256
1257 (define_insn "*thumb_mulsi3_v6"
1258 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1259 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1260 (match_operand:SI 2 "register_operand" "l,0,0")))]
1261 "TARGET_THUMB1 && arm_arch6"
1262 "@
1263 mul\\t%0, %2
1264 mul\\t%0, %1
1265 mul\\t%0, %1"
1266 [(set_attr "length" "2")
1267 (set_attr "insn" "mul")]
1268 )
1269
1270 (define_insn "*mulsi3_compare0"
1271 [(set (reg:CC_NOOV CC_REGNUM)
1272 (compare:CC_NOOV (mult:SI
1273 (match_operand:SI 2 "s_register_operand" "r,r")
1274 (match_operand:SI 1 "s_register_operand" "%0,r"))
1275 (const_int 0)))
1276 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1277 (mult:SI (match_dup 2) (match_dup 1)))]
1278 "TARGET_ARM && !arm_arch6"
1279 "mul%.\\t%0, %2, %1"
1280 [(set_attr "conds" "set")
1281 (set_attr "insn" "muls")]
1282 )
1283
1284 (define_insn "*mulsi3_compare0_v6"
1285 [(set (reg:CC_NOOV CC_REGNUM)
1286 (compare:CC_NOOV (mult:SI
1287 (match_operand:SI 2 "s_register_operand" "r")
1288 (match_operand:SI 1 "s_register_operand" "r"))
1289 (const_int 0)))
1290 (set (match_operand:SI 0 "s_register_operand" "=r")
1291 (mult:SI (match_dup 2) (match_dup 1)))]
1292 "TARGET_ARM && arm_arch6 && optimize_size"
1293 "mul%.\\t%0, %2, %1"
1294 [(set_attr "conds" "set")
1295 (set_attr "insn" "muls")]
1296 )
1297
1298 (define_insn "*mulsi_compare0_scratch"
1299 [(set (reg:CC_NOOV CC_REGNUM)
1300 (compare:CC_NOOV (mult:SI
1301 (match_operand:SI 2 "s_register_operand" "r,r")
1302 (match_operand:SI 1 "s_register_operand" "%0,r"))
1303 (const_int 0)))
1304 (clobber (match_scratch:SI 0 "=&r,&r"))]
1305 "TARGET_ARM && !arm_arch6"
1306 "mul%.\\t%0, %2, %1"
1307 [(set_attr "conds" "set")
1308 (set_attr "insn" "muls")]
1309 )
1310
1311 (define_insn "*mulsi_compare0_scratch_v6"
1312 [(set (reg:CC_NOOV CC_REGNUM)
1313 (compare:CC_NOOV (mult:SI
1314 (match_operand:SI 2 "s_register_operand" "r")
1315 (match_operand:SI 1 "s_register_operand" "r"))
1316 (const_int 0)))
1317 (clobber (match_scratch:SI 0 "=r"))]
1318 "TARGET_ARM && arm_arch6 && optimize_size"
1319 "mul%.\\t%0, %2, %1"
1320 [(set_attr "conds" "set")
1321 (set_attr "insn" "muls")]
1322 )
1323
1324 ;; Unnamed templates to match MLA instruction.
1325
1326 (define_insn "*mulsi3addsi"
1327 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1328 (plus:SI
1329 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1330 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1331 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1332 "TARGET_32BIT && !arm_arch6"
1333 "mla%?\\t%0, %2, %1, %3"
1334 [(set_attr "insn" "mla")
1335 (set_attr "predicable" "yes")]
1336 )
1337
1338 (define_insn "*mulsi3addsi_v6"
1339 [(set (match_operand:SI 0 "s_register_operand" "=r")
1340 (plus:SI
1341 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1342 (match_operand:SI 1 "s_register_operand" "r"))
1343 (match_operand:SI 3 "s_register_operand" "r")))]
1344 "TARGET_32BIT && arm_arch6"
1345 "mla%?\\t%0, %2, %1, %3"
1346 [(set_attr "insn" "mla")
1347 (set_attr "predicable" "yes")]
1348 )
1349
1350 (define_insn "*mulsi3addsi_compare0"
1351 [(set (reg:CC_NOOV CC_REGNUM)
1352 (compare:CC_NOOV
1353 (plus:SI (mult:SI
1354 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1355 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1356 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1357 (const_int 0)))
1358 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1359 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1360 (match_dup 3)))]
1361 "TARGET_ARM && arm_arch6"
1362 "mla%.\\t%0, %2, %1, %3"
1363 [(set_attr "conds" "set")
1364 (set_attr "insn" "mlas")]
1365 )
1366
1367 (define_insn "*mulsi3addsi_compare0_v6"
1368 [(set (reg:CC_NOOV CC_REGNUM)
1369 (compare:CC_NOOV
1370 (plus:SI (mult:SI
1371 (match_operand:SI 2 "s_register_operand" "r")
1372 (match_operand:SI 1 "s_register_operand" "r"))
1373 (match_operand:SI 3 "s_register_operand" "r"))
1374 (const_int 0)))
1375 (set (match_operand:SI 0 "s_register_operand" "=r")
1376 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1377 (match_dup 3)))]
1378 "TARGET_ARM && arm_arch6 && optimize_size"
1379 "mla%.\\t%0, %2, %1, %3"
1380 [(set_attr "conds" "set")
1381 (set_attr "insn" "mlas")]
1382 )
1383
1384 (define_insn "*mulsi3addsi_compare0_scratch"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1386 (compare:CC_NOOV
1387 (plus:SI (mult:SI
1388 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1389 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1390 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1391 (const_int 0)))
1392 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1393 "TARGET_ARM && !arm_arch6"
1394 "mla%.\\t%0, %2, %1, %3"
1395 [(set_attr "conds" "set")
1396 (set_attr "insn" "mlas")]
1397 )
1398
1399 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1400 [(set (reg:CC_NOOV CC_REGNUM)
1401 (compare:CC_NOOV
1402 (plus:SI (mult:SI
1403 (match_operand:SI 2 "s_register_operand" "r")
1404 (match_operand:SI 1 "s_register_operand" "r"))
1405 (match_operand:SI 3 "s_register_operand" "r"))
1406 (const_int 0)))
1407 (clobber (match_scratch:SI 0 "=r"))]
1408 "TARGET_ARM && arm_arch6 && optimize_size"
1409 "mla%.\\t%0, %2, %1, %3"
1410 [(set_attr "conds" "set")
1411 (set_attr "insn" "mlas")]
1412 )
1413
1414 (define_insn "*mulsi3subsi"
1415 [(set (match_operand:SI 0 "s_register_operand" "=r")
1416 (minus:SI
1417 (match_operand:SI 3 "s_register_operand" "r")
1418 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1419 (match_operand:SI 1 "s_register_operand" "r"))))]
1420 "TARGET_32BIT && arm_arch_thumb2"
1421 "mls%?\\t%0, %2, %1, %3"
1422 [(set_attr "insn" "mla")
1423 (set_attr "predicable" "yes")]
1424 )
1425
1426 ;; Unnamed template to match long long multiply-accumulate (smlal)
1427
1428 (define_insn "*mulsidi3adddi"
1429 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1430 (plus:DI
1431 (mult:DI
1432 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1433 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1434 (match_operand:DI 1 "s_register_operand" "0")))]
1435 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1436 "smlal%?\\t%Q0, %R0, %3, %2"
1437 [(set_attr "insn" "smlal")
1438 (set_attr "predicable" "yes")]
1439 )
1440
1441 (define_insn "*mulsidi3adddi_v6"
1442 [(set (match_operand:DI 0 "s_register_operand" "=r")
1443 (plus:DI
1444 (mult:DI
1445 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1446 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1447 (match_operand:DI 1 "s_register_operand" "0")))]
1448 "TARGET_32BIT && arm_arch6"
1449 "smlal%?\\t%Q0, %R0, %3, %2"
1450 [(set_attr "insn" "smlal")
1451 (set_attr "predicable" "yes")]
1452 )
1453
1454 ;; 32x32->64 widening multiply.
1455 ;; As with mulsi3, the only difference between the v3-5 and v6+
1456 ;; versions of these patterns is the requirement that the output not
1457 ;; overlap the inputs, but that still means we have to have a named
1458 ;; expander and two different starred insns.
1459
1460 (define_expand "mulsidi3"
1461 [(set (match_operand:DI 0 "s_register_operand" "")
1462 (mult:DI
1463 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1464 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1465 "TARGET_32BIT && arm_arch3m"
1466 ""
1467 )
1468
1469 (define_insn "*mulsidi3_nov6"
1470 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1471 (mult:DI
1472 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1473 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1474 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1475 "smull%?\\t%Q0, %R0, %1, %2"
1476 [(set_attr "insn" "smull")
1477 (set_attr "predicable" "yes")]
1478 )
1479
1480 (define_insn "*mulsidi3_v6"
1481 [(set (match_operand:DI 0 "s_register_operand" "=r")
1482 (mult:DI
1483 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1484 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1485 "TARGET_32BIT && arm_arch6"
1486 "smull%?\\t%Q0, %R0, %1, %2"
1487 [(set_attr "insn" "smull")
1488 (set_attr "predicable" "yes")]
1489 )
1490
1491 (define_expand "umulsidi3"
1492 [(set (match_operand:DI 0 "s_register_operand" "")
1493 (mult:DI
1494 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1495 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1496 "TARGET_32BIT && arm_arch3m"
1497 ""
1498 )
1499
1500 (define_insn "*umulsidi3_nov6"
1501 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1502 (mult:DI
1503 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1504 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1505 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1506 "umull%?\\t%Q0, %R0, %1, %2"
1507 [(set_attr "insn" "umull")
1508 (set_attr "predicable" "yes")]
1509 )
1510
1511 (define_insn "*umulsidi3_v6"
1512 [(set (match_operand:DI 0 "s_register_operand" "=r")
1513 (mult:DI
1514 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1515 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1516 "TARGET_32BIT && arm_arch6"
1517 "umull%?\\t%Q0, %R0, %1, %2"
1518 [(set_attr "insn" "umull")
1519 (set_attr "predicable" "yes")]
1520 )
1521
1522 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1523
1524 (define_insn "*umulsidi3adddi"
1525 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1526 (plus:DI
1527 (mult:DI
1528 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1529 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1530 (match_operand:DI 1 "s_register_operand" "0")))]
1531 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1532 "umlal%?\\t%Q0, %R0, %3, %2"
1533 [(set_attr "insn" "umlal")
1534 (set_attr "predicable" "yes")]
1535 )
1536
1537 (define_insn "*umulsidi3adddi_v6"
1538 [(set (match_operand:DI 0 "s_register_operand" "=r")
1539 (plus:DI
1540 (mult:DI
1541 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1542 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1543 (match_operand:DI 1 "s_register_operand" "0")))]
1544 "TARGET_32BIT && arm_arch6"
1545 "umlal%?\\t%Q0, %R0, %3, %2"
1546 [(set_attr "insn" "umlal")
1547 (set_attr "predicable" "yes")]
1548 )
1549
1550 (define_expand "smulsi3_highpart"
1551 [(parallel
1552 [(set (match_operand:SI 0 "s_register_operand" "")
1553 (truncate:SI
1554 (lshiftrt:DI
1555 (mult:DI
1556 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1557 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1558 (const_int 32))))
1559 (clobber (match_scratch:SI 3 ""))])]
1560 "TARGET_32BIT && arm_arch3m"
1561 ""
1562 )
1563
1564 (define_insn "*smulsi3_highpart_nov6"
1565 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1566 (truncate:SI
1567 (lshiftrt:DI
1568 (mult:DI
1569 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1570 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1571 (const_int 32))))
1572 (clobber (match_scratch:SI 3 "=&r,&r"))]
1573 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1574 "smull%?\\t%3, %0, %2, %1"
1575 [(set_attr "insn" "smull")
1576 (set_attr "predicable" "yes")]
1577 )
1578
1579 (define_insn "*smulsi3_highpart_v6"
1580 [(set (match_operand:SI 0 "s_register_operand" "=r")
1581 (truncate:SI
1582 (lshiftrt:DI
1583 (mult:DI
1584 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1585 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1586 (const_int 32))))
1587 (clobber (match_scratch:SI 3 "=r"))]
1588 "TARGET_32BIT && arm_arch6"
1589 "smull%?\\t%3, %0, %2, %1"
1590 [(set_attr "insn" "smull")
1591 (set_attr "predicable" "yes")]
1592 )
1593
1594 (define_expand "umulsi3_highpart"
1595 [(parallel
1596 [(set (match_operand:SI 0 "s_register_operand" "")
1597 (truncate:SI
1598 (lshiftrt:DI
1599 (mult:DI
1600 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1601 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1602 (const_int 32))))
1603 (clobber (match_scratch:SI 3 ""))])]
1604 "TARGET_32BIT && arm_arch3m"
1605 ""
1606 )
1607
1608 (define_insn "*umulsi3_highpart_nov6"
1609 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1610 (truncate:SI
1611 (lshiftrt:DI
1612 (mult:DI
1613 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1614 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1615 (const_int 32))))
1616 (clobber (match_scratch:SI 3 "=&r,&r"))]
1617 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1618 "umull%?\\t%3, %0, %2, %1"
1619 [(set_attr "insn" "umull")
1620 (set_attr "predicable" "yes")]
1621 )
1622
1623 (define_insn "*umulsi3_highpart_v6"
1624 [(set (match_operand:SI 0 "s_register_operand" "=r")
1625 (truncate:SI
1626 (lshiftrt:DI
1627 (mult:DI
1628 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1629 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1630 (const_int 32))))
1631 (clobber (match_scratch:SI 3 "=r"))]
1632 "TARGET_32BIT && arm_arch6"
1633 "umull%?\\t%3, %0, %2, %1"
1634 [(set_attr "insn" "umull")
1635 (set_attr "predicable" "yes")]
1636 )
1637
1638 (define_insn "mulhisi3"
1639 [(set (match_operand:SI 0 "s_register_operand" "=r")
1640 (mult:SI (sign_extend:SI
1641 (match_operand:HI 1 "s_register_operand" "%r"))
1642 (sign_extend:SI
1643 (match_operand:HI 2 "s_register_operand" "r"))))]
1644 "TARGET_DSP_MULTIPLY"
1645 "smulbb%?\\t%0, %1, %2"
1646 [(set_attr "insn" "smulxy")
1647 (set_attr "predicable" "yes")]
1648 )
1649
1650 (define_insn "*mulhisi3tb"
1651 [(set (match_operand:SI 0 "s_register_operand" "=r")
1652 (mult:SI (ashiftrt:SI
1653 (match_operand:SI 1 "s_register_operand" "r")
1654 (const_int 16))
1655 (sign_extend:SI
1656 (match_operand:HI 2 "s_register_operand" "r"))))]
1657 "TARGET_DSP_MULTIPLY"
1658 "smultb%?\\t%0, %1, %2"
1659 [(set_attr "insn" "smulxy")
1660 (set_attr "predicable" "yes")]
1661 )
1662
1663 (define_insn "*mulhisi3bt"
1664 [(set (match_operand:SI 0 "s_register_operand" "=r")
1665 (mult:SI (sign_extend:SI
1666 (match_operand:HI 1 "s_register_operand" "r"))
1667 (ashiftrt:SI
1668 (match_operand:SI 2 "s_register_operand" "r")
1669 (const_int 16))))]
1670 "TARGET_DSP_MULTIPLY"
1671 "smulbt%?\\t%0, %1, %2"
1672 [(set_attr "insn" "smulxy")
1673 (set_attr "predicable" "yes")]
1674 )
1675
1676 (define_insn "*mulhisi3tt"
1677 [(set (match_operand:SI 0 "s_register_operand" "=r")
1678 (mult:SI (ashiftrt:SI
1679 (match_operand:SI 1 "s_register_operand" "r")
1680 (const_int 16))
1681 (ashiftrt:SI
1682 (match_operand:SI 2 "s_register_operand" "r")
1683 (const_int 16))))]
1684 "TARGET_DSP_MULTIPLY"
1685 "smultt%?\\t%0, %1, %2"
1686 [(set_attr "insn" "smulxy")
1687 (set_attr "predicable" "yes")]
1688 )
1689
1690 (define_insn "*mulhisi3addsi"
1691 [(set (match_operand:SI 0 "s_register_operand" "=r")
1692 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1693 (mult:SI (sign_extend:SI
1694 (match_operand:HI 2 "s_register_operand" "%r"))
1695 (sign_extend:SI
1696 (match_operand:HI 3 "s_register_operand" "r")))))]
1697 "TARGET_DSP_MULTIPLY"
1698 "smlabb%?\\t%0, %2, %3, %1"
1699 [(set_attr "insn" "smlaxy")
1700 (set_attr "predicable" "yes")]
1701 )
1702
1703 (define_insn "*mulhidi3adddi"
1704 [(set (match_operand:DI 0 "s_register_operand" "=r")
1705 (plus:DI
1706 (match_operand:DI 1 "s_register_operand" "0")
1707 (mult:DI (sign_extend:DI
1708 (match_operand:HI 2 "s_register_operand" "%r"))
1709 (sign_extend:DI
1710 (match_operand:HI 3 "s_register_operand" "r")))))]
1711 "TARGET_DSP_MULTIPLY"
1712 "smlalbb%?\\t%Q0, %R0, %2, %3"
1713 [(set_attr "insn" "smlalxy")
1714 (set_attr "predicable" "yes")])
1715
1716 (define_expand "mulsf3"
1717 [(set (match_operand:SF 0 "s_register_operand" "")
1718 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1719 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1720 "TARGET_32BIT && TARGET_HARD_FLOAT"
1721 "
1722 if (TARGET_MAVERICK
1723 && !cirrus_fp_register (operands[2], SFmode))
1724 operands[2] = force_reg (SFmode, operands[2]);
1725 ")
1726
1727 (define_expand "muldf3"
1728 [(set (match_operand:DF 0 "s_register_operand" "")
1729 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1730 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1731 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1732 "
1733 if (TARGET_MAVERICK
1734 && !cirrus_fp_register (operands[2], DFmode))
1735 operands[2] = force_reg (DFmode, operands[2]);
1736 ")
1737 \f
1738 ;; Division insns
1739
1740 (define_expand "divsf3"
1741 [(set (match_operand:SF 0 "s_register_operand" "")
1742 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1743 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1744 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1745 "")
1746
1747 (define_expand "divdf3"
1748 [(set (match_operand:DF 0 "s_register_operand" "")
1749 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1750 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1751 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1752 "")
1753 \f
1754 ;; Modulo insns
1755
1756 (define_expand "modsf3"
1757 [(set (match_operand:SF 0 "s_register_operand" "")
1758 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1759 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1760 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1761 "")
1762
1763 (define_expand "moddf3"
1764 [(set (match_operand:DF 0 "s_register_operand" "")
1765 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1766 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1767 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1768 "")
1769 \f
1770 ;; Boolean and,ior,xor insns
1771
1772 ;; Split up double word logical operations
1773
1774 ;; Split up simple DImode logical operations. Simply perform the logical
1775 ;; operation on the upper and lower halves of the registers.
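;; For example, an (and:DI d, a, b) becomes d_lo = a_lo AND b_lo followed by
;; d_hi = a_hi AND b_hi once the register halves are exposed after reload.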
1776 (define_split
1777 [(set (match_operand:DI 0 "s_register_operand" "")
1778 (match_operator:DI 6 "logical_binary_operator"
1779 [(match_operand:DI 1 "s_register_operand" "")
1780 (match_operand:DI 2 "s_register_operand" "")]))]
1781 "TARGET_32BIT && reload_completed
1782 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1783 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1784 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1785 "
1786 {
1787 operands[3] = gen_highpart (SImode, operands[0]);
1788 operands[0] = gen_lowpart (SImode, operands[0]);
1789 operands[4] = gen_highpart (SImode, operands[1]);
1790 operands[1] = gen_lowpart (SImode, operands[1]);
1791 operands[5] = gen_highpart (SImode, operands[2]);
1792 operands[2] = gen_lowpart (SImode, operands[2]);
1793 }"
1794 )
1795
1796 (define_split
1797 [(set (match_operand:DI 0 "s_register_operand" "")
1798 (match_operator:DI 6 "logical_binary_operator"
1799 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1800 (match_operand:DI 1 "s_register_operand" "")]))]
1801 "TARGET_32BIT && reload_completed"
1802 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1803 (set (match_dup 3) (match_op_dup:SI 6
1804 [(ashiftrt:SI (match_dup 2) (const_int 31))
1805 (match_dup 4)]))]
1806 "
1807 {
1808 operands[3] = gen_highpart (SImode, operands[0]);
1809 operands[0] = gen_lowpart (SImode, operands[0]);
1810 operands[4] = gen_highpart (SImode, operands[1]);
1811 operands[1] = gen_lowpart (SImode, operands[1]);
1812 operands[5] = gen_highpart (SImode, operands[2]);
1813 operands[2] = gen_lowpart (SImode, operands[2]);
1814 }"
1815 )
1816
1817 ;; The zero extend of operand 2 means we can just copy the high part of
1818 ;; operand 1 into operand 0.
1819 (define_split
1820 [(set (match_operand:DI 0 "s_register_operand" "")
1821 (ior:DI
1822 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1823 (match_operand:DI 1 "s_register_operand" "")))]
1824 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1825 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1826 (set (match_dup 3) (match_dup 4))]
1827 "
1828 {
1829 operands[4] = gen_highpart (SImode, operands[1]);
1830 operands[3] = gen_highpart (SImode, operands[0]);
1831 operands[0] = gen_lowpart (SImode, operands[0]);
1832 operands[1] = gen_lowpart (SImode, operands[1]);
1833 }"
1834 )
1835
1836 ;; The zero extend of operand 2 means we can just copy the high part of
1837 ;; operand 1 into operand 0.
1838 (define_split
1839 [(set (match_operand:DI 0 "s_register_operand" "")
1840 (xor:DI
1841 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1842 (match_operand:DI 1 "s_register_operand" "")))]
1843 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1844 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1845 (set (match_dup 3) (match_dup 4))]
1846 "
1847 {
1848 operands[4] = gen_highpart (SImode, operands[1]);
1849 operands[3] = gen_highpart (SImode, operands[0]);
1850 operands[0] = gen_lowpart (SImode, operands[0]);
1851 operands[1] = gen_lowpart (SImode, operands[1]);
1852 }"
1853 )
1854
1855 (define_insn "anddi3"
1856 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1857 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1858 (match_operand:DI 2 "s_register_operand" "r,r")))]
1859 "TARGET_32BIT && ! TARGET_IWMMXT"
1860 "#"
1861 [(set_attr "length" "8")]
1862 )
1863
1864 (define_insn_and_split "*anddi_zesidi_di"
1865 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1866 (and:DI (zero_extend:DI
1867 (match_operand:SI 2 "s_register_operand" "r,r"))
1868 (match_operand:DI 1 "s_register_operand" "0,r")))]
1869 "TARGET_32BIT"
1870 "#"
1871 "TARGET_32BIT && reload_completed"
1872 ; The zero extend of operand 2 clears the high word of the output
1873 ; operand.
1874 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1875 (set (match_dup 3) (const_int 0))]
1876 "
1877 {
1878 operands[3] = gen_highpart (SImode, operands[0]);
1879 operands[0] = gen_lowpart (SImode, operands[0]);
1880 operands[1] = gen_lowpart (SImode, operands[1]);
1881 }"
1882 [(set_attr "length" "8")]
1883 )
1884
1885 (define_insn "*anddi_sesdi_di"
1886 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1887 (and:DI (sign_extend:DI
1888 (match_operand:SI 2 "s_register_operand" "r,r"))
1889 (match_operand:DI 1 "s_register_operand" "0,r")))]
1890 "TARGET_32BIT"
1891 "#"
1892 [(set_attr "length" "8")]
1893 )
1894
1895 (define_expand "andsi3"
1896 [(set (match_operand:SI 0 "s_register_operand" "")
1897 (and:SI (match_operand:SI 1 "s_register_operand" "")
1898 (match_operand:SI 2 "reg_or_int_operand" "")))]
1899 "TARGET_EITHER"
1900 "
1901 if (TARGET_32BIT)
1902 {
1903 if (GET_CODE (operands[2]) == CONST_INT)
1904 {
1905 arm_split_constant (AND, SImode, NULL_RTX,
1906 INTVAL (operands[2]), operands[0],
1907 operands[1], optimize && can_create_pseudo_p ());
1908
1909 DONE;
1910 }
1911 }
1912 else /* TARGET_THUMB1 */
1913 {
1914 if (GET_CODE (operands[2]) != CONST_INT)
1915 {
1916 rtx tmp = force_reg (SImode, operands[2]);
1917 if (rtx_equal_p (operands[0], operands[1]))
1918 operands[2] = tmp;
1919 else
1920 {
1921 operands[2] = operands[1];
1922 operands[1] = tmp;
1923 }
1924 }
1925 else
1926 {
1927 int i;
1928
1929 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1930 {
1931 operands[2] = force_reg (SImode,
1932 GEN_INT (~INTVAL (operands[2])));
1933
1934 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1935
1936 DONE;
1937 }
1938
1939 for (i = 9; i <= 31; i++)
1940 {
1941 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1942 {
1943 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1944 const0_rtx));
1945 DONE;
1946 }
1947 else if ((((HOST_WIDE_INT) 1) << i) - 1
1948 == ~INTVAL (operands[2]))
1949 {
1950 rtx shift = GEN_INT (i);
1951 rtx reg = gen_reg_rtx (SImode);
1952
1953 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1954 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1955
1956 DONE;
1957 }
1958 }
1959
1960 operands[2] = force_reg (SImode, operands[2]);
1961 }
1962 }
1963 "
1964 )
1965
1966 ; ??? Check split length for Thumb-2
1967 (define_insn_and_split "*arm_andsi3_insn"
1968 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1969 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1970 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1971 "TARGET_32BIT"
1972 "@
1973 and%?\\t%0, %1, %2
1974 bic%?\\t%0, %1, #%B2
1975 #"
1976 "TARGET_32BIT
1977 && GET_CODE (operands[2]) == CONST_INT
1978 && !(const_ok_for_arm (INTVAL (operands[2]))
1979 || const_ok_for_arm (~INTVAL (operands[2])))"
1980 [(clobber (const_int 0))]
1981 "
1982 arm_split_constant (AND, SImode, curr_insn,
1983 INTVAL (operands[2]), operands[0], operands[1], 0);
1984 DONE;
1985 "
1986 [(set_attr "length" "4,4,16")
1987 (set_attr "predicable" "yes")]
1988 )
1989
1990 (define_insn "*thumb1_andsi3_insn"
1991 [(set (match_operand:SI 0 "register_operand" "=l")
1992 (and:SI (match_operand:SI 1 "register_operand" "%0")
1993 (match_operand:SI 2 "register_operand" "l")))]
1994 "TARGET_THUMB1"
1995 "and\\t%0, %0, %2"
1996 [(set_attr "length" "2")]
1997 )
1998
1999 (define_insn "*andsi3_compare0"
2000 [(set (reg:CC_NOOV CC_REGNUM)
2001 (compare:CC_NOOV
2002 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2003 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2004 (const_int 0)))
2005 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2006 (and:SI (match_dup 1) (match_dup 2)))]
2007 "TARGET_32BIT"
2008 "@
2009 and%.\\t%0, %1, %2
2010 bic%.\\t%0, %1, #%B2"
2011 [(set_attr "conds" "set")]
2012 )
2013
2014 (define_insn "*andsi3_compare0_scratch"
2015 [(set (reg:CC_NOOV CC_REGNUM)
2016 (compare:CC_NOOV
2017 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2018 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2019 (const_int 0)))
2020 (clobber (match_scratch:SI 2 "=X,r"))]
2021 "TARGET_32BIT"
2022 "@
2023 tst%?\\t%0, %1
2024 bic%.\\t%2, %0, #%B1"
2025 [(set_attr "conds" "set")]
2026 )
2027
2028 (define_insn "*zeroextractsi_compare0_scratch"
2029 [(set (reg:CC_NOOV CC_REGNUM)
2030 (compare:CC_NOOV (zero_extract:SI
2031 (match_operand:SI 0 "s_register_operand" "r")
2032 (match_operand 1 "const_int_operand" "n")
2033 (match_operand 2 "const_int_operand" "n"))
2034 (const_int 0)))]
2035 "TARGET_32BIT
2036 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2037 && INTVAL (operands[1]) > 0
2038 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2039 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2040 "*
2041 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2042 << INTVAL (operands[2]));
2043 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2044 return \"\";
2045 "
2046 [(set_attr "conds" "set")]
2047 )
2048
2049 (define_insn_and_split "*ne_zeroextractsi"
2050 [(set (match_operand:SI 0 "s_register_operand" "=r")
2051 (ne:SI (zero_extract:SI
2052 (match_operand:SI 1 "s_register_operand" "r")
2053 (match_operand:SI 2 "const_int_operand" "n")
2054 (match_operand:SI 3 "const_int_operand" "n"))
2055 (const_int 0)))
2056 (clobber (reg:CC CC_REGNUM))]
2057 "TARGET_32BIT
2058 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2059 && INTVAL (operands[2]) > 0
2060 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2061 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2062 "#"
2063 "TARGET_32BIT
2064 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2065 && INTVAL (operands[2]) > 0
2066 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2067 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2068 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2069 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2070 (const_int 0)))
2071 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2072 (set (match_dup 0)
2073 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2074 (match_dup 0) (const_int 1)))]
2075 "
2076 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2077 << INTVAL (operands[3]));
2078 "
2079 [(set_attr "conds" "clob")
2080 (set (attr "length")
2081 (if_then_else (eq_attr "is_thumb" "yes")
2082 (const_int 12)
2083 (const_int 8)))]
2084 )
2085
2086 (define_insn_and_split "*ne_zeroextractsi_shifted"
2087 [(set (match_operand:SI 0 "s_register_operand" "=r")
2088 (ne:SI (zero_extract:SI
2089 (match_operand:SI 1 "s_register_operand" "r")
2090 (match_operand:SI 2 "const_int_operand" "n")
2091 (const_int 0))
2092 (const_int 0)))
2093 (clobber (reg:CC CC_REGNUM))]
2094 "TARGET_ARM"
2095 "#"
2096 "TARGET_ARM"
2097 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2098 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2099 (const_int 0)))
2100 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2101 (set (match_dup 0)
2102 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2103 (match_dup 0) (const_int 1)))]
2104 "
2105 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2106 "
2107 [(set_attr "conds" "clob")
2108 (set_attr "length" "8")]
2109 )
2110
2111 (define_insn_and_split "*ite_ne_zeroextractsi"
2112 [(set (match_operand:SI 0 "s_register_operand" "=r")
2113 (if_then_else:SI (ne (zero_extract:SI
2114 (match_operand:SI 1 "s_register_operand" "r")
2115 (match_operand:SI 2 "const_int_operand" "n")
2116 (match_operand:SI 3 "const_int_operand" "n"))
2117 (const_int 0))
2118 (match_operand:SI 4 "arm_not_operand" "rIK")
2119 (const_int 0)))
2120 (clobber (reg:CC CC_REGNUM))]
2121 "TARGET_ARM
2122 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2123 && INTVAL (operands[2]) > 0
2124 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2125 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2126 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2127 "#"
2128 "TARGET_ARM
2129 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2130 && INTVAL (operands[2]) > 0
2131 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2132 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2133 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2134 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2135 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2136 (const_int 0)))
2137 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2138 (set (match_dup 0)
2139 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2140 (match_dup 0) (match_dup 4)))]
2141 "
2142 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2143 << INTVAL (operands[3]));
2144 "
2145 [(set_attr "conds" "clob")
2146 (set_attr "length" "8")]
2147 )
2148
2149 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2150 [(set (match_operand:SI 0 "s_register_operand" "=r")
2151 (if_then_else:SI (ne (zero_extract:SI
2152 (match_operand:SI 1 "s_register_operand" "r")
2153 (match_operand:SI 2 "const_int_operand" "n")
2154 (const_int 0))
2155 (const_int 0))
2156 (match_operand:SI 3 "arm_not_operand" "rIK")
2157 (const_int 0)))
2158 (clobber (reg:CC CC_REGNUM))]
2159 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2160 "#"
2161 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2162 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2163 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2164 (const_int 0)))
2165 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2166 (set (match_dup 0)
2167 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2168 (match_dup 0) (match_dup 3)))]
2169 "
2170 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2171 "
2172 [(set_attr "conds" "clob")
2173 (set_attr "length" "8")]
2174 )
2175
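;; The field is extracted with a shift pair: (X << (32 - LEN - POS)) followed
;; by a logical shift right of (32 - LEN) leaves the LEN-bit field that
;; started at bit POS in the least significant bits of the result.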
2176 (define_split
2177 [(set (match_operand:SI 0 "s_register_operand" "")
2178 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2179 (match_operand:SI 2 "const_int_operand" "")
2180 (match_operand:SI 3 "const_int_operand" "")))
2181 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2182 "TARGET_THUMB1"
2183 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2184 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2185 "{
2186 HOST_WIDE_INT temp = INTVAL (operands[2]);
2187
2188 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2189 operands[3] = GEN_INT (32 - temp);
2190 }"
2191 )
2192
2193 ;; ??? Thumb-2 has bitfield insert/extract instructions that could be used here.
2194 (define_split
2195 [(set (match_operand:SI 0 "s_register_operand" "")
2196 (match_operator:SI 1 "shiftable_operator"
2197 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2198 (match_operand:SI 3 "const_int_operand" "")
2199 (match_operand:SI 4 "const_int_operand" ""))
2200 (match_operand:SI 5 "s_register_operand" "")]))
2201 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2202 "TARGET_ARM"
2203 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2204 (set (match_dup 0)
2205 (match_op_dup 1
2206 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2207 (match_dup 5)]))]
2208 "{
2209 HOST_WIDE_INT temp = INTVAL (operands[3]);
2210
2211 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2212 operands[4] = GEN_INT (32 - temp);
2213 }"
2214 )
2215
2216 (define_split
2217 [(set (match_operand:SI 0 "s_register_operand" "")
2218 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2219 (match_operand:SI 2 "const_int_operand" "")
2220 (match_operand:SI 3 "const_int_operand" "")))]
2221 "TARGET_THUMB1"
2222 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2223 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2224 "{
2225 HOST_WIDE_INT temp = INTVAL (operands[2]);
2226
2227 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2228 operands[3] = GEN_INT (32 - temp);
2229 }"
2230 )
2231
2232 (define_split
2233 [(set (match_operand:SI 0 "s_register_operand" "")
2234 (match_operator:SI 1 "shiftable_operator"
2235 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2236 (match_operand:SI 3 "const_int_operand" "")
2237 (match_operand:SI 4 "const_int_operand" ""))
2238 (match_operand:SI 5 "s_register_operand" "")]))
2239 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2240 "TARGET_ARM"
2241 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2242 (set (match_dup 0)
2243 (match_op_dup 1
2244 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2245 (match_dup 5)]))]
2246 "{
2247 HOST_WIDE_INT temp = INTVAL (operands[3]);
2248
2249 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2250 operands[4] = GEN_INT (32 - temp);
2251 }"
2252 )
2253
2254 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2255 ;;; represented by the bitfield, then this will produce incorrect results.
2256 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2257 ;;; which have a real bit-field insert instruction, the truncation happens
2258 ;;; in the bit-field insert instruction itself. Since arm does not have a
2259 ;;; bit-field insert instruction, we would have to emit code here to truncate
2260 ;;; the value before we insert. This loses some of the advantage of having
2261 ;;; this insv pattern, so this pattern needs to be reevaluated.
2262
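;; A rough sketch of the operation the expander below open-codes, with
;; MASK = (1 << WIDTH) - 1:
;;   dest = (dest & ~(MASK << START_BIT)) | ((value & MASK) << START_BIT)
;; using whichever AND/ORR/shift sequence is cheapest for the constants
;; involved.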
2263 (define_expand "insv"
2264 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2265 (match_operand:SI 1 "general_operand" "")
2266 (match_operand:SI 2 "general_operand" ""))
2267 (match_operand:SI 3 "reg_or_int_operand" ""))]
2268 "TARGET_ARM || arm_arch_thumb2"
2269 "
2270 {
2271 int start_bit = INTVAL (operands[2]);
2272 int width = INTVAL (operands[1]);
2273 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2274 rtx target, subtarget;
2275
2276 if (arm_arch_thumb2)
2277 {
2278 bool use_bfi = TRUE;
2279
2280 if (GET_CODE (operands[3]) == CONST_INT)
2281 {
2282 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2283
2284 if (val == 0)
2285 {
2286 emit_insn (gen_insv_zero (operands[0], operands[1],
2287 operands[2]));
2288 DONE;
2289 }
2290
2291 /* See if the set can be done with a single orr instruction. */
2292 if (val == mask && const_ok_for_arm (val << start_bit))
2293 use_bfi = FALSE;
2294 }
2295
2296 if (use_bfi)
2297 {
2298 if (GET_CODE (operands[3]) != REG)
2299 operands[3] = force_reg (SImode, operands[3]);
2300
2301 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2302 operands[3]));
2303 DONE;
2304 }
2305 }
2306
2307 target = copy_rtx (operands[0]);
2308 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2309 subreg as the final target. */
2310 if (GET_CODE (target) == SUBREG)
2311 {
2312 subtarget = gen_reg_rtx (SImode);
2313 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2314 < GET_MODE_SIZE (SImode))
2315 target = SUBREG_REG (target);
2316 }
2317 else
2318 subtarget = target;
2319
2320 if (GET_CODE (operands[3]) == CONST_INT)
2321 {
2322 /* Since we are inserting a known constant, we may be able to
2323 reduce the number of bits that we have to clear so that
2324 the mask becomes simple. */
2325 /* ??? This code does not check to see if the new mask is actually
2326 simpler. It may not be. */
2327 rtx op1 = gen_reg_rtx (SImode);
2328 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2329 start of this pattern. */
2330 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2331 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2332
2333 emit_insn (gen_andsi3 (op1, operands[0],
2334 gen_int_mode (~mask2, SImode)));
2335 emit_insn (gen_iorsi3 (subtarget, op1,
2336 gen_int_mode (op3_value << start_bit, SImode)));
2337 }
2338 else if (start_bit == 0
2339 && !(const_ok_for_arm (mask)
2340 || const_ok_for_arm (~mask)))
2341 {
2342 /* A trick: since we are setting the bottom bits in the word,
2343 we can shift operand[3] up, operand[0] down, OR them together
2344 and rotate the result back again. This takes 3 insns, and
2345 the third might be mergeable into another op. */
2346 /* The shift up copes with the possibility that operand[3] is
2347 wider than the bitfield. */
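/* A rough sketch of the sequence emitted below, writing DEST for
operands[0] and VALUE for operands[3]:
t = VALUE << (32 - width);   new field moved to the top bits
u = DEST >> width;           old low-field bits discarded
u |= t;
DEST = u rotated left by width bits.  */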
2348 rtx op0 = gen_reg_rtx (SImode);
2349 rtx op1 = gen_reg_rtx (SImode);
2350
2351 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2352 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2353 emit_insn (gen_iorsi3 (op1, op1, op0));
2354 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2355 }
2356 else if ((width + start_bit == 32)
2357 && !(const_ok_for_arm (mask)
2358 || const_ok_for_arm (~mask)))
2359 {
2360 /* Similar trick, but slightly less efficient. */
2361
2362 rtx op0 = gen_reg_rtx (SImode);
2363 rtx op1 = gen_reg_rtx (SImode);
2364
2365 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2366 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2367 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2368 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2369 }
2370 else
2371 {
2372 rtx op0 = gen_int_mode (mask, SImode);
2373 rtx op1 = gen_reg_rtx (SImode);
2374 rtx op2 = gen_reg_rtx (SImode);
2375
2376 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2377 {
2378 rtx tmp = gen_reg_rtx (SImode);
2379
2380 emit_insn (gen_movsi (tmp, op0));
2381 op0 = tmp;
2382 }
2383
2384 /* Mask out any bits in operand[3] that are not needed. */
2385 emit_insn (gen_andsi3 (op1, operands[3], op0));
2386
2387 if (GET_CODE (op0) == CONST_INT
2388 && (const_ok_for_arm (mask << start_bit)
2389 || const_ok_for_arm (~(mask << start_bit))))
2390 {
2391 op0 = gen_int_mode (~(mask << start_bit), SImode);
2392 emit_insn (gen_andsi3 (op2, operands[0], op0));
2393 }
2394 else
2395 {
2396 if (GET_CODE (op0) == CONST_INT)
2397 {
2398 rtx tmp = gen_reg_rtx (SImode);
2399
2400 emit_insn (gen_movsi (tmp, op0));
2401 op0 = tmp;
2402 }
2403
2404 if (start_bit != 0)
2405 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2406
2407 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2408 }
2409
2410 if (start_bit != 0)
2411 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2412
2413 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2414 }
2415
2416 if (subtarget != target)
2417 {
2418 /* If TARGET is still a SUBREG, then it must be wider than a word,
2419 so we must be careful only to set the subword we were asked to. */
2420 if (GET_CODE (target) == SUBREG)
2421 emit_move_insn (target, subtarget);
2422 else
2423 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2424 }
2425
2426 DONE;
2427 }"
2428 )
2429
2430 (define_insn "insv_zero"
2431 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2432 (match_operand:SI 1 "const_int_operand" "M")
2433 (match_operand:SI 2 "const_int_operand" "M"))
2434 (const_int 0))]
2435 "arm_arch_thumb2"
2436 "bfc%?\t%0, %2, %1"
2437 [(set_attr "length" "4")
2438 (set_attr "predicable" "yes")]
2439 )
2440
2441 (define_insn "insv_t2"
2442 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2443 (match_operand:SI 1 "const_int_operand" "M")
2444 (match_operand:SI 2 "const_int_operand" "M"))
2445 (match_operand:SI 3 "s_register_operand" "r"))]
2446 "arm_arch_thumb2"
2447 "bfi%?\t%0, %3, %2, %1"
2448 [(set_attr "length" "4")
2449 (set_attr "predicable" "yes")]
2450 )
2451
2452 ; Constants for operand 2 will never be given to these patterns.
2453 (define_insn_and_split "*anddi_notdi_di"
2454 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2455 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2456 (match_operand:DI 2 "s_register_operand" "r,0")))]
2457 "TARGET_32BIT"
2458 "#"
2459 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2460 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2461 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2462 "
2463 {
2464 operands[3] = gen_highpart (SImode, operands[0]);
2465 operands[0] = gen_lowpart (SImode, operands[0]);
2466 operands[4] = gen_highpart (SImode, operands[1]);
2467 operands[1] = gen_lowpart (SImode, operands[1]);
2468 operands[5] = gen_highpart (SImode, operands[2]);
2469 operands[2] = gen_lowpart (SImode, operands[2]);
2470 }"
2471 [(set_attr "length" "8")
2472 (set_attr "predicable" "yes")]
2473 )
2474
2475 (define_insn_and_split "*anddi_notzesidi_di"
2476 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2477 (and:DI (not:DI (zero_extend:DI
2478 (match_operand:SI 2 "s_register_operand" "r,r")))
2479 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2480 "TARGET_32BIT"
2481 "@
2482 bic%?\\t%Q0, %Q1, %2
2483 #"
2484 ; (not (zero_extend ...)) allows us to just copy the high word from
2485 ; operand 1 to operand 0.
2486 "TARGET_32BIT
2487 && reload_completed
2488 && operands[0] != operands[1]"
2489 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2490 (set (match_dup 3) (match_dup 4))]
2491 "
2492 {
2493 operands[3] = gen_highpart (SImode, operands[0]);
2494 operands[0] = gen_lowpart (SImode, operands[0]);
2495 operands[4] = gen_highpart (SImode, operands[1]);
2496 operands[1] = gen_lowpart (SImode, operands[1]);
2497 }"
2498 [(set_attr "length" "4,8")
2499 (set_attr "predicable" "yes")]
2500 )
2501
2502 (define_insn_and_split "*anddi_notsesidi_di"
2503 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2504 (and:DI (not:DI (sign_extend:DI
2505 (match_operand:SI 2 "s_register_operand" "r,r")))
2506 (match_operand:DI 1 "s_register_operand" "0,r")))]
2507 "TARGET_32BIT"
2508 "#"
2509 "TARGET_32BIT && reload_completed"
2510 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2511 (set (match_dup 3) (and:SI (not:SI
2512 (ashiftrt:SI (match_dup 2) (const_int 31)))
2513 (match_dup 4)))]
2514 "
2515 {
2516 operands[3] = gen_highpart (SImode, operands[0]);
2517 operands[0] = gen_lowpart (SImode, operands[0]);
2518 operands[4] = gen_highpart (SImode, operands[1]);
2519 operands[1] = gen_lowpart (SImode, operands[1]);
2520 }"
2521 [(set_attr "length" "8")
2522 (set_attr "predicable" "yes")]
2523 )
2524
2525 (define_insn "andsi_notsi_si"
2526 [(set (match_operand:SI 0 "s_register_operand" "=r")
2527 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2528 (match_operand:SI 1 "s_register_operand" "r")))]
2529 "TARGET_32BIT"
2530 "bic%?\\t%0, %1, %2"
2531 [(set_attr "predicable" "yes")]
2532 )
2533
2534 (define_insn "bicsi3"
2535 [(set (match_operand:SI 0 "register_operand" "=l")
2536 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2537 (match_operand:SI 2 "register_operand" "0")))]
2538 "TARGET_THUMB1"
2539 "bic\\t%0, %0, %1"
2540 [(set_attr "length" "2")]
2541 )
2542
2543 (define_insn "andsi_not_shiftsi_si"
2544 [(set (match_operand:SI 0 "s_register_operand" "=r")
2545 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2546 [(match_operand:SI 2 "s_register_operand" "r")
2547 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2548 (match_operand:SI 1 "s_register_operand" "r")))]
2549 "TARGET_ARM"
2550 "bic%?\\t%0, %1, %2%S4"
2551 [(set_attr "predicable" "yes")
2552 (set_attr "shift" "2")
2553 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2554 (const_string "alu_shift")
2555 (const_string "alu_shift_reg")))]
2556 )
2557
2558 (define_insn "*andsi_notsi_si_compare0"
2559 [(set (reg:CC_NOOV CC_REGNUM)
2560 (compare:CC_NOOV
2561 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2562 (match_operand:SI 1 "s_register_operand" "r"))
2563 (const_int 0)))
2564 (set (match_operand:SI 0 "s_register_operand" "=r")
2565 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2566 "TARGET_32BIT"
2567 "bic%.\\t%0, %1, %2"
2568 [(set_attr "conds" "set")]
2569 )
2570
2571 (define_insn "*andsi_notsi_si_compare0_scratch"
2572 [(set (reg:CC_NOOV CC_REGNUM)
2573 (compare:CC_NOOV
2574 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2575 (match_operand:SI 1 "s_register_operand" "r"))
2576 (const_int 0)))
2577 (clobber (match_scratch:SI 0 "=r"))]
2578 "TARGET_32BIT"
2579 "bic%.\\t%0, %1, %2"
2580 [(set_attr "conds" "set")]
2581 )
2582
2583 (define_insn "iordi3"
2584 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2585 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2586 (match_operand:DI 2 "s_register_operand" "r,r")))]
2587 "TARGET_32BIT && ! TARGET_IWMMXT"
2588 "#"
2589 [(set_attr "length" "8")
2590 (set_attr "predicable" "yes")]
2591 )
2592
2593 (define_insn "*iordi_zesidi_di"
2594 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2595 (ior:DI (zero_extend:DI
2596 (match_operand:SI 2 "s_register_operand" "r,r"))
2597 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2598 "TARGET_32BIT"
2599 "@
2600 orr%?\\t%Q0, %Q1, %2
2601 #"
2602 [(set_attr "length" "4,8")
2603 (set_attr "predicable" "yes")]
2604 )
2605
2606 (define_insn "*iordi_sesidi_di"
2607 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2608 (ior:DI (sign_extend:DI
2609 (match_operand:SI 2 "s_register_operand" "r,r"))
2610 (match_operand:DI 1 "s_register_operand" "0,r")))]
2611 "TARGET_32BIT"
2612 "#"
2613 [(set_attr "length" "8")
2614 (set_attr "predicable" "yes")]
2615 )
2616
2617 (define_expand "iorsi3"
2618 [(set (match_operand:SI 0 "s_register_operand" "")
2619 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2620 (match_operand:SI 2 "reg_or_int_operand" "")))]
2621 "TARGET_EITHER"
2622 "
2623 if (GET_CODE (operands[2]) == CONST_INT)
2624 {
2625 if (TARGET_32BIT)
2626 {
2627 arm_split_constant (IOR, SImode, NULL_RTX,
2628 INTVAL (operands[2]), operands[0], operands[1],
2629 optimize && can_create_pseudo_p ());
2630 DONE;
2631 }
2632 else /* TARGET_THUMB1 */
2633 {
2634 rtx tmp = force_reg (SImode, operands[2]);
2635 if (rtx_equal_p (operands[0], operands[1]))
2636 operands[2] = tmp;
2637 else
2638 {
2639 operands[2] = operands[1];
2640 operands[1] = tmp;
2641 }
2642 }
2643 }
2644 "
2645 )
2646
2647 (define_insn_and_split "*arm_iorsi3"
2648 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2649 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2650 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2651 "TARGET_ARM"
2652 "@
2653 orr%?\\t%0, %1, %2
2654 #"
2655 "TARGET_ARM
2656 && GET_CODE (operands[2]) == CONST_INT
2657 && !const_ok_for_arm (INTVAL (operands[2]))"
2658 [(clobber (const_int 0))]
2659 "
2660 arm_split_constant (IOR, SImode, curr_insn,
2661 INTVAL (operands[2]), operands[0], operands[1], 0);
2662 DONE;
2663 "
2664 [(set_attr "length" "4,16")
2665 (set_attr "predicable" "yes")]
2666 )
2667
2668 (define_insn "*thumb1_iorsi3"
2669 [(set (match_operand:SI 0 "register_operand" "=l")
2670 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2671 (match_operand:SI 2 "register_operand" "l")))]
2672 "TARGET_THUMB1"
2673 "orr\\t%0, %0, %2"
2674 [(set_attr "length" "2")]
2675 )
2676
2677 (define_peephole2
2678 [(match_scratch:SI 3 "r")
2679 (set (match_operand:SI 0 "arm_general_register_operand" "")
2680 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2681 (match_operand:SI 2 "const_int_operand" "")))]
2682 "TARGET_ARM
2683 && !const_ok_for_arm (INTVAL (operands[2]))
2684 && const_ok_for_arm (~INTVAL (operands[2]))"
2685 [(set (match_dup 3) (match_dup 2))
2686 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2687 ""
2688 )
2689
2690 (define_insn "*iorsi3_compare0"
2691 [(set (reg:CC_NOOV CC_REGNUM)
2692 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2693 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2694 (const_int 0)))
2695 (set (match_operand:SI 0 "s_register_operand" "=r")
2696 (ior:SI (match_dup 1) (match_dup 2)))]
2697 "TARGET_32BIT"
2698 "orr%.\\t%0, %1, %2"
2699 [(set_attr "conds" "set")]
2700 )
2701
2702 (define_insn "*iorsi3_compare0_scratch"
2703 [(set (reg:CC_NOOV CC_REGNUM)
2704 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2705 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2706 (const_int 0)))
2707 (clobber (match_scratch:SI 0 "=r"))]
2708 "TARGET_32BIT"
2709 "orr%.\\t%0, %1, %2"
2710 [(set_attr "conds" "set")]
2711 )
2712
2713 (define_insn "xordi3"
2714 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2715 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2716 (match_operand:DI 2 "s_register_operand" "r,r")))]
2717 "TARGET_32BIT && !TARGET_IWMMXT"
2718 "#"
2719 [(set_attr "length" "8")
2720 (set_attr "predicable" "yes")]
2721 )
2722
2723 (define_insn "*xordi_zesidi_di"
2724 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2725 (xor:DI (zero_extend:DI
2726 (match_operand:SI 2 "s_register_operand" "r,r"))
2727 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2728 "TARGET_32BIT"
2729 "@
2730 eor%?\\t%Q0, %Q1, %2
2731 #"
2732 [(set_attr "length" "4,8")
2733 (set_attr "predicable" "yes")]
2734 )
2735
2736 (define_insn "*xordi_sesidi_di"
2737 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2738 (xor:DI (sign_extend:DI
2739 (match_operand:SI 2 "s_register_operand" "r,r"))
2740 (match_operand:DI 1 "s_register_operand" "0,r")))]
2741 "TARGET_32BIT"
2742 "#"
2743 [(set_attr "length" "8")
2744 (set_attr "predicable" "yes")]
2745 )
2746
2747 (define_expand "xorsi3"
2748 [(set (match_operand:SI 0 "s_register_operand" "")
2749 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2750 (match_operand:SI 2 "reg_or_int_operand" "")))]
2751 "TARGET_EITHER"
2752 "if (GET_CODE (operands[2]) == CONST_INT)
2753 {
2754 if (TARGET_32BIT)
2755 {
2756 arm_split_constant (XOR, SImode, NULL_RTX,
2757 INTVAL (operands[2]), operands[0], operands[1],
2758 optimize && can_create_pseudo_p ());
2759 DONE;
2760 }
2761 else /* TARGET_THUMB1 */
2762 {
2763 rtx tmp = force_reg (SImode, operands[2]);
2764 if (rtx_equal_p (operands[0], operands[1]))
2765 operands[2] = tmp;
2766 else
2767 {
2768 operands[2] = operands[1];
2769 operands[1] = tmp;
2770 }
2771 }
2772 }"
2773 )
2774
2775 (define_insn "*arm_xorsi3"
2776 [(set (match_operand:SI 0 "s_register_operand" "=r")
2777 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2778 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2779 "TARGET_32BIT"
2780 "eor%?\\t%0, %1, %2"
2781 [(set_attr "predicable" "yes")]
2782 )
2783
2784 (define_insn "*thumb1_xorsi3"
2785 [(set (match_operand:SI 0 "register_operand" "=l")
2786 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2787 (match_operand:SI 2 "register_operand" "l")))]
2788 "TARGET_THUMB1"
2789 "eor\\t%0, %0, %2"
2790 [(set_attr "length" "2")]
2791 )
2792
2793 (define_insn "*xorsi3_compare0"
2794 [(set (reg:CC_NOOV CC_REGNUM)
2795 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2796 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2797 (const_int 0)))
2798 (set (match_operand:SI 0 "s_register_operand" "=r")
2799 (xor:SI (match_dup 1) (match_dup 2)))]
2800 "TARGET_32BIT"
2801 "eor%.\\t%0, %1, %2"
2802 [(set_attr "conds" "set")]
2803 )
2804
2805 (define_insn "*xorsi3_compare0_scratch"
2806 [(set (reg:CC_NOOV CC_REGNUM)
2807 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2808 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2809 (const_int 0)))]
2810 "TARGET_32BIT"
2811 "teq%?\\t%0, %1"
2812 [(set_attr "conds" "set")]
2813 )
2814
2815 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
2816 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2817 ; following insns.
2818
2819 (define_split
2820 [(set (match_operand:SI 0 "s_register_operand" "")
2821 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2822 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2823 (match_operand:SI 3 "arm_rhs_operand" "")))
2824 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2825 "TARGET_32BIT"
2826 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2827 (not:SI (match_dup 3))))
2828 (set (match_dup 0) (not:SI (match_dup 4)))]
2829 ""
2830 )
2831
2832 (define_insn "*andsi_iorsi3_notsi"
2833 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2834 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2835 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2836 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2837 "TARGET_32BIT"
2838 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2839 [(set_attr "length" "8")
2840 (set_attr "ce_count" "2")
2841 (set_attr "predicable" "yes")]
2842 )
2843
2844 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2845 ; insns are available?
2846 (define_split
2847 [(set (match_operand:SI 0 "s_register_operand" "")
2848 (match_operator:SI 1 "logical_binary_operator"
2849 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2850 (match_operand:SI 3 "const_int_operand" "")
2851 (match_operand:SI 4 "const_int_operand" ""))
2852 (match_operator:SI 9 "logical_binary_operator"
2853 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2854 (match_operand:SI 6 "const_int_operand" ""))
2855 (match_operand:SI 7 "s_register_operand" "")])]))
2856 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2857 "TARGET_32BIT
2858 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2859 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2860 [(set (match_dup 8)
2861 (match_op_dup 1
2862 [(ashift:SI (match_dup 2) (match_dup 4))
2863 (match_dup 5)]))
2864 (set (match_dup 0)
2865 (match_op_dup 1
2866 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2867 (match_dup 7)]))]
2868 "
2869 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2870 ")
2871
2872 (define_split
2873 [(set (match_operand:SI 0 "s_register_operand" "")
2874 (match_operator:SI 1 "logical_binary_operator"
2875 [(match_operator:SI 9 "logical_binary_operator"
2876 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2877 (match_operand:SI 6 "const_int_operand" ""))
2878 (match_operand:SI 7 "s_register_operand" "")])
2879 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2880 (match_operand:SI 3 "const_int_operand" "")
2881 (match_operand:SI 4 "const_int_operand" ""))]))
2882 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2883 "TARGET_32BIT
2884 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2885 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2886 [(set (match_dup 8)
2887 (match_op_dup 1
2888 [(ashift:SI (match_dup 2) (match_dup 4))
2889 (match_dup 5)]))
2890 (set (match_dup 0)
2891 (match_op_dup 1
2892 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2893 (match_dup 7)]))]
2894 "
2895 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2896 ")
2897
2898 (define_split
2899 [(set (match_operand:SI 0 "s_register_operand" "")
2900 (match_operator:SI 1 "logical_binary_operator"
2901 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2902 (match_operand:SI 3 "const_int_operand" "")
2903 (match_operand:SI 4 "const_int_operand" ""))
2904 (match_operator:SI 9 "logical_binary_operator"
2905 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2906 (match_operand:SI 6 "const_int_operand" ""))
2907 (match_operand:SI 7 "s_register_operand" "")])]))
2908 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2909 "TARGET_32BIT
2910 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2911 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2912 [(set (match_dup 8)
2913 (match_op_dup 1
2914 [(ashift:SI (match_dup 2) (match_dup 4))
2915 (match_dup 5)]))
2916 (set (match_dup 0)
2917 (match_op_dup 1
2918 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2919 (match_dup 7)]))]
2920 "
2921 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2922 ")
2923
2924 (define_split
2925 [(set (match_operand:SI 0 "s_register_operand" "")
2926 (match_operator:SI 1 "logical_binary_operator"
2927 [(match_operator:SI 9 "logical_binary_operator"
2928 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2929 (match_operand:SI 6 "const_int_operand" ""))
2930 (match_operand:SI 7 "s_register_operand" "")])
2931 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2932 (match_operand:SI 3 "const_int_operand" "")
2933 (match_operand:SI 4 "const_int_operand" ""))]))
2934 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2935 "TARGET_32BIT
2936 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2937 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2938 [(set (match_dup 8)
2939 (match_op_dup 1
2940 [(ashift:SI (match_dup 2) (match_dup 4))
2941 (match_dup 5)]))
2942 (set (match_dup 0)
2943 (match_op_dup 1
2944 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2945 (match_dup 7)]))]
2946 "
2947 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2948 ")
2949 \f
2950
2951 ;; Minimum and maximum insns
2952
2953 (define_expand "smaxsi3"
2954 [(parallel [
2955 (set (match_operand:SI 0 "s_register_operand" "")
2956 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2957 (match_operand:SI 2 "arm_rhs_operand" "")))
2958 (clobber (reg:CC CC_REGNUM))])]
2959 "TARGET_32BIT"
2960 "
2961 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2962 {
2963 /* No need for a clobber of the condition code register here. */
2964 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2965 gen_rtx_SMAX (SImode, operands[1],
2966 operands[2])));
2967 DONE;
2968 }
2969 ")
2970
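;; The value X, asr #31 is zero when X is non-negative and all ones when X is
;; negative, so a single BIC, ORR or AND computes smax (X, 0), smax (X, -1)
;; and smin (X, 0) respectively without needing a compare.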
2971 (define_insn "*smax_0"
2972 [(set (match_operand:SI 0 "s_register_operand" "=r")
2973 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2974 (const_int 0)))]
2975 "TARGET_32BIT"
2976 "bic%?\\t%0, %1, %1, asr #31"
2977 [(set_attr "predicable" "yes")]
2978 )
2979
2980 (define_insn "*smax_m1"
2981 [(set (match_operand:SI 0 "s_register_operand" "=r")
2982 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2983 (const_int -1)))]
2984 "TARGET_32BIT"
2985 "orr%?\\t%0, %1, %1, asr #31"
2986 [(set_attr "predicable" "yes")]
2987 )
2988
2989 (define_insn "*arm_smax_insn"
2990 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2991 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2992 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2993 (clobber (reg:CC CC_REGNUM))]
2994 "TARGET_ARM"
2995 "@
2996 cmp\\t%1, %2\;movlt\\t%0, %2
2997 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2998 [(set_attr "conds" "clob")
2999 (set_attr "length" "8,12")]
3000 )
3001
3002 (define_expand "sminsi3"
3003 [(parallel [
3004 (set (match_operand:SI 0 "s_register_operand" "")
3005 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3006 (match_operand:SI 2 "arm_rhs_operand" "")))
3007 (clobber (reg:CC CC_REGNUM))])]
3008 "TARGET_32BIT"
3009 "
3010 if (operands[2] == const0_rtx)
3011 {
3012 /* No need for a clobber of the condition code register here. */
3013 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3014 gen_rtx_SMIN (SImode, operands[1],
3015 operands[2])));
3016 DONE;
3017 }
3018 ")
3019
3020 (define_insn "*smin_0"
3021 [(set (match_operand:SI 0 "s_register_operand" "=r")
3022 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3023 (const_int 0)))]
3024 "TARGET_32BIT"
3025 "and%?\\t%0, %1, %1, asr #31"
3026 [(set_attr "predicable" "yes")]
3027 )
3028
3029 (define_insn "*arm_smin_insn"
3030 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3031 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3032 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3033 (clobber (reg:CC CC_REGNUM))]
3034 "TARGET_ARM"
3035 "@
3036 cmp\\t%1, %2\;movge\\t%0, %2
3037 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3038 [(set_attr "conds" "clob")
3039 (set_attr "length" "8,12")]
3040 )
3041
3042 (define_expand "umaxsi3"
3043 [(parallel [
3044 (set (match_operand:SI 0 "s_register_operand" "")
3045 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3046 (match_operand:SI 2 "arm_rhs_operand" "")))
3047 (clobber (reg:CC CC_REGNUM))])]
3048 "TARGET_32BIT"
3049 ""
3050 )
3051
3052 (define_insn "*arm_umaxsi3"
3053 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3054 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3055 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3056 (clobber (reg:CC CC_REGNUM))]
3057 "TARGET_ARM"
3058 "@
3059 cmp\\t%1, %2\;movcc\\t%0, %2
3060 cmp\\t%1, %2\;movcs\\t%0, %1
3061 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3062 [(set_attr "conds" "clob")
3063 (set_attr "length" "8,8,12")]
3064 )
3065
3066 (define_expand "uminsi3"
3067 [(parallel [
3068 (set (match_operand:SI 0 "s_register_operand" "")
3069 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3070 (match_operand:SI 2 "arm_rhs_operand" "")))
3071 (clobber (reg:CC CC_REGNUM))])]
3072 "TARGET_32BIT"
3073 ""
3074 )
3075
3076 (define_insn "*arm_uminsi3"
3077 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3078 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3079 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3080 (clobber (reg:CC CC_REGNUM))]
3081 "TARGET_ARM"
3082 "@
3083 cmp\\t%1, %2\;movcs\\t%0, %2
3084 cmp\\t%1, %2\;movcc\\t%0, %1
3085 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3086 [(set_attr "conds" "clob")
3087 (set_attr "length" "8,8,12")]
3088 )
3089
3090 (define_insn "*store_minmaxsi"
3091 [(set (match_operand:SI 0 "memory_operand" "=m")
3092 (match_operator:SI 3 "minmax_operator"
3093 [(match_operand:SI 1 "s_register_operand" "r")
3094 (match_operand:SI 2 "s_register_operand" "r")]))
3095 (clobber (reg:CC CC_REGNUM))]
3096 "TARGET_32BIT"
3097 "*
3098 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3099 operands[1], operands[2]);
3100 output_asm_insn (\"cmp\\t%1, %2\", operands);
3101 if (TARGET_THUMB2)
3102 output_asm_insn (\"ite\t%d3\", operands);
3103 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3104 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3105 return \"\";
3106 "
3107 [(set_attr "conds" "clob")
3108 (set (attr "length")
3109 (if_then_else (eq_attr "is_thumb" "yes")
3110 (const_int 14)
3111 (const_int 12)))
3112 (set_attr "type" "store1")]
3113 )
3114
3115 ; Reject the frame pointer in operand[1], since reloading this after
3116 ; it has been eliminated can cause carnage.
3117 (define_insn "*minmax_arithsi"
3118 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3119 (match_operator:SI 4 "shiftable_operator"
3120 [(match_operator:SI 5 "minmax_operator"
3121 [(match_operand:SI 2 "s_register_operand" "r,r")
3122 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3123 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3124 (clobber (reg:CC CC_REGNUM))]
3125 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3126 "*
3127 {
3128 enum rtx_code code = GET_CODE (operands[4]);
3129 bool need_else;
3130
3131 if (which_alternative != 0 || operands[3] != const0_rtx
3132 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
3133 need_else = true;
3134 else
3135 need_else = false;
3136
3137 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3138 operands[2], operands[3]);
3139 output_asm_insn (\"cmp\\t%2, %3\", operands);
3140 if (TARGET_THUMB2)
3141 {
3142 if (need_else)
3143 output_asm_insn (\"ite\\t%d5\", operands);
3144 else
3145 output_asm_insn (\"it\\t%d5\", operands);
3146 }
3147 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3148 if (need_else)
3149 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3150 return \"\";
3151 }"
3152 [(set_attr "conds" "clob")
3153 (set (attr "length")
3154 (if_then_else (eq_attr "is_thumb" "yes")
3155 (const_int 14)
3156 (const_int 12)))]
3157 )
3158
3159 \f
3160 ;; Shift and rotation insns
3161
3162 (define_expand "ashldi3"
3163 [(set (match_operand:DI 0 "s_register_operand" "")
3164 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3165 (match_operand:SI 2 "reg_or_int_operand" "")))]
3166 "TARGET_32BIT"
3167 "
3168 if (GET_CODE (operands[2]) == CONST_INT)
3169 {
3170 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3171 {
3172 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3173 DONE;
3174 }
3175 /* Ideally we shouldn't fail here if we could prove that operands[1]
3176 ends up living in an iwmmxt register; otherwise it is cheaper
3177 to generate the alternative code than to move values to
3178 iwmmxt regs and back. */
3179 FAIL;
3180 }
3181 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3182 FAIL;
3183 "
3184 )
3185
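;; A 64-bit left shift by one: MOVS shifts the low word and leaves the
;; displaced bit 31 in the carry flag, then ADC doubles the high word and
;; adds that carry in.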
3186 (define_insn "arm_ashldi3_1bit"
3187 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3188 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3189 (const_int 1)))
3190 (clobber (reg:CC CC_REGNUM))]
3191 "TARGET_32BIT"
3192 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3193 [(set_attr "conds" "clob")
3194 (set_attr "length" "8")]
3195 )
3196
3197 (define_expand "ashlsi3"
3198 [(set (match_operand:SI 0 "s_register_operand" "")
3199 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3200 (match_operand:SI 2 "arm_rhs_operand" "")))]
3201 "TARGET_EITHER"
3202 "
3203 if (GET_CODE (operands[2]) == CONST_INT
3204 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3205 {
3206 emit_insn (gen_movsi (operands[0], const0_rtx));
3207 DONE;
3208 }
3209 "
3210 )
3211
3212 (define_insn "*thumb1_ashlsi3"
3213 [(set (match_operand:SI 0 "register_operand" "=l,l")
3214 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3215 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3216 "TARGET_THUMB1"
3217 "lsl\\t%0, %1, %2"
3218 [(set_attr "length" "2")]
3219 )
3220
3221 (define_expand "ashrdi3"
3222 [(set (match_operand:DI 0 "s_register_operand" "")
3223 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3224 (match_operand:SI 2 "reg_or_int_operand" "")))]
3225 "TARGET_32BIT"
3226 "
3227 if (GET_CODE (operands[2]) == CONST_INT)
3228 {
3229 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3230 {
3231 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3232 DONE;
3233 }
3234 /* Ideally we shouldn't fail here if we could prove that operands[1]
3235 ends up living in an iwmmxt register; otherwise it is cheaper
3236 to generate the alternative code than to move values to
3237 iwmmxt regs and back. */
3238 FAIL;
3239 }
3240 else if (!TARGET_REALLY_IWMMXT)
3241 FAIL;
3242 "
3243 )
3244
3245 (define_insn "arm_ashrdi3_1bit"
3246 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3247 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3248 (const_int 1)))
3249 (clobber (reg:CC CC_REGNUM))]
3250 "TARGET_32BIT"
3251 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3252 [(set_attr "conds" "clob")
3253 (set_attr "length" "8")]
3254 )
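
;; Here MOVS arithmetic-shifts the high word right by one and leaves the bit
;; shifted out in the carry flag; the RRX then rotates the low word right by
;; one through carry, so that bit becomes bit 31 of the low word.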
3255
3256 (define_expand "ashrsi3"
3257 [(set (match_operand:SI 0 "s_register_operand" "")
3258 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3259 (match_operand:SI 2 "arm_rhs_operand" "")))]
3260 "TARGET_EITHER"
3261 "
3262 if (GET_CODE (operands[2]) == CONST_INT
3263 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3264 operands[2] = GEN_INT (31);
3265 "
3266 )
3267
3268 (define_insn "*thumb1_ashrsi3"
3269 [(set (match_operand:SI 0 "register_operand" "=l,l")
3270 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3271 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3272 "TARGET_THUMB1"
3273 "asr\\t%0, %1, %2"
3274 [(set_attr "length" "2")]
3275 )
3276
3277 (define_expand "lshrdi3"
3278 [(set (match_operand:DI 0 "s_register_operand" "")
3279 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3280 (match_operand:SI 2 "reg_or_int_operand" "")))]
3281 "TARGET_32BIT"
3282 "
3283 if (GET_CODE (operands[2]) == CONST_INT)
3284 {
3285 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3286 {
3287 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3288 DONE;
3289 }
3290 /* Ideally we would not fail here if we knew that operands[1] will
3291 already end up living in an iwmmxt register; otherwise it is
3292 cheaper to generate the alternative code than to move values
3293 into iwmmxt registers and back.  */
3294 FAIL;
3295 }
3296 else if (!TARGET_REALLY_IWMMXT)
3297 FAIL;
3298 "
3299 )
3300
3301 (define_insn "arm_lshrdi3_1bit"
3302 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3303 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3304 (const_int 1)))
3305 (clobber (reg:CC CC_REGNUM))]
3306 "TARGET_32BIT"
3307 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3308 [(set_attr "conds" "clob")
3309 (set_attr "length" "8")]
3310 )
3311
3312 (define_expand "lshrsi3"
3313 [(set (match_operand:SI 0 "s_register_operand" "")
3314 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3315 (match_operand:SI 2 "arm_rhs_operand" "")))]
3316 "TARGET_EITHER"
3317 "
3318 if (GET_CODE (operands[2]) == CONST_INT
3319 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3320 {
3321 emit_insn (gen_movsi (operands[0], const0_rtx));
3322 DONE;
3323 }
3324 "
3325 )
3326
3327 (define_insn "*thumb1_lshrsi3"
3328 [(set (match_operand:SI 0 "register_operand" "=l,l")
3329 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3330 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3331 "TARGET_THUMB1"
3332 "lsr\\t%0, %1, %2"
3333 [(set_attr "length" "2")]
3334 )
3335
3336 (define_expand "rotlsi3"
3337 [(set (match_operand:SI 0 "s_register_operand" "")
3338 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3339 (match_operand:SI 2 "reg_or_int_operand" "")))]
3340 "TARGET_32BIT"
3341 "
3342 if (GET_CODE (operands[2]) == CONST_INT)
3343 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3344 else
3345 {
3346 rtx reg = gen_reg_rtx (SImode);
3347 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3348 operands[2] = reg;
3349 }
3350 "
3351 )
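
;; The core only has a rotate-right, so a rotate-left by N is rewritten above
;; as a rotate-right by (32 - N) % 32; for example, a rotate-left by 8 becomes
;; "ror #24".  For a variable count, 32 minus the count is first computed into
;; a fresh register.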
3352
3353 (define_expand "rotrsi3"
3354 [(set (match_operand:SI 0 "s_register_operand" "")
3355 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3356 (match_operand:SI 2 "arm_rhs_operand" "")))]
3357 "TARGET_EITHER"
3358 "
3359 if (TARGET_32BIT)
3360 {
3361 if (GET_CODE (operands[2]) == CONST_INT
3362 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3363 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3364 }
3365 else /* TARGET_THUMB1 */
3366 {
3367 if (GET_CODE (operands[2]) == CONST_INT)
3368 operands[2] = force_reg (SImode, operands[2]);
3369 }
3370 "
3371 )
3372
3373 (define_insn "*thumb1_rotrsi3"
3374 [(set (match_operand:SI 0 "register_operand" "=l")
3375 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3376 (match_operand:SI 2 "register_operand" "l")))]
3377 "TARGET_THUMB1"
3378 "ror\\t%0, %0, %2"
3379 [(set_attr "length" "2")]
3380 )
3381
3382 (define_insn "*arm_shiftsi3"
3383 [(set (match_operand:SI 0 "s_register_operand" "=r")
3384 (match_operator:SI 3 "shift_operator"
3385 [(match_operand:SI 1 "s_register_operand" "r")
3386 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3387 "TARGET_32BIT"
3388 "* return arm_output_shift(operands, 0);"
3389 [(set_attr "predicable" "yes")
3390 (set_attr "shift" "1")
3391 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3392 (const_string "alu_shift")
3393 (const_string "alu_shift_reg")))]
3394 )
3395
3396 (define_insn "*shiftsi3_compare0"
3397 [(set (reg:CC_NOOV CC_REGNUM)
3398 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3399 [(match_operand:SI 1 "s_register_operand" "r")
3400 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3401 (const_int 0)))
3402 (set (match_operand:SI 0 "s_register_operand" "=r")
3403 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3404 "TARGET_32BIT"
3405 "* return arm_output_shift(operands, 1);"
3406 [(set_attr "conds" "set")
3407 (set_attr "shift" "1")
3408 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3409 (const_string "alu_shift")
3410 (const_string "alu_shift_reg")))]
3411 )
3412
3413 (define_insn "*shiftsi3_compare0_scratch"
3414 [(set (reg:CC_NOOV CC_REGNUM)
3415 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3416 [(match_operand:SI 1 "s_register_operand" "r")
3417 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3418 (const_int 0)))
3419 (clobber (match_scratch:SI 0 "=r"))]
3420 "TARGET_32BIT"
3421 "* return arm_output_shift(operands, 1);"
3422 [(set_attr "conds" "set")
3423 (set_attr "shift" "1")]
3424 )
3425
3426 (define_insn "*arm_notsi_shiftsi"
3427 [(set (match_operand:SI 0 "s_register_operand" "=r")
3428 (not:SI (match_operator:SI 3 "shift_operator"
3429 [(match_operand:SI 1 "s_register_operand" "r")
3430 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3431 "TARGET_ARM"
3432 "mvn%?\\t%0, %1%S3"
3433 [(set_attr "predicable" "yes")
3434 (set_attr "shift" "1")
3435 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3436 (const_string "alu_shift")
3437 (const_string "alu_shift_reg")))]
3438 )
3439
3440 (define_insn "*arm_notsi_shiftsi_compare0"
3441 [(set (reg:CC_NOOV CC_REGNUM)
3442 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3443 [(match_operand:SI 1 "s_register_operand" "r")
3444 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3445 (const_int 0)))
3446 (set (match_operand:SI 0 "s_register_operand" "=r")
3447 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3448 "TARGET_ARM"
3449 "mvn%.\\t%0, %1%S3"
3450 [(set_attr "conds" "set")
3451 (set_attr "shift" "1")
3452 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3453 (const_string "alu_shift")
3454 (const_string "alu_shift_reg")))]
3455 )
3456
3457 (define_insn "*arm_not_shiftsi_compare0_scratch"
3458 [(set (reg:CC_NOOV CC_REGNUM)
3459 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3460 [(match_operand:SI 1 "s_register_operand" "r")
3461 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3462 (const_int 0)))
3463 (clobber (match_scratch:SI 0 "=r"))]
3464 "TARGET_ARM"
3465 "mvn%.\\t%0, %1%S3"
3466 [(set_attr "conds" "set")
3467 (set_attr "shift" "1")
3468 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3469 (const_string "alu_shift")
3470 (const_string "alu_shift_reg")))]
3471 )
3472
3473 ;; We don't really have extzv, but defining this using shifts helps
3474 ;; to reduce register pressure later on.
3475
3476 (define_expand "extzv"
3477 [(set (match_dup 4)
3478 (ashift:SI (match_operand:SI 1 "register_operand" "")
3479 (match_operand:SI 2 "const_int_operand" "")))
3480 (set (match_operand:SI 0 "register_operand" "")
3481 (lshiftrt:SI (match_dup 4)
3482 (match_operand:SI 3 "const_int_operand" "")))]
3483 "TARGET_THUMB1 || arm_arch_thumb2"
3484 "
3485 {
3486 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3487 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3488
3489 if (arm_arch_thumb2)
3490 {
3491 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3492 operands[3]));
3493 DONE;
3494 }
3495
3496 operands[3] = GEN_INT (rshift);
3497
3498 if (lshift == 0)
3499 {
3500 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3501 DONE;
3502 }
3503
3504 operands[2] = GEN_INT (lshift);
3505 operands[4] = gen_reg_rtx (SImode);
3506 }"
3507 )
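
;; As an example of the shift-pair expansion above: extracting an 8-bit field
;; that starts at bit 4 becomes a left shift by 32 - 8 - 4 = 20 followed by a
;; logical right shift by 32 - 8 = 24, leaving the field zero-extended in the
;; low bits of the result.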
3508
3509 (define_insn "extv"
3510 [(set (match_operand:SI 0 "s_register_operand" "=r")
3511 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3512 (match_operand:SI 2 "const_int_operand" "M")
3513 (match_operand:SI 3 "const_int_operand" "M")))]
3514 "arm_arch_thumb2"
3515 "sbfx%?\t%0, %1, %3, %2"
3516 [(set_attr "length" "4")
3517 (set_attr "predicable" "yes")]
3518 )
3519
3520 (define_insn "extzv_t2"
3521 [(set (match_operand:SI 0 "s_register_operand" "=r")
3522 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3523 (match_operand:SI 2 "const_int_operand" "M")
3524 (match_operand:SI 3 "const_int_operand" "M")))]
3525 "arm_arch_thumb2"
3526 "ubfx%?\t%0, %1, %3, %2"
3527 [(set_attr "length" "4")
3528 (set_attr "predicable" "yes")]
3529 )
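
;; Note the operand order in the two templates above: sbfx/ubfx take the lsb
;; (operand 3) before the width (operand 2), i.e. "ubfx Rd, Rn, #lsb, #width".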
3530
3531 \f
3532 ;; Unary arithmetic insns
3533
3534 (define_expand "negdi2"
3535 [(parallel
3536 [(set (match_operand:DI 0 "s_register_operand" "")
3537 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3538 (clobber (reg:CC CC_REGNUM))])]
3539 "TARGET_EITHER"
3540 ""
3541 )
3542
3543 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3544 ;; The first alternative allows the common case of a *full* overlap.
3545 (define_insn "*arm_negdi2"
3546 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3547 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
3548 (clobber (reg:CC CC_REGNUM))]
3549 "TARGET_ARM"
3550 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3551 [(set_attr "conds" "clob")
3552 (set_attr "length" "8")]
3553 )
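
;; RSBS computes 0 - %Q1 and leaves the carry flag clear exactly when that
;; subtraction borrowed; RSC then computes 0 - %R1 - !carry, propagating the
;; borrow into the high word and completing the 64-bit negation.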
3554
3555 (define_insn "*thumb1_negdi2"
3556 [(set (match_operand:DI 0 "register_operand" "=&l")
3557 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3558 (clobber (reg:CC CC_REGNUM))]
3559 "TARGET_THUMB1"
3560 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3561 [(set_attr "length" "6")]
3562 )
3563
3564 (define_expand "negsi2"
3565 [(set (match_operand:SI 0 "s_register_operand" "")
3566 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3567 "TARGET_EITHER"
3568 ""
3569 )
3570
3571 (define_insn "*arm_negsi2"
3572 [(set (match_operand:SI 0 "s_register_operand" "=r")
3573 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3574 "TARGET_32BIT"
3575 "rsb%?\\t%0, %1, #0"
3576 [(set_attr "predicable" "yes")]
3577 )
3578
3579 (define_insn "*thumb1_negsi2"
3580 [(set (match_operand:SI 0 "register_operand" "=l")
3581 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3582 "TARGET_THUMB1"
3583 "neg\\t%0, %1"
3584 [(set_attr "length" "2")]
3585 )
3586
3587 (define_expand "negsf2"
3588 [(set (match_operand:SF 0 "s_register_operand" "")
3589 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3590 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3591 ""
3592 )
3593
3594 (define_expand "negdf2"
3595 [(set (match_operand:DF 0 "s_register_operand" "")
3596 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3597 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3598 "")
3599
3600 ;; abssi2 doesn't really clobber the condition codes if a different register
3601 ;; is being set. To keep things simple, assume during rtl manipulations that
3602 ;; it does, but tell the final scan operator the truth. Similarly for
3603 ;; (neg (abs...))
3604
3605 (define_expand "abssi2"
3606 [(parallel
3607 [(set (match_operand:SI 0 "s_register_operand" "")
3608 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3609 (clobber (match_dup 2))])]
3610 "TARGET_EITHER"
3611 "
3612 if (TARGET_THUMB1)
3613 operands[2] = gen_rtx_SCRATCH (SImode);
3614 else
3615 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3616 ")
3617
3618 (define_insn "*arm_abssi2"
3619 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3620 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3621 (clobber (reg:CC CC_REGNUM))]
3622 "TARGET_ARM"
3623 "@
3624 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3625 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3626 [(set_attr "conds" "clob,*")
3627 (set_attr "shift" "1")
3628 ;; predicable can't be set based on the variant, so left as no
3629 (set_attr "length" "8")]
3630 )
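
;; The second alternative above uses the branchless identity
;; abs (x) = (x ^ (x >> 31)) - (x >> 31), where the arithmetic shift yields 0
;; for non-negative x and -1 for negative x; the first alternative compares
;; against zero and conditionally reverse-subtracts instead.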
3631
3632 (define_insn_and_split "*thumb1_abssi2"
3633 [(set (match_operand:SI 0 "s_register_operand" "=l")
3634 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3635 (clobber (match_scratch:SI 2 "=&l"))]
3636 "TARGET_THUMB1"
3637 "#"
3638 "TARGET_THUMB1 && reload_completed"
3639 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3640 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3641 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3642 ""
3643 [(set_attr "length" "6")]
3644 )
3645
3646 (define_insn "*arm_neg_abssi2"
3647 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3648 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3649 (clobber (reg:CC CC_REGNUM))]
3650 "TARGET_ARM"
3651 "@
3652 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3653 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3654 [(set_attr "conds" "clob,*")
3655 (set_attr "shift" "1")
3656 ;; predicable can't be set based on the variant, so left as no
3657 (set_attr "length" "8")]
3658 )
3659
3660 (define_insn_and_split "*thumb1_neg_abssi2"
3661 [(set (match_operand:SI 0 "s_register_operand" "=l")
3662 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3663 (clobber (match_scratch:SI 2 "=&l"))]
3664 "TARGET_THUMB1"
3665 "#"
3666 "TARGET_THUMB1 && reload_completed"
3667 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3668 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3669 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3670 ""
3671 [(set_attr "length" "6")]
3672 )
3673
3674 (define_expand "abssf2"
3675 [(set (match_operand:SF 0 "s_register_operand" "")
3676 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3677 "TARGET_32BIT && TARGET_HARD_FLOAT"
3678 "")
3679
3680 (define_expand "absdf2"
3681 [(set (match_operand:DF 0 "s_register_operand" "")
3682 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3683 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3684 "")
3685
3686 (define_expand "sqrtsf2"
3687 [(set (match_operand:SF 0 "s_register_operand" "")
3688 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3689 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3690 "")
3691
3692 (define_expand "sqrtdf2"
3693 [(set (match_operand:DF 0 "s_register_operand" "")
3694 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3695 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3696 "")
3697
3698 (define_insn_and_split "one_cmpldi2"
3699 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3700 (not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3701 "TARGET_32BIT"
3702 "#"
3703 "TARGET_32BIT && reload_completed"
3704 [(set (match_dup 0) (not:SI (match_dup 1)))
3705 (set (match_dup 2) (not:SI (match_dup 3)))]
3706 "
3707 {
3708 operands[2] = gen_highpart (SImode, operands[0]);
3709 operands[0] = gen_lowpart (SImode, operands[0]);
3710 operands[3] = gen_highpart (SImode, operands[1]);
3711 operands[1] = gen_lowpart (SImode, operands[1]);
3712 }"
3713 [(set_attr "length" "8")
3714 (set_attr "predicable" "yes")]
3715 )
3716
3717 (define_expand "one_cmplsi2"
3718 [(set (match_operand:SI 0 "s_register_operand" "")
3719 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3720 "TARGET_EITHER"
3721 ""
3722 )
3723
3724 (define_insn "*arm_one_cmplsi2"
3725 [(set (match_operand:SI 0 "s_register_operand" "=r")
3726 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3727 "TARGET_32BIT"
3728 "mvn%?\\t%0, %1"
3729 [(set_attr "predicable" "yes")]
3730 )
3731
3732 (define_insn "*thumb1_one_cmplsi2"
3733 [(set (match_operand:SI 0 "register_operand" "=l")
3734 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3735 "TARGET_THUMB1"
3736 "mvn\\t%0, %1"
3737 [(set_attr "length" "2")]
3738 )
3739
3740 (define_insn "*notsi_compare0"
3741 [(set (reg:CC_NOOV CC_REGNUM)
3742 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3743 (const_int 0)))
3744 (set (match_operand:SI 0 "s_register_operand" "=r")
3745 (not:SI (match_dup 1)))]
3746 "TARGET_32BIT"
3747 "mvn%.\\t%0, %1"
3748 [(set_attr "conds" "set")]
3749 )
3750
3751 (define_insn "*notsi_compare0_scratch"
3752 [(set (reg:CC_NOOV CC_REGNUM)
3753 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3754 (const_int 0)))
3755 (clobber (match_scratch:SI 0 "=r"))]
3756 "TARGET_32BIT"
3757 "mvn%.\\t%0, %1"
3758 [(set_attr "conds" "set")]
3759 )
3760 \f
3761 ;; Fixed <--> Floating conversion insns
3762
3763 (define_expand "floatsihf2"
3764 [(set (match_operand:HF 0 "general_operand" "")
3765 (float:HF (match_operand:SI 1 "general_operand" "")))]
3766 "TARGET_EITHER"
3767 "
3768 {
3769 rtx op1 = gen_reg_rtx (SFmode);
3770 expand_float (op1, operands[1], 0);
3771 op1 = convert_to_mode (HFmode, op1, 0);
3772 emit_move_insn (operands[0], op1);
3773 DONE;
3774 }"
3775 )
3776
3777 (define_expand "floatdihf2"
3778 [(set (match_operand:HF 0 "general_operand" "")
3779 (float:HF (match_operand:DI 1 "general_operand" "")))]
3780 "TARGET_EITHER"
3781 "
3782 {
3783 rtx op1 = gen_reg_rtx (SFmode);
3784 expand_float (op1, operands[1], 0);
3785 op1 = convert_to_mode (HFmode, op1, 0);
3786 emit_move_insn (operands[0], op1);
3787 DONE;
3788 }"
3789 )
3790
3791 (define_expand "floatsisf2"
3792 [(set (match_operand:SF 0 "s_register_operand" "")
3793 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3794 "TARGET_32BIT && TARGET_HARD_FLOAT"
3795 "
3796 if (TARGET_MAVERICK)
3797 {
3798 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3799 DONE;
3800 }
3801 ")
3802
3803 (define_expand "floatsidf2"
3804 [(set (match_operand:DF 0 "s_register_operand" "")
3805 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3806 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3807 "
3808 if (TARGET_MAVERICK)
3809 {
3810 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3811 DONE;
3812 }
3813 ")
3814
3815 (define_expand "fix_trunchfsi2"
3816 [(set (match_operand:SI 0 "general_operand" "")
3817 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3818 "TARGET_EITHER"
3819 "
3820 {
3821 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3822 expand_fix (operands[0], op1, 0);
3823 DONE;
3824 }"
3825 )
3826
3827 (define_expand "fix_trunchfdi2"
3828 [(set (match_operand:DI 0 "general_operand" "")
3829 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
3830 "TARGET_EITHER"
3831 "
3832 {
3833 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3834 expand_fix (operands[0], op1, 0);
3835 DONE;
3836 }"
3837 )
3838
3839 (define_expand "fix_truncsfsi2"
3840 [(set (match_operand:SI 0 "s_register_operand" "")
3841 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3842 "TARGET_32BIT && TARGET_HARD_FLOAT"
3843 "
3844 if (TARGET_MAVERICK)
3845 {
3846 if (!cirrus_fp_register (operands[0], SImode))
3847 operands[0] = force_reg (SImode, operands[0]);
3848 if (!cirrus_fp_register (operands[1], SFmode))
3849 operands[1] = force_reg (SFmode, operands[1]);
3850 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3851 DONE;
3852 }
3853 ")
3854
3855 (define_expand "fix_truncdfsi2"
3856 [(set (match_operand:SI 0 "s_register_operand" "")
3857 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3858 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3859 "
3860 if (TARGET_MAVERICK)
3861 {
3862 if (!cirrus_fp_register (operands[1], DFmode))
3863 operands[1] = force_reg (DFmode, operands[1]);
3864 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3865 DONE;
3866 }
3867 ")
3868
3869 ;; Truncation insns
3870
3871 (define_expand "truncdfsf2"
3872 [(set (match_operand:SF 0 "s_register_operand" "")
3873 (float_truncate:SF
3874 (match_operand:DF 1 "s_register_operand" "")))]
3875 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3876 ""
3877 )
3878
3879 /* DFmode -> HFmode conversions have to go through SFmode. */
3880 (define_expand "truncdfhf2"
3881 [(set (match_operand:HF 0 "general_operand" "")
3882 (float_truncate:HF
3883 (match_operand:DF 1 "general_operand" "")))]
3884 "TARGET_EITHER"
3885 "
3886 {
3887 rtx op1;
3888 op1 = convert_to_mode (SFmode, operands[1], 0);
3889 op1 = convert_to_mode (HFmode, op1, 0);
3890 emit_move_insn (operands[0], op1);
3891 DONE;
3892 }"
3893 )
3894 \f
3895 ;; Zero and sign extension instructions.
3896
3897 (define_expand "zero_extendsidi2"
3898 [(set (match_operand:DI 0 "s_register_operand" "")
3899 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3900 "TARGET_32BIT"
3901 ""
3902 )
3903
3904 (define_insn "*arm_zero_extendsidi2"
3905 [(set (match_operand:DI 0 "s_register_operand" "=r")
3906 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3907 "TARGET_ARM"
3908 "*
3909 if (REGNO (operands[1])
3910 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3911 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3912 return \"mov%?\\t%R0, #0\";
3913 "
3914 [(set_attr "length" "8")
3915 (set_attr "predicable" "yes")]
3916 )
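
;; The REGNO check above skips the low-word move when the source register is
;; already the register that will hold the least significant word of the
;; destination (which one that is depends on WORDS_BIG_ENDIAN); clearing the
;; high word with "mov %R0, #0" is always required.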
3917
3918 (define_expand "zero_extendqidi2"
3919 [(set (match_operand:DI 0 "s_register_operand" "")
3920 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3921 "TARGET_32BIT"
3922 ""
3923 )
3924
3925 (define_insn "*arm_zero_extendqidi2"
3926 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3927 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3928 "TARGET_ARM"
3929 "@
3930 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3931 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3932 [(set_attr "length" "8")
3933 (set_attr "predicable" "yes")
3934 (set_attr "type" "*,load_byte")
3935 (set_attr "pool_range" "*,4092")
3936 (set_attr "neg_pool_range" "*,4084")]
3937 )
3938
3939 (define_expand "extendsidi2"
3940 [(set (match_operand:DI 0 "s_register_operand" "")
3941 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3942 "TARGET_32BIT"
3943 ""
3944 )
3945
3946 (define_insn "*arm_extendsidi2"
3947 [(set (match_operand:DI 0 "s_register_operand" "=r")
3948 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3949 "TARGET_ARM"
3950 "*
3951 if (REGNO (operands[1])
3952 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3953 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3954 return \"mov%?\\t%R0, %Q0, asr #31\";
3955 "
3956 [(set_attr "length" "8")
3957 (set_attr "shift" "1")
3958 (set_attr "predicable" "yes")]
3959 )
3960
3961 (define_expand "zero_extendhisi2"
3962 [(set (match_dup 2)
3963 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3964 (const_int 16)))
3965 (set (match_operand:SI 0 "s_register_operand" "")
3966 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3967 "TARGET_EITHER"
3968 "
3969 {
3970 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3971 {
3972 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3973 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3974 DONE;
3975 }
3976
3977 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3978 {
3979 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3980 DONE;
3981 }
3982
3983 if (!s_register_operand (operands[1], HImode))
3984 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3985
3986 if (arm_arch6)
3987 {
3988 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3989 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3990 DONE;
3991 }
3992
3993 operands[1] = gen_lowpart (SImode, operands[1]);
3994 operands[2] = gen_reg_rtx (SImode);
3995 }"
3996 )
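
;; When neither uxth nor a halfword load applies, the fallback above
;; zero-extends through a fresh temporary by shifting left 16 bits and then
;; logically right 16 bits, e.g. roughly "lsl r3, r1, #16; lsr r0, r3, #16"
;; on Thumb-1 (register choice illustrative).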
3997
3998 (define_insn "*thumb1_zero_extendhisi2"
3999 [(set (match_operand:SI 0 "register_operand" "=l")
4000 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4001 "TARGET_THUMB1 && !arm_arch6"
4002 "*
4003 rtx mem = XEXP (operands[1], 0);
4004
4005 if (GET_CODE (mem) == CONST)
4006 mem = XEXP (mem, 0);
4007
4008 if (GET_CODE (mem) == LABEL_REF)
4009 return \"ldr\\t%0, %1\";
4010
4011 if (GET_CODE (mem) == PLUS)
4012 {
4013 rtx a = XEXP (mem, 0);
4014 rtx b = XEXP (mem, 1);
4015
4016 /* This can happen due to bugs in reload. */
4017 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4018 {
4019 rtx ops[2];
4020 ops[0] = operands[0];
4021 ops[1] = a;
4022
4023 output_asm_insn (\"mov %0, %1\", ops);
4024
4025 XEXP (mem, 0) = operands[0];
4026 }
4027
4028 else if ( GET_CODE (a) == LABEL_REF
4029 && GET_CODE (b) == CONST_INT)
4030 return \"ldr\\t%0, %1\";
4031 }
4032
4033 return \"ldrh\\t%0, %1\";
4034 "
4035 [(set_attr "length" "4")
4036 (set_attr "type" "load_byte")
4037 (set_attr "pool_range" "60")]
4038 )
4039
4040 (define_insn "*thumb1_zero_extendhisi2_v6"
4041 [(set (match_operand:SI 0 "register_operand" "=l,l")
4042 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4043 "TARGET_THUMB1 && arm_arch6"
4044 "*
4045 rtx mem;
4046
4047 if (which_alternative == 0)
4048 return \"uxth\\t%0, %1\";
4049
4050 mem = XEXP (operands[1], 0);
4051
4052 if (GET_CODE (mem) == CONST)
4053 mem = XEXP (mem, 0);
4054
4055 if (GET_CODE (mem) == LABEL_REF)
4056 return \"ldr\\t%0, %1\";
4057
4058 if (GET_CODE (mem) == PLUS)
4059 {
4060 rtx a = XEXP (mem, 0);
4061 rtx b = XEXP (mem, 1);
4062
4063 /* This can happen due to bugs in reload. */
4064 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4065 {
4066 rtx ops[2];
4067 ops[0] = operands[0];
4068 ops[1] = a;
4069
4070 output_asm_insn (\"mov %0, %1\", ops);
4071
4072 XEXP (mem, 0) = operands[0];
4073 }
4074
4075 else if ( GET_CODE (a) == LABEL_REF
4076 && GET_CODE (b) == CONST_INT)
4077 return \"ldr\\t%0, %1\";
4078 }
4079
4080 return \"ldrh\\t%0, %1\";
4081 "
4082 [(set_attr "length" "2,4")
4083 (set_attr "type" "alu_shift,load_byte")
4084 (set_attr "pool_range" "*,60")]
4085 )
4086
4087 (define_insn "*arm_zero_extendhisi2"
4088 [(set (match_operand:SI 0 "s_register_operand" "=r")
4089 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4090 "TARGET_ARM && arm_arch4 && !arm_arch6"
4091 "ldr%(h%)\\t%0, %1"
4092 [(set_attr "type" "load_byte")
4093 (set_attr "predicable" "yes")
4094 (set_attr "pool_range" "256")
4095 (set_attr "neg_pool_range" "244")]
4096 )
4097
4098 (define_insn "*arm_zero_extendhisi2_v6"
4099 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4100 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4101 "TARGET_ARM && arm_arch6"
4102 "@
4103 uxth%?\\t%0, %1
4104 ldr%(h%)\\t%0, %1"
4105 [(set_attr "type" "alu_shift,load_byte")
4106 (set_attr "predicable" "yes")
4107 (set_attr "pool_range" "*,256")
4108 (set_attr "neg_pool_range" "*,244")]
4109 )
4110
4111 (define_insn "*arm_zero_extendhisi2addsi"
4112 [(set (match_operand:SI 0 "s_register_operand" "=r")
4113 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4114 (match_operand:SI 2 "s_register_operand" "r")))]
4115 "TARGET_INT_SIMD"
4116 "uxtah%?\\t%0, %2, %1"
4117 [(set_attr "type" "alu_shift")
4118 (set_attr "predicable" "yes")]
4119 )
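
;; On TARGET_INT_SIMD cores the zero-extend and the add fold into a single
;; instruction.  As a purely illustrative example, source such as
;; "return b + (unsigned short) a;" may end up matching this pattern and
;; emitting one "uxtah" instead of a separate "uxth" and "add"; the code
;; actually generated of course depends on the surrounding compiler passes.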
4120
4121 (define_expand "zero_extendqisi2"
4122 [(set (match_operand:SI 0 "s_register_operand" "")
4123 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4124 "TARGET_EITHER"
4125 "
4126 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4127 {
4128 if (TARGET_ARM)
4129 {
4130 emit_insn (gen_andsi3 (operands[0],
4131 gen_lowpart (SImode, operands[1]),
4132 GEN_INT (255)));
4133 }
4134 else /* TARGET_THUMB */
4135 {
4136 rtx temp = gen_reg_rtx (SImode);
4137 rtx ops[3];
4138
4139 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4140 operands[1] = gen_lowpart (SImode, operands[1]);
4141
4142 ops[0] = temp;
4143 ops[1] = operands[1];
4144 ops[2] = GEN_INT (24);
4145
4146 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4147 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4148
4149 ops[0] = operands[0];
4150 ops[1] = temp;
4151 ops[2] = GEN_INT (24);
4152
4153 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4154 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4155 }
4156 DONE;
4157 }
4158 "
4159 )
4160
4161 (define_insn "*thumb1_zero_extendqisi2"
4162 [(set (match_operand:SI 0 "register_operand" "=l")
4163 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4164 "TARGET_THUMB1 && !arm_arch6"
4165 "ldrb\\t%0, %1"
4166 [(set_attr "length" "2")
4167 (set_attr "type" "load_byte")
4168 (set_attr "pool_range" "32")]
4169 )
4170
4171 (define_insn "*thumb1_zero_extendqisi2_v6"
4172 [(set (match_operand:SI 0 "register_operand" "=l,l")
4173 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4174 "TARGET_THUMB1 && arm_arch6"
4175 "@
4176 uxtb\\t%0, %1
4177 ldrb\\t%0, %1"
4178 [(set_attr "length" "2,2")
4179 (set_attr "type" "alu_shift,load_byte")
4180 (set_attr "pool_range" "*,32")]
4181 )
4182
4183 (define_insn "*arm_zero_extendqisi2"
4184 [(set (match_operand:SI 0 "s_register_operand" "=r")
4185 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4186 "TARGET_ARM && !arm_arch6"
4187 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4188 [(set_attr "type" "load_byte")
4189 (set_attr "predicable" "yes")
4190 (set_attr "pool_range" "4096")
4191 (set_attr "neg_pool_range" "4084")]
4192 )
4193
4194 (define_insn "*arm_zero_extendqisi2_v6"
4195 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4196 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4197 "TARGET_ARM && arm_arch6"
4198 "@
4199 uxtb%(%)\\t%0, %1
4200 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4201 [(set_attr "type" "alu_shift,load_byte")
4202 (set_attr "predicable" "yes")
4203 (set_attr "pool_range" "*,4096")
4204 (set_attr "neg_pool_range" "*,4084")]
4205 )
4206
4207 (define_insn "*arm_zero_extendqisi2addsi"
4208 [(set (match_operand:SI 0 "s_register_operand" "=r")
4209 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4210 (match_operand:SI 2 "s_register_operand" "r")))]
4211 "TARGET_INT_SIMD"
4212 "uxtab%?\\t%0, %2, %1"
4213 [(set_attr "predicable" "yes")
4214 (set_attr "insn" "xtab")
4215 (set_attr "type" "alu_shift")]
4216 )
4217
4218 (define_split
4219 [(set (match_operand:SI 0 "s_register_operand" "")
4220 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4221 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4222 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4223 [(set (match_dup 2) (match_dup 1))
4224 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4225 ""
4226 )
4227
4228 (define_split
4229 [(set (match_operand:SI 0 "s_register_operand" "")
4230 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4231 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4232 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4233 [(set (match_dup 2) (match_dup 1))
4234 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4235 ""
4236 )
4237
4238 (define_code_iterator ior_xor [ior xor])
4239
4240 (define_split
4241 [(set (match_operand:SI 0 "s_register_operand" "")
4242 (ior_xor:SI (and:SI (ashift:SI
4243 (match_operand:SI 1 "s_register_operand" "")
4244 (match_operand:SI 2 "const_int_operand" ""))
4245 (match_operand:SI 3 "const_int_operand" ""))
4246 (zero_extend:SI
4247 (match_operator 5 "subreg_lowpart_operator"
4248 [(match_operand:SI 4 "s_register_operand" "")]))))]
4249 "TARGET_32BIT
4250 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4251 == (GET_MODE_MASK (GET_MODE (operands[5]))
4252 & (GET_MODE_MASK (GET_MODE (operands[5]))
4253 << (INTVAL (operands[2])))))"
4254 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4255 (match_dup 4)))
4256 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4257 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4258 )
4259
4260 (define_insn "*compareqi_eq0"
4261 [(set (reg:CC_Z CC_REGNUM)
4262 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4263 (const_int 0)))]
4264 "TARGET_32BIT"
4265 "tst\\t%0, #255"
4266 [(set_attr "conds" "set")]
4267 )
4268
4269 (define_expand "extendhisi2"
4270 [(set (match_dup 2)
4271 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4272 (const_int 16)))
4273 (set (match_operand:SI 0 "s_register_operand" "")
4274 (ashiftrt:SI (match_dup 2)
4275 (const_int 16)))]
4276 "TARGET_EITHER"
4277 "
4278 {
4279 if (GET_CODE (operands[1]) == MEM)
4280 {
4281 if (TARGET_THUMB1)
4282 {
4283 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4284 DONE;
4285 }
4286 else if (arm_arch4)
4287 {
4288 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4289 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4290 DONE;
4291 }
4292 }
4293
4294 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4295 {
4296 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4297 DONE;
4298 }
4299
4300 if (!s_register_operand (operands[1], HImode))
4301 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4302
4303 if (arm_arch6)
4304 {
4305 if (TARGET_THUMB1)
4306 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4307 else
4308 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4309 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4310
4311 DONE;
4312 }
4313
4314 operands[1] = gen_lowpart (SImode, operands[1]);
4315 operands[2] = gen_reg_rtx (SImode);
4316 }"
4317 )
4318
4319 (define_insn "thumb1_extendhisi2"
4320 [(set (match_operand:SI 0 "register_operand" "=l")
4321 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4322 (clobber (match_scratch:SI 2 "=&l"))]
4323 "TARGET_THUMB1 && !arm_arch6"
4324 "*
4325 {
4326 rtx ops[4];
4327 rtx mem = XEXP (operands[1], 0);
4328
4329 /* This code used to try to use 'V', and fix the address only if it was
4330 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4331 range of QImode offsets, and offsettable_address_p does a QImode
4332 address check. */
4333
4334 if (GET_CODE (mem) == CONST)
4335 mem = XEXP (mem, 0);
4336
4337 if (GET_CODE (mem) == LABEL_REF)
4338 return \"ldr\\t%0, %1\";
4339
4340 if (GET_CODE (mem) == PLUS)
4341 {
4342 rtx a = XEXP (mem, 0);
4343 rtx b = XEXP (mem, 1);
4344
4345 if (GET_CODE (a) == LABEL_REF
4346 && GET_CODE (b) == CONST_INT)
4347 return \"ldr\\t%0, %1\";
4348
4349 if (GET_CODE (b) == REG)
4350 return \"ldrsh\\t%0, %1\";
4351
4352 ops[1] = a;
4353 ops[2] = b;
4354 }
4355 else
4356 {
4357 ops[1] = mem;
4358 ops[2] = const0_rtx;
4359 }
4360
4361 gcc_assert (GET_CODE (ops[1]) == REG);
4362
4363 ops[0] = operands[0];
4364 ops[3] = operands[2];
4365 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4366 return \"\";
4367 }"
4368 [(set_attr "length" "4")
4369 (set_attr "type" "load_byte")
4370 (set_attr "pool_range" "1020")]
4371 )
4372
4373 ;; We used to have an early-clobber on the scratch register here.
4374 ;; However, there's a bug somewhere in reload which means that this
4375 ;; can be partially ignored during spill allocation if the memory
4376 ;; address also needs reloading; this causes us to die later on when
4377 ;; we try to verify the operands. Fortunately, we don't really need
4378 ;; the early-clobber: we can always use operand 0 if operand 2
4379 ;; overlaps the address.
4380 (define_insn "*thumb1_extendhisi2_insn_v6"
4381 [(set (match_operand:SI 0 "register_operand" "=l,l")
4382 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4383 (clobber (match_scratch:SI 2 "=X,l"))]
4384 "TARGET_THUMB1 && arm_arch6"
4385 "*
4386 {
4387 rtx ops[4];
4388 rtx mem;
4389
4390 if (which_alternative == 0)
4391 return \"sxth\\t%0, %1\";
4392
4393 mem = XEXP (operands[1], 0);
4394
4395 /* This code used to try to use 'V', and fix the address only if it was
4396 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4397 range of QImode offsets, and offsettable_address_p does a QImode
4398 address check. */
4399
4400 if (GET_CODE (mem) == CONST)
4401 mem = XEXP (mem, 0);
4402
4403 if (GET_CODE (mem) == LABEL_REF)
4404 return \"ldr\\t%0, %1\";
4405
4406 if (GET_CODE (mem) == PLUS)
4407 {
4408 rtx a = XEXP (mem, 0);
4409 rtx b = XEXP (mem, 1);
4410
4411 if (GET_CODE (a) == LABEL_REF
4412 && GET_CODE (b) == CONST_INT)
4413 return \"ldr\\t%0, %1\";
4414
4415 if (GET_CODE (b) == REG)
4416 return \"ldrsh\\t%0, %1\";
4417
4418 ops[1] = a;
4419 ops[2] = b;
4420 }
4421 else
4422 {
4423 ops[1] = mem;
4424 ops[2] = const0_rtx;
4425 }
4426
4427 gcc_assert (GET_CODE (ops[1]) == REG);
4428
4429 ops[0] = operands[0];
4430 if (reg_mentioned_p (operands[2], ops[1]))
4431 ops[3] = ops[0];
4432 else
4433 ops[3] = operands[2];
4434 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4435 return \"\";
4436 }"
4437 [(set_attr "length" "2,4")
4438 (set_attr "type" "alu_shift,load_byte")
4439 (set_attr "pool_range" "*,1020")]
4440 )
4441
4442 ;; This pattern will only be used when ldrsh is not available.
4443 (define_expand "extendhisi2_mem"
4444 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4445 (set (match_dup 3)
4446 (zero_extend:SI (match_dup 7)))
4447 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4448 (set (match_operand:SI 0 "" "")
4449 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4450 "TARGET_ARM"
4451 "
4452 {
4453 rtx mem1, mem2;
4454 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4455
4456 mem1 = change_address (operands[1], QImode, addr);
4457 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4458 operands[0] = gen_lowpart (SImode, operands[0]);
4459 operands[1] = mem1;
4460 operands[2] = gen_reg_rtx (SImode);
4461 operands[3] = gen_reg_rtx (SImode);
4462 operands[6] = gen_reg_rtx (SImode);
4463 operands[7] = mem2;
4464
4465 if (BYTES_BIG_ENDIAN)
4466 {
4467 operands[4] = operands[2];
4468 operands[5] = operands[3];
4469 }
4470 else
4471 {
4472 operands[4] = operands[3];
4473 operands[5] = operands[2];
4474 }
4475 }"
4476 )
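
;; The expansion above loads the two bytes of the halfword with separate
;; zero-extending byte loads and then rebuilds the signed value: the byte
;; holding the sign (chosen according to BYTES_BIG_ENDIAN) is shifted left by
;; 24 and arithmetically right by 16, and the other byte is ORed into the low
;; eight bits.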
4477
4478 (define_insn "*arm_extendhisi2"
4479 [(set (match_operand:SI 0 "s_register_operand" "=r")
4480 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4481 "TARGET_ARM && arm_arch4 && !arm_arch6"
4482 "ldr%(sh%)\\t%0, %1"
4483 [(set_attr "type" "load_byte")
4484 (set_attr "predicable" "yes")
4485 (set_attr "pool_range" "256")
4486 (set_attr "neg_pool_range" "244")]
4487 )
4488
4489 ;; ??? Check Thumb-2 pool range
4490 (define_insn "*arm_extendhisi2_v6"
4491 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4492 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4493 "TARGET_32BIT && arm_arch6"
4494 "@
4495 sxth%?\\t%0, %1
4496 ldr%(sh%)\\t%0, %1"
4497 [(set_attr "type" "alu_shift,load_byte")
4498 (set_attr "predicable" "yes")
4499 (set_attr "pool_range" "*,256")
4500 (set_attr "neg_pool_range" "*,244")]
4501 )
4502
4503 (define_insn "*arm_extendhisi2addsi"
4504 [(set (match_operand:SI 0 "s_register_operand" "=r")
4505 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4506 (match_operand:SI 2 "s_register_operand" "r")))]
4507 "TARGET_INT_SIMD"
4508 "sxtah%?\\t%0, %2, %1"
4509 )
4510
4511 (define_expand "extendqihi2"
4512 [(set (match_dup 2)
4513 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4514 (const_int 24)))
4515 (set (match_operand:HI 0 "s_register_operand" "")
4516 (ashiftrt:SI (match_dup 2)
4517 (const_int 24)))]
4518 "TARGET_ARM"
4519 "
4520 {
4521 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4522 {
4523 emit_insn (gen_rtx_SET (VOIDmode,
4524 operands[0],
4525 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4526 DONE;
4527 }
4528 if (!s_register_operand (operands[1], QImode))
4529 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4530 operands[0] = gen_lowpart (SImode, operands[0]);
4531 operands[1] = gen_lowpart (SImode, operands[1]);
4532 operands[2] = gen_reg_rtx (SImode);
4533 }"
4534 )
4535
4536 (define_insn "*arm_extendqihi_insn"
4537 [(set (match_operand:HI 0 "s_register_operand" "=r")
4538 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4539 "TARGET_ARM && arm_arch4"
4540 "ldr%(sb%)\\t%0, %1"
4541 [(set_attr "type" "load_byte")
4542 (set_attr "predicable" "yes")
4543 (set_attr "pool_range" "256")
4544 (set_attr "neg_pool_range" "244")]
4545 )
4546
4547 (define_expand "extendqisi2"
4548 [(set (match_dup 2)
4549 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4550 (const_int 24)))
4551 (set (match_operand:SI 0 "s_register_operand" "")
4552 (ashiftrt:SI (match_dup 2)
4553 (const_int 24)))]
4554 "TARGET_EITHER"
4555 "
4556 {
4557 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4558 {
4559 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4560 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4561 DONE;
4562 }
4563
4564 if (!s_register_operand (operands[1], QImode))
4565 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4566
4567 if (arm_arch6)
4568 {
4569 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4570 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4571 DONE;
4572 }
4573
4574 operands[1] = gen_lowpart (SImode, operands[1]);
4575 operands[2] = gen_reg_rtx (SImode);
4576 }"
4577 )
4578
4579 (define_insn "*arm_extendqisi"
4580 [(set (match_operand:SI 0 "s_register_operand" "=r")
4581 (sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4582 "TARGET_ARM && arm_arch4 && !arm_arch6"
4583 "ldr%(sb%)\\t%0, %1"
4584 [(set_attr "type" "load_byte")
4585 (set_attr "predicable" "yes")
4586 (set_attr "pool_range" "256")
4587 (set_attr "neg_pool_range" "244")]
4588 )
4589
4590 (define_insn "*arm_extendqisi_v6"
4591 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4592 (sign_extend:SI
4593 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4594 "TARGET_ARM && arm_arch6"
4595 "@
4596 sxtb%?\\t%0, %1
4597 ldr%(sb%)\\t%0, %1"
4598 [(set_attr "type" "alu_shift,load_byte")
4599 (set_attr "predicable" "yes")
4600 (set_attr "pool_range" "*,256")
4601 (set_attr "neg_pool_range" "*,244")]
4602 )
4603
4604 (define_insn "*arm_extendqisi2addsi"
4605 [(set (match_operand:SI 0 "s_register_operand" "=r")
4606 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4607 (match_operand:SI 2 "s_register_operand" "r")))]
4608 "TARGET_INT_SIMD"
4609 "sxtab%?\\t%0, %2, %1"
4610 [(set_attr "type" "alu_shift")
4611 (set_attr "insn" "xtab")
4612 (set_attr "predicable" "yes")]
4613 )
4614
4615 (define_insn "*thumb1_extendqisi2"
4616 [(set (match_operand:SI 0 "register_operand" "=l,l")
4617 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4618 "TARGET_THUMB1 && !arm_arch6"
4619 "*
4620 {
4621 rtx ops[3];
4622 rtx mem = XEXP (operands[1], 0);
4623
4624 if (GET_CODE (mem) == CONST)
4625 mem = XEXP (mem, 0);
4626
4627 if (GET_CODE (mem) == LABEL_REF)
4628 return \"ldr\\t%0, %1\";
4629
4630 if (GET_CODE (mem) == PLUS
4631 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4632 return \"ldr\\t%0, %1\";
4633
4634 if (which_alternative == 0)
4635 return \"ldrsb\\t%0, %1\";
4636
4637 ops[0] = operands[0];
4638
4639 if (GET_CODE (mem) == PLUS)
4640 {
4641 rtx a = XEXP (mem, 0);
4642 rtx b = XEXP (mem, 1);
4643
4644 ops[1] = a;
4645 ops[2] = b;
4646
4647 if (GET_CODE (a) == REG)
4648 {
4649 if (GET_CODE (b) == REG)
4650 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4651 else if (REGNO (a) == REGNO (ops[0]))
4652 {
4653 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4654 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4655 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4656 }
4657 else
4658 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4659 }
4660 else
4661 {
4662 gcc_assert (GET_CODE (b) == REG);
4663 if (REGNO (b) == REGNO (ops[0]))
4664 {
4665 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4666 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4667 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4668 }
4669 else
4670 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4671 }
4672 }
4673 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4674 {
4675 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4676 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4677 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4678 }
4679 else
4680 {
4681 ops[1] = mem;
4682 ops[2] = const0_rtx;
4683
4684 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4685 }
4686 return \"\";
4687 }"
4688 [(set_attr "length" "2,6")
4689 (set_attr "type" "load_byte,load_byte")
4690 (set_attr "pool_range" "32,32")]
4691 )
4692
4693 (define_insn "*thumb1_extendqisi2_v6"
4694 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4695 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4696 "TARGET_THUMB1 && arm_arch6"
4697 "*
4698 {
4699 rtx ops[3];
4700 rtx mem;
4701
4702 if (which_alternative == 0)
4703 return \"sxtb\\t%0, %1\";
4704
4705 mem = XEXP (operands[1], 0);
4706
4707 if (GET_CODE (mem) == CONST)
4708 mem = XEXP (mem, 0);
4709
4710 if (GET_CODE (mem) == LABEL_REF)
4711 return \"ldr\\t%0, %1\";
4712
4713 if (GET_CODE (mem) == PLUS
4714 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4715 return \"ldr\\t%0, %1\";
4716
4717 if (which_alternative == 1)
4718 return \"ldrsb\\t%0, %1\";
4719
4720 ops[0] = operands[0];
4721
4722 if (GET_CODE (mem) == PLUS)
4723 {
4724 rtx a = XEXP (mem, 0);
4725 rtx b = XEXP (mem, 1);
4726
4727 ops[1] = a;
4728 ops[2] = b;
4729
4730 if (GET_CODE (a) == REG)
4731 {
4732 if (GET_CODE (b) == REG)
4733 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4734 else if (REGNO (a) == REGNO (ops[0]))
4735 {
4736 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4737 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4738 }
4739 else
4740 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4741 }
4742 else
4743 {
4744 gcc_assert (GET_CODE (b) == REG);
4745 if (REGNO (b) == REGNO (ops[0]))
4746 {
4747 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4748 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4749 }
4750 else
4751 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4752 }
4753 }
4754 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4755 {
4756 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4757 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4758 }
4759 else
4760 {
4761 ops[1] = mem;
4762 ops[2] = const0_rtx;
4763
4764 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4765 }
4766 return \"\";
4767 }"
4768 [(set_attr "length" "2,2,4")
4769 (set_attr "type" "alu_shift,load_byte,load_byte")
4770 (set_attr "pool_range" "*,32,32")]
4771 )
4772
4773 (define_expand "extendsfdf2"
4774 [(set (match_operand:DF 0 "s_register_operand" "")
4775 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4776 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4777 ""
4778 )
4779
4780 /* HFmode -> DFmode conversions have to go through SFmode. */
4781 (define_expand "extendhfdf2"
4782 [(set (match_operand:DF 0 "general_operand" "")
4783 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
4784 "TARGET_EITHER"
4785 "
4786 {
4787 rtx op1;
4788 op1 = convert_to_mode (SFmode, operands[1], 0);
4789 op1 = convert_to_mode (DFmode, op1, 0);
4790 emit_insn (gen_movdf (operands[0], op1));
4791 DONE;
4792 }"
4793 )
4794 \f
4795 ;; Move insns (including loads and stores)
4796
4797 ;; XXX Just some ideas about movti.
4798 ;; I don't think these are a good idea on the ARM; there just aren't
4799 ;; enough registers.
4800 ;;(define_expand "loadti"
4801 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4802 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4803 ;; "" "")
4804
4805 ;;(define_expand "storeti"
4806 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4807 ;; (match_operand:TI 1 "s_register_operand" ""))]
4808 ;; "" "")
4809
4810 ;;(define_expand "movti"
4811 ;; [(set (match_operand:TI 0 "general_operand" "")
4812 ;; (match_operand:TI 1 "general_operand" ""))]
4813 ;; ""
4814 ;; "
4815 ;;{
4816 ;; rtx insn;
4817 ;;
4818 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4819 ;; operands[1] = copy_to_reg (operands[1]);
4820 ;; if (GET_CODE (operands[0]) == MEM)
4821 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4822 ;; else if (GET_CODE (operands[1]) == MEM)
4823 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4824 ;; else
4825 ;; FAIL;
4826 ;;
4827 ;; emit_insn (insn);
4828 ;; DONE;
4829 ;;}")
4830
4831 ;; Recognize garbage generated above.
4832
4833 ;;(define_insn ""
4834 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4835 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4836 ;; ""
4837 ;; "*
4838 ;; {
4839 ;; register mem = (which_alternative < 3);
4840 ;; register const char *template;
4841 ;;
4842 ;; operands[mem] = XEXP (operands[mem], 0);
4843 ;; switch (which_alternative)
4844 ;; {
4845 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4846 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4847 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4848 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4849 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4850 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4851 ;; }
4852 ;; output_asm_insn (template, operands);
4853 ;; return \"\";
4854 ;; }")
4855
4856 (define_expand "movdi"
4857 [(set (match_operand:DI 0 "general_operand" "")
4858 (match_operand:DI 1 "general_operand" ""))]
4859 "TARGET_EITHER"
4860 "
4861 if (can_create_pseudo_p ())
4862 {
4863 if (GET_CODE (operands[0]) != REG)
4864 operands[1] = force_reg (DImode, operands[1]);
4865 }
4866 "
4867 )
4868
4869 (define_insn "*arm_movdi"
4870 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4871 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4872 "TARGET_ARM
4873 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4874 && !TARGET_IWMMXT
4875 && ( register_operand (operands[0], DImode)
4876 || register_operand (operands[1], DImode))"
4877 "*
4878 switch (which_alternative)
4879 {
4880 case 0:
4881 case 1:
4882 case 2:
4883 return \"#\";
4884 default:
4885 return output_move_double (operands);
4886 }
4887 "
4888 [(set_attr "length" "8,12,16,8,8")
4889 (set_attr "type" "*,*,*,load2,store2")
4890 (set_attr "pool_range" "*,*,*,1020,*")
4891 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4892 )
4893
4894 (define_split
4895 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4896 (match_operand:ANY64 1 "const_double_operand" ""))]
4897 "TARGET_32BIT
4898 && reload_completed
4899 && (arm_const_double_inline_cost (operands[1])
4900 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4901 [(const_int 0)]
4902 "
4903 arm_split_constant (SET, SImode, curr_insn,
4904 INTVAL (gen_lowpart (SImode, operands[1])),
4905 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4906 arm_split_constant (SET, SImode, curr_insn,
4907 INTVAL (gen_highpart_mode (SImode,
4908 GET_MODE (operands[0]),
4909 operands[1])),
4910 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4911 DONE;
4912 "
4913 )
4914
4915 ; If optimizing for size, or if we have load delay slots, then
4916 ; we want to split the constant into two separate operations.
4917 ; In both cases this may split a trivial part into a single data op
4918 ; leaving a single complex constant to load. We can also get longer
4919 ; offsets in a LDR which means we get better chances of sharing the pool
4920 ; entries. Finally, we can normally do a better job of scheduling
4921 ; LDR instructions than we can with LDM.
4922 ; This pattern will only match if the one above did not.
4923 (define_split
4924 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4925 (match_operand:ANY64 1 "const_double_operand" ""))]
4926 "TARGET_ARM && reload_completed
4927 && arm_const_double_by_parts (operands[1])"
4928 [(set (match_dup 0) (match_dup 1))
4929 (set (match_dup 2) (match_dup 3))]
4930 "
4931 operands[2] = gen_highpart (SImode, operands[0]);
4932 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4933 operands[1]);
4934 operands[0] = gen_lowpart (SImode, operands[0]);
4935 operands[1] = gen_lowpart (SImode, operands[1]);
4936 "
4937 )
4938
4939 (define_split
4940 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4941 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4942 "TARGET_EITHER && reload_completed"
4943 [(set (match_dup 0) (match_dup 1))
4944 (set (match_dup 2) (match_dup 3))]
4945 "
4946 operands[2] = gen_highpart (SImode, operands[0]);
4947 operands[3] = gen_highpart (SImode, operands[1]);
4948 operands[0] = gen_lowpart (SImode, operands[0]);
4949 operands[1] = gen_lowpart (SImode, operands[1]);
4950
4951 /* Handle a partial overlap. */
4952 if (rtx_equal_p (operands[0], operands[3]))
4953 {
4954 rtx tmp0 = operands[0];
4955 rtx tmp1 = operands[1];
4956
4957 operands[0] = operands[2];
4958 operands[1] = operands[3];
4959 operands[2] = tmp0;
4960 operands[3] = tmp1;
4961 }
4962 "
4963 )
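
;; Example of the partial overlap handled above (with the low word in the
;; lower-numbered register): if the destination is {r2, r3} and the source is
;; {r1, r2}, copying the low word first would clobber r2 while it is still
;; needed as the source high word, so the two word moves are swapped.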
4964
4965 ;; We can't actually do base+index doubleword loads if the index and
4966 ;; destination overlap.  Split here so that we at least have a chance to
4967 ;; schedule.
4968 (define_split
4969 [(set (match_operand:DI 0 "s_register_operand" "")
4970 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4971 (match_operand:SI 2 "s_register_operand" ""))))]
4972 "TARGET_LDRD
4973 && reg_overlap_mentioned_p (operands[0], operands[1])
4974 && reg_overlap_mentioned_p (operands[0], operands[2])"
4975 [(set (match_dup 4)
4976 (plus:SI (match_dup 1)
4977 (match_dup 2)))
4978 (set (match_dup 0)
4979 (mem:DI (match_dup 4)))]
4980 "
4981 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4982 "
4983 )
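
;; The address sum above is computed into the low word of the destination
;; (operand 4 is built from REGNO (operands[0])); that register is about to
;; be overwritten by the load anyway, so no extra scratch register is needed.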
4984
4985 ;;; ??? This should have alternatives for constants.
4986 ;;; ??? This was originally identical to the movdf_insn pattern.
4987 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4988 ;;; thumb_reorg with a memory reference.
4989 (define_insn "*thumb1_movdi_insn"
4990 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4991 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4992 "TARGET_THUMB1
4993 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4994 && ( register_operand (operands[0], DImode)
4995 || register_operand (operands[1], DImode))"
4996 "*
4997 {
4998 switch (which_alternative)
4999 {
5000 default:
5001 case 0:
5002 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5003 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5004 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5005 case 1:
5006 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5007 case 2:
5008 operands[1] = GEN_INT (- INTVAL (operands[1]));
5009 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5010 case 3:
5011 return \"ldmia\\t%1, {%0, %H0}\";
5012 case 4:
5013 return \"stmia\\t%0, {%1, %H1}\";
5014 case 5:
5015 return thumb_load_double_from_address (operands);
5016 case 6:
5017 operands[2] = gen_rtx_MEM (SImode,
5018 plus_constant (XEXP (operands[0], 0), 4));
5019 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5020 return \"\";
5021 case 7:
5022 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5023 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5024 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5025 }
5026 }"
5027 [(set_attr "length" "4,4,6,2,2,6,4,4")
5028 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5029 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5030 )
5031
5032 (define_expand "movsi"
5033 [(set (match_operand:SI 0 "general_operand" "")
5034 (match_operand:SI 1 "general_operand" ""))]
5035 "TARGET_EITHER"
5036 "
5037 {
5038 rtx base, offset, tmp;
5039
5040 if (TARGET_32BIT)
5041 {
5042 /* Everything except mem = const or mem = mem can be done easily. */
5043 if (GET_CODE (operands[0]) == MEM)
5044 operands[1] = force_reg (SImode, operands[1]);
5045 if (arm_general_register_operand (operands[0], SImode)
5046 && GET_CODE (operands[1]) == CONST_INT
5047 && !(const_ok_for_arm (INTVAL (operands[1]))
5048 || const_ok_for_arm (~INTVAL (operands[1]))))
5049 {
5050 arm_split_constant (SET, SImode, NULL_RTX,
5051 INTVAL (operands[1]), operands[0], NULL_RTX,
5052 optimize && can_create_pseudo_p ());
5053 DONE;
5054 }
5055
5056 if (TARGET_USE_MOVT && !target_word_relocations
5057 && GET_CODE (operands[1]) == SYMBOL_REF
5058 && !flag_pic && !arm_tls_referenced_p (operands[1]))
5059 {
5060 arm_emit_movpair (operands[0], operands[1]);
5061 DONE;
5062 }
5063 }
5064 else /* TARGET_THUMB1... */
5065 {
5066 if (can_create_pseudo_p ())
5067 {
5068 if (GET_CODE (operands[0]) != REG)
5069 operands[1] = force_reg (SImode, operands[1]);
5070 }
5071 }
5072
5073 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5074 {
5075 split_const (operands[1], &base, &offset);
5076 if (GET_CODE (base) == SYMBOL_REF
5077 && !offset_within_block_p (base, INTVAL (offset)))
5078 {
5079 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5080 emit_move_insn (tmp, base);
5081 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5082 DONE;
5083 }
5084 }
5085
5086 /* Recognize the case where operands[1] is a reference to thread-local
5087 data and load its address into a register.  */
5088 if (arm_tls_referenced_p (operands[1]))
5089 {
5090 rtx tmp = operands[1];
5091 rtx addend = NULL;
5092
5093 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5094 {
5095 addend = XEXP (XEXP (tmp, 0), 1);
5096 tmp = XEXP (XEXP (tmp, 0), 0);
5097 }
5098
5099 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5100 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5101
5102 tmp = legitimize_tls_address (tmp,
5103 !can_create_pseudo_p () ? operands[0] : 0);
5104 if (addend)
5105 {
5106 tmp = gen_rtx_PLUS (SImode, tmp, addend);
5107 tmp = force_operand (tmp, operands[0]);
5108 }
5109 operands[1] = tmp;
5110 }
5111 else if (flag_pic
5112 && (CONSTANT_P (operands[1])
5113 || symbol_mentioned_p (operands[1])
5114 || label_mentioned_p (operands[1])))
5115 operands[1] = legitimize_pic_address (operands[1], SImode,
5116 (!can_create_pseudo_p ()
5117 ? operands[0]
5118 : 0));
5119 }
5120 "
5121 )
5122
5123 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5124 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5125 ;; so this does not matter.
5126 (define_insn "*arm_movt"
5127 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5128 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5129 (match_operand:SI 2 "general_operand" "i")))]
5130 "TARGET_32BIT"
5131 "movt%?\t%0, #:upper16:%c2"
5132 [(set_attr "predicable" "yes")
5133 (set_attr "length" "4")]
5134 )
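;; Together with the matching lower-half move, arm_emit_movpair therefore
;; produces roughly
;;     movw    r0, #:lower16:sym
;;     movt    r0, #:upper16:sym
;; (register and symbol names illustrative).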
5135
5136 (define_insn "*arm_movsi_insn"
5137 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5138 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5139 "TARGET_ARM && ! TARGET_IWMMXT
5140 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5141 && ( register_operand (operands[0], SImode)
5142 || register_operand (operands[1], SImode))"
5143 "@
5144 mov%?\\t%0, %1
5145 mov%?\\t%0, %1
5146 mvn%?\\t%0, #%B1
5147 movw%?\\t%0, %1
5148 ldr%?\\t%0, %1
5149 str%?\\t%1, %0"
5150 [(set_attr "type" "*,*,*,*,load1,store1")
5151 (set_attr "predicable" "yes")
5152 (set_attr "pool_range" "*,*,*,*,4096,*")
5153 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5154 )
5155
5156 (define_split
5157 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5158 (match_operand:SI 1 "const_int_operand" ""))]
5159 "TARGET_32BIT
5160 && (!(const_ok_for_arm (INTVAL (operands[1]))
5161 || const_ok_for_arm (~INTVAL (operands[1]))))"
5162 [(clobber (const_int 0))]
5163 "
5164 arm_split_constant (SET, SImode, NULL_RTX,
5165 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5166 DONE;
5167 "
5168 )
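;; e.g. 0x12345678 is not an immediate the mov or mvn patterns accept, so
;; arm_split_constant builds it up with something like
;;     mov     r0, #0x12000000
;;     orr     r0, r0, #0x340000
;;     orr     r0, r0, #0x5600
;;     orr     r0, r0, #0x78
;; (movw/movt may be used instead on cores that provide them; register
;; number illustrative).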
5169
5170 (define_insn "*thumb1_movsi_insn"
5171 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5172 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
5173 "TARGET_THUMB1
5174 && ( register_operand (operands[0], SImode)
5175 || register_operand (operands[1], SImode))"
5176 "@
5177 mov %0, %1
5178 mov %0, %1
5179 #
5180 #
5181 ldmia\\t%1, {%0}
5182 stmia\\t%0, {%1}
5183 ldr\\t%0, %1
5184 str\\t%1, %0
5185 mov\\t%0, %1"
5186 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5187 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5188 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5189 )
5190
5191 (define_split
5192 [(set (match_operand:SI 0 "register_operand" "")
5193 (match_operand:SI 1 "const_int_operand" ""))]
5194 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5195 [(set (match_dup 0) (match_dup 1))
5196 (set (match_dup 0) (neg:SI (match_dup 0)))]
5197 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5198 )
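;; e.g. the constant -200 is rebuilt as
;;     mov     r0, #200
;;     neg     r0, r0
;; (register number illustrative).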
5199
5200 (define_split
5201 [(set (match_operand:SI 0 "register_operand" "")
5202 (match_operand:SI 1 "const_int_operand" ""))]
5203 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5204 [(set (match_dup 0) (match_dup 1))
5205 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5206 "
5207 {
5208 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5209 unsigned HOST_WIDE_INT mask = 0xff;
5210 int i;
5211
5212 for (i = 0; i < 25; i++)
5213 if ((val & (mask << i)) == val)
5214 break;
5215
5216 /* Shouldn't happen, but we don't want to split if the shift is zero. */
5217 if (i == 0)
5218 FAIL;
5219
5220 operands[1] = GEN_INT (val >> i);
5221 operands[2] = GEN_INT (i);
5222 }"
5223 )
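;; e.g. the constant 0xff000000 is rebuilt as
;;     mov     r0, #255
;;     lsl     r0, r0, #24
;; (register number illustrative).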
5224
5225 ;; When generating PIC code, we need to load the symbol offset into a register.
5226 ;; So that the optimizer does not confuse this with a normal symbol load
5227 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5228 ;; since that is the only type of relocation we can use.
5229
5230 ;; The rather odd constraints on the following are to force reload to leave
5231 ;; the insn alone, and to force the minipool generation pass to then move
5232 ;; the GOT symbol to memory.
5233
5234 (define_insn "pic_load_addr_32bit"
5235 [(set (match_operand:SI 0 "s_register_operand" "=r")
5236 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5237 "TARGET_32BIT && flag_pic"
5238 "ldr%?\\t%0, %1"
5239 [(set_attr "type" "load1")
5240 (set_attr "pool_range" "4096")
5241 (set (attr "neg_pool_range")
5242 (if_then_else (eq_attr "is_thumb" "no")
5243 (const_int 4084)
5244 (const_int 0)))]
5245 )
5246
5247 (define_insn "pic_load_addr_thumb1"
5248 [(set (match_operand:SI 0 "s_register_operand" "=l")
5249 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5250 "TARGET_THUMB1 && flag_pic"
5251 "ldr\\t%0, %1"
5252 [(set_attr "type" "load1")
5253 (set (attr "pool_range") (const_int 1024))]
5254 )
5255
5256 (define_insn "pic_add_dot_plus_four"
5257 [(set (match_operand:SI 0 "register_operand" "=r")
5258 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5259 (const_int 4)
5260 (match_operand 2 "" "")]
5261 UNSPEC_PIC_BASE))]
5262 "TARGET_THUMB"
5263 "*
5264 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5265 INTVAL (operands[2]));
5266 return \"add\\t%0, %|pc\";
5267 "
5268 [(set_attr "length" "2")]
5269 )
5270
5271 (define_insn "pic_add_dot_plus_eight"
5272 [(set (match_operand:SI 0 "register_operand" "=r")
5273 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5274 (const_int 8)
5275 (match_operand 2 "" "")]
5276 UNSPEC_PIC_BASE))]
5277 "TARGET_ARM"
5278 "*
5279 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5280 INTVAL (operands[2]));
5281 return \"add%?\\t%0, %|pc, %1\";
5282 "
5283 [(set_attr "predicable" "yes")]
5284 )
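;; Taken together with pic_load_addr_32bit above, a typical ARM PIC base
;; computation therefore looks roughly like
;;     ldr     r3, .LCPn           @ pic_load_addr_32bit
;; .LPIC0:
;;     add     r3, pc, r3          @ pic_add_dot_plus_eight
;; where the .LCPn pool entry holds the symbol offset relative to .LPIC0 + 8
;; (labels and register numbers illustrative; the exact relocation used
;; varies with the target ABI).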
5285
5286 (define_insn "tls_load_dot_plus_eight"
5287 [(set (match_operand:SI 0 "register_operand" "=r")
5288 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5289 (const_int 8)
5290 (match_operand 2 "" "")]
5291 UNSPEC_PIC_BASE)))]
5292 "TARGET_ARM"
5293 "*
5294 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5295 INTVAL (operands[2]));
5296 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5297 "
5298 [(set_attr "predicable" "yes")]
5299 )
5300
5301 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5302 ;; followed by a load. These sequences can be crunched down to
5303 ;; tls_load_dot_plus_eight by a peephole.
5304
5305 (define_peephole2
5306 [(set (match_operand:SI 0 "register_operand" "")
5307 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5308 (const_int 8)
5309 (match_operand 1 "" "")]
5310 UNSPEC_PIC_BASE))
5311 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5312 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5313 [(set (match_dup 2)
5314 (mem:SI (unspec:SI [(match_dup 3)
5315 (const_int 8)
5316 (match_dup 1)]
5317 UNSPEC_PIC_BASE)))]
5318 ""
5319 )
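;; i.e. a sequence along the lines of
;;     add     r0, pc, r1
;;     ldr     r2, [r0]
;; is collapsed into the single
;;     ldr     r2, [pc, r1]
;; provided r0 is dead after the load (register numbers illustrative).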
5320
5321 (define_insn "pic_offset_arm"
5322 [(set (match_operand:SI 0 "register_operand" "=r")
5323 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5324 (unspec:SI [(match_operand:SI 2 "" "X")]
5325 UNSPEC_PIC_OFFSET))))]
5326 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5327 "ldr%?\\t%0, [%1,%2]"
5328 [(set_attr "type" "load1")]
5329 )
5330
5331 (define_expand "builtin_setjmp_receiver"
5332 [(label_ref (match_operand 0 "" ""))]
5333 "flag_pic"
5334 "
5335 {
5336 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5337 register. */
5338 if (arm_pic_register != INVALID_REGNUM)
5339 arm_load_pic_register (1UL << 3);
5340 DONE;
5341 }")
5342
5343 ;; If copying one reg to another we can set the condition codes according to
5344 ;; its value.  Such a move is common after a return from a subroutine when
5345 ;; the result is being tested against zero.
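;; e.g. instead of "mov r0, r1" followed by "cmp r0, #0", the second
;; alternative below emits the single flag-setting "subs r0, r1, #0".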
5346
5347 (define_insn "*movsi_compare0"
5348 [(set (reg:CC CC_REGNUM)
5349 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5350 (const_int 0)))
5351 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5352 (match_dup 1))]
5353 "TARGET_32BIT"
5354 "@
5355 cmp%?\\t%0, #0
5356 sub%.\\t%0, %1, #0"
5357 [(set_attr "conds" "set")]
5358 )
5359
5360 ;; Subroutine to store a half word from a register into memory.
5361 ;; Operand 0 is the source register (HImode)
5362 ;; Operand 1 is the destination address in a register (SImode)
5363
5364 ;; In both this routine and the next, we must be careful not to spill
5365 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5366 ;; can generate unrecognizable rtl.
5367
5368 (define_expand "storehi"
5369 [;; store the low byte
5370 (set (match_operand 1 "" "") (match_dup 3))
5371 ;; extract the high byte
5372 (set (match_dup 2)
5373 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5374 ;; store the high byte
5375 (set (match_dup 4) (match_dup 5))]
5376 "TARGET_ARM"
5377 "
5378 {
5379 rtx op1 = operands[1];
5380 rtx addr = XEXP (op1, 0);
5381 enum rtx_code code = GET_CODE (addr);
5382
5383 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5384 || code == MINUS)
5385 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5386
5387 operands[4] = adjust_address (op1, QImode, 1);
5388 operands[1] = adjust_address (operands[1], QImode, 0);
5389 operands[3] = gen_lowpart (QImode, operands[0]);
5390 operands[0] = gen_lowpart (SImode, operands[0]);
5391 operands[2] = gen_reg_rtx (SImode);
5392 operands[5] = gen_lowpart (QImode, operands[2]);
5393 }"
5394 )
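;; For an HImode store of r1 to [r0] on a pre-ARMv4 target this expands to
;; roughly
;;     strb    r1, [r0]            @ low byte
;;     mov     r2, r1, asr #8      @ high byte into a scratch
;;     strb    r2, [r0, #1]
;; (register numbers illustrative; the big-endian variant below swaps the
;; byte order).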
5395
5396 (define_expand "storehi_bigend"
5397 [(set (match_dup 4) (match_dup 3))
5398 (set (match_dup 2)
5399 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5400 (set (match_operand 1 "" "") (match_dup 5))]
5401 "TARGET_ARM"
5402 "
5403 {
5404 rtx op1 = operands[1];
5405 rtx addr = XEXP (op1, 0);
5406 enum rtx_code code = GET_CODE (addr);
5407
5408 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5409 || code == MINUS)
5410 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5411
5412 operands[4] = adjust_address (op1, QImode, 1);
5413 operands[1] = adjust_address (operands[1], QImode, 0);
5414 operands[3] = gen_lowpart (QImode, operands[0]);
5415 operands[0] = gen_lowpart (SImode, operands[0]);
5416 operands[2] = gen_reg_rtx (SImode);
5417 operands[5] = gen_lowpart (QImode, operands[2]);
5418 }"
5419 )
5420
5421 ;; Subroutine to store a half word integer constant into memory.
5422 (define_expand "storeinthi"
5423 [(set (match_operand 0 "" "")
5424 (match_operand 1 "" ""))
5425 (set (match_dup 3) (match_dup 2))]
5426 "TARGET_ARM"
5427 "
5428 {
5429 HOST_WIDE_INT value = INTVAL (operands[1]);
5430 rtx addr = XEXP (operands[0], 0);
5431 rtx op0 = operands[0];
5432 enum rtx_code code = GET_CODE (addr);
5433
5434 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5435 || code == MINUS)
5436 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5437
5438 operands[1] = gen_reg_rtx (SImode);
5439 if (BYTES_BIG_ENDIAN)
5440 {
5441 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5442 if ((value & 255) == ((value >> 8) & 255))
5443 operands[2] = operands[1];
5444 else
5445 {
5446 operands[2] = gen_reg_rtx (SImode);
5447 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5448 }
5449 }
5450 else
5451 {
5452 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5453 if ((value & 255) == ((value >> 8) & 255))
5454 operands[2] = operands[1];
5455 else
5456 {
5457 operands[2] = gen_reg_rtx (SImode);
5458 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5459 }
5460 }
5461
5462 operands[3] = adjust_address (op0, QImode, 1);
5463 operands[0] = adjust_address (operands[0], QImode, 0);
5464 operands[2] = gen_lowpart (QImode, operands[2]);
5465 operands[1] = gen_lowpart (QImode, operands[1]);
5466 }"
5467 )
5468
5469 (define_expand "storehi_single_op"
5470 [(set (match_operand:HI 0 "memory_operand" "")
5471 (match_operand:HI 1 "general_operand" ""))]
5472 "TARGET_32BIT && arm_arch4"
5473 "
5474 if (!s_register_operand (operands[1], HImode))
5475 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5476 "
5477 )
5478
5479 (define_expand "movhi"
5480 [(set (match_operand:HI 0 "general_operand" "")
5481 (match_operand:HI 1 "general_operand" ""))]
5482 "TARGET_EITHER"
5483 "
5484 if (TARGET_ARM)
5485 {
5486 if (can_create_pseudo_p ())
5487 {
5488 if (GET_CODE (operands[0]) == MEM)
5489 {
5490 if (arm_arch4)
5491 {
5492 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5493 DONE;
5494 }
5495 if (GET_CODE (operands[1]) == CONST_INT)
5496 emit_insn (gen_storeinthi (operands[0], operands[1]));
5497 else
5498 {
5499 if (GET_CODE (operands[1]) == MEM)
5500 operands[1] = force_reg (HImode, operands[1]);
5501 if (BYTES_BIG_ENDIAN)
5502 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5503 else
5504 emit_insn (gen_storehi (operands[1], operands[0]));
5505 }
5506 DONE;
5507 }
5508 /* Sign extend a constant, and keep it in an SImode reg. */
5509 else if (GET_CODE (operands[1]) == CONST_INT)
5510 {
5511 rtx reg = gen_reg_rtx (SImode);
5512 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5513
5514 /* If the constant is already valid, leave it alone. */
5515 if (!const_ok_for_arm (val))
5516 {
5517 /* If setting all the top bits will make the constant
5518 loadable in a single instruction, then set them.
5519 Otherwise, sign extend the number. */
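/* For example, val == 0xffff is not a valid immediate, but
   ~(0xffff | ~0xffff) == 0 is, so the top bits get set and the
   resulting value of -1 can be loaded with a single mvn.  */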
5520
5521 if (const_ok_for_arm (~(val | ~0xffff)))
5522 val |= ~0xffff;
5523 else if (val & 0x8000)
5524 val |= ~0xffff;
5525 }
5526
5527 emit_insn (gen_movsi (reg, GEN_INT (val)));
5528 operands[1] = gen_lowpart (HImode, reg);
5529 }
5530 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5531 && GET_CODE (operands[1]) == MEM)
5532 {
5533 rtx reg = gen_reg_rtx (SImode);
5534
5535 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5536 operands[1] = gen_lowpart (HImode, reg);
5537 }
5538 else if (!arm_arch4)
5539 {
5540 if (GET_CODE (operands[1]) == MEM)
5541 {
5542 rtx base;
5543 rtx offset = const0_rtx;
5544 rtx reg = gen_reg_rtx (SImode);
5545
5546 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5547 || (GET_CODE (base) == PLUS
5548 && (GET_CODE (offset = XEXP (base, 1))
5549 == CONST_INT)
5550 && ((INTVAL(offset) & 1) != 1)
5551 && GET_CODE (base = XEXP (base, 0)) == REG))
5552 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5553 {
5554 rtx new_rtx;
5555
5556 new_rtx = widen_memory_access (operands[1], SImode,
5557 ((INTVAL (offset) & ~3)
5558 - INTVAL (offset)));
5559 emit_insn (gen_movsi (reg, new_rtx));
5560 if (((INTVAL (offset) & 2) != 0)
5561 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5562 {
5563 rtx reg2 = gen_reg_rtx (SImode);
5564
5565 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5566 reg = reg2;
5567 }
5568 }
5569 else
5570 emit_insn (gen_movhi_bytes (reg, operands[1]));
5571
5572 operands[1] = gen_lowpart (HImode, reg);
5573 }
5574 }
5575 }
5576 /* Handle loading a large integer during reload. */
5577 else if (GET_CODE (operands[1]) == CONST_INT
5578 && !const_ok_for_arm (INTVAL (operands[1]))
5579 && !const_ok_for_arm (~INTVAL (operands[1])))
5580 {
5581 /* Writing a constant to memory needs a scratch, which should
5582 be handled with SECONDARY_RELOADs. */
5583 gcc_assert (GET_CODE (operands[0]) == REG);
5584
5585 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5586 emit_insn (gen_movsi (operands[0], operands[1]));
5587 DONE;
5588 }
5589 }
5590 else if (TARGET_THUMB2)
5591 {
5592 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5593 if (can_create_pseudo_p ())
5594 {
5595 if (GET_CODE (operands[0]) != REG)
5596 operands[1] = force_reg (HImode, operands[1]);
5597 /* Zero extend a constant, and keep it in an SImode reg. */
5598 else if (GET_CODE (operands[1]) == CONST_INT)
5599 {
5600 rtx reg = gen_reg_rtx (SImode);
5601 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5602
5603 emit_insn (gen_movsi (reg, GEN_INT (val)));
5604 operands[1] = gen_lowpart (HImode, reg);
5605 }
5606 }
5607 }
5608 else /* TARGET_THUMB1 */
5609 {
5610 if (can_create_pseudo_p ())
5611 {
5612 if (GET_CODE (operands[1]) == CONST_INT)
5613 {
5614 rtx reg = gen_reg_rtx (SImode);
5615
5616 emit_insn (gen_movsi (reg, operands[1]));
5617 operands[1] = gen_lowpart (HImode, reg);
5618 }
5619
5620 /* ??? We shouldn't really get invalid addresses here, but this can
5621 happen if we are passed an SP-relative address (never OK for
5622 HImode/QImode) or a virtual-register-relative address (also
5623 rejected as illegitimate for HImode/QImode).
5624 /* ??? This should perhaps be fixed elsewhere, for instance, in
5625 fixup_stack_1, by checking for other kinds of invalid addresses,
5626 e.g. a bare reference to a virtual register. This may confuse the
5627 alpha though, which must handle this case differently. */
5628 if (GET_CODE (operands[0]) == MEM
5629 && !memory_address_p (GET_MODE (operands[0]),
5630 XEXP (operands[0], 0)))
5631 operands[0]
5632 = replace_equiv_address (operands[0],
5633 copy_to_reg (XEXP (operands[0], 0)));
5634
5635 if (GET_CODE (operands[1]) == MEM
5636 && !memory_address_p (GET_MODE (operands[1]),
5637 XEXP (operands[1], 0)))
5638 operands[1]
5639 = replace_equiv_address (operands[1],
5640 copy_to_reg (XEXP (operands[1], 0)));
5641
5642 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5643 {
5644 rtx reg = gen_reg_rtx (SImode);
5645
5646 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5647 operands[1] = gen_lowpart (HImode, reg);
5648 }
5649
5650 if (GET_CODE (operands[0]) == MEM)
5651 operands[1] = force_reg (HImode, operands[1]);
5652 }
5653 else if (GET_CODE (operands[1]) == CONST_INT
5654 && !satisfies_constraint_I (operands[1]))
5655 {
5656 /* Handle loading a large integer during reload. */
5657
5658 /* Writing a constant to memory needs a scratch, which should
5659 be handled with SECONDARY_RELOADs. */
5660 gcc_assert (GET_CODE (operands[0]) == REG);
5661
5662 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5663 emit_insn (gen_movsi (operands[0], operands[1]));
5664 DONE;
5665 }
5666 }
5667 "
5668 )
5669
5670 (define_insn "*thumb1_movhi_insn"
5671 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5672 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5673 "TARGET_THUMB1
5674 && ( register_operand (operands[0], HImode)
5675 || register_operand (operands[1], HImode))"
5676 "*
5677 switch (which_alternative)
5678 {
5679 case 0: return \"add %0, %1, #0\";
5680 case 2: return \"strh %1, %0\";
5681 case 3: return \"mov %0, %1\";
5682 case 4: return \"mov %0, %1\";
5683 case 5: return \"mov %0, %1\";
5684 default: gcc_unreachable ();
5685 case 1:
5686 /* The stack pointer can end up being taken as an index register.
5687 Catch this case here and deal with it. */
5688 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5689 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5690 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5691 {
5692 rtx ops[2];
5693 ops[0] = operands[0];
5694 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5695
5696 output_asm_insn (\"mov %0, %1\", ops);
5697
5698 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5699
5700 }
5701 return \"ldrh %0, %1\";
5702 }"
5703 [(set_attr "length" "2,4,2,2,2,2")
5704 (set_attr "type" "*,load1,store1,*,*,*")]
5705 )
5706
5707
5708 (define_expand "movhi_bytes"
5709 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5710 (set (match_dup 3)
5711 (zero_extend:SI (match_dup 6)))
5712 (set (match_operand:SI 0 "" "")
5713 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5714 "TARGET_ARM"
5715 "
5716 {
5717 rtx mem1, mem2;
5718 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5719
5720 mem1 = change_address (operands[1], QImode, addr);
5721 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5722 operands[0] = gen_lowpart (SImode, operands[0]);
5723 operands[1] = mem1;
5724 operands[2] = gen_reg_rtx (SImode);
5725 operands[3] = gen_reg_rtx (SImode);
5726 operands[6] = mem2;
5727
5728 if (BYTES_BIG_ENDIAN)
5729 {
5730 operands[4] = operands[2];
5731 operands[5] = operands[3];
5732 }
5733 else
5734 {
5735 operands[4] = operands[3];
5736 operands[5] = operands[2];
5737 }
5738 }"
5739 )
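;; For a little-endian load of the halfword at [r3] this expands to roughly
;;     ldrb    r2, [r3]            @ byte at offset 0
;;     ldrb    r4, [r3, #1]        @ byte at offset 1
;;     orr     r0, r2, r4, lsl #8
;; (register numbers illustrative).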
5740
5741 (define_expand "movhi_bigend"
5742 [(set (match_dup 2)
5743 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5744 (const_int 16)))
5745 (set (match_dup 3)
5746 (ashiftrt:SI (match_dup 2) (const_int 16)))
5747 (set (match_operand:HI 0 "s_register_operand" "")
5748 (match_dup 4))]
5749 "TARGET_ARM"
5750 "
5751 operands[2] = gen_reg_rtx (SImode);
5752 operands[3] = gen_reg_rtx (SImode);
5753 operands[4] = gen_lowpart (HImode, operands[3]);
5754 "
5755 )
5756
5757 ;; Pattern to recognize the insn generated by the default case above
5758 (define_insn "*movhi_insn_arch4"
5759 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5760 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5761 "TARGET_ARM
5762 && arm_arch4
5763 && (GET_CODE (operands[1]) != CONST_INT
5764 || const_ok_for_arm (INTVAL (operands[1]))
5765 || const_ok_for_arm (~INTVAL (operands[1])))"
5766 "@
5767 mov%?\\t%0, %1\\t%@ movhi
5768 mvn%?\\t%0, #%B1\\t%@ movhi
5769 str%(h%)\\t%1, %0\\t%@ movhi
5770 ldr%(h%)\\t%0, %1\\t%@ movhi"
5771 [(set_attr "type" "*,*,store1,load1")
5772 (set_attr "predicable" "yes")
5773 (set_attr "pool_range" "*,*,*,256")
5774 (set_attr "neg_pool_range" "*,*,*,244")]
5775 )
5776
5777 (define_insn "*movhi_bytes"
5778 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5779 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5780 "TARGET_ARM"
5781 "@
5782 mov%?\\t%0, %1\\t%@ movhi
5783 mvn%?\\t%0, #%B1\\t%@ movhi"
5784 [(set_attr "predicable" "yes")]
5785 )
5786
5787 (define_expand "thumb_movhi_clobber"
5788 [(set (match_operand:HI 0 "memory_operand" "")
5789 (match_operand:HI 1 "register_operand" ""))
5790 (clobber (match_operand:DI 2 "register_operand" ""))]
5791 "TARGET_THUMB1"
5792 "
5793 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5794 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5795 {
5796 emit_insn (gen_movhi (operands[0], operands[1]));
5797 DONE;
5798 }
5799 /* XXX Fixme, need to handle other cases here as well. */
5800 gcc_unreachable ();
5801 "
5802 )
5803
5804 ;; We use a DImode scratch because we may occasionally need an additional
5805 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5806 ;; to take any notice of the "o" constraints on the reload_memory_operand operand.
5807 (define_expand "reload_outhi"
5808 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5809 (match_operand:HI 1 "s_register_operand" "r")
5810 (match_operand:DI 2 "s_register_operand" "=&l")])]
5811 "TARGET_EITHER"
5812 "if (TARGET_ARM)
5813 arm_reload_out_hi (operands);
5814 else
5815 thumb_reload_out_hi (operands);
5816 DONE;
5817 "
5818 )
5819
5820 (define_expand "reload_inhi"
5821 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5822 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5823 (match_operand:DI 2 "s_register_operand" "=&r")])]
5824 "TARGET_EITHER"
5825 "
5826 if (TARGET_ARM)
5827 arm_reload_in_hi (operands);
5828 else
5829 thumb_reload_out_hi (operands);
5830 DONE;
5831 ")
5832
5833 (define_expand "movqi"
5834 [(set (match_operand:QI 0 "general_operand" "")
5835 (match_operand:QI 1 "general_operand" ""))]
5836 "TARGET_EITHER"
5837 "
5838 /* Everything except mem = const or mem = mem can be done easily.  */
5839
5840 if (can_create_pseudo_p ())
5841 {
5842 if (GET_CODE (operands[1]) == CONST_INT)
5843 {
5844 rtx reg = gen_reg_rtx (SImode);
5845
5846 /* For thumb we want an unsigned immediate, so that we are more likely
5847 to be able to use a movs insn.  */
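/* e.g. (QImode) -1 is masked down to 255, which fits the 8-bit
   immediate range of the Thumb-1 move instruction.  */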
5848 if (TARGET_THUMB)
5849 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5850
5851 emit_insn (gen_movsi (reg, operands[1]));
5852 operands[1] = gen_lowpart (QImode, reg);
5853 }
5854
5855 if (TARGET_THUMB)
5856 {
5857 /* ??? We shouldn't really get invalid addresses here, but this can
5858 happen if we are passed an SP-relative address (never OK for
5859 HImode/QImode) or a virtual-register-relative address (also
5860 rejected as illegitimate for HImode/QImode).
5861 /* ??? This should perhaps be fixed elsewhere, for instance, in
5862 fixup_stack_1, by checking for other kinds of invalid addresses,
5863 e.g. a bare reference to a virtual register. This may confuse the
5864 alpha though, which must handle this case differently. */
5865 if (GET_CODE (operands[0]) == MEM
5866 && !memory_address_p (GET_MODE (operands[0]),
5867 XEXP (operands[0], 0)))
5868 operands[0]
5869 = replace_equiv_address (operands[0],
5870 copy_to_reg (XEXP (operands[0], 0)));
5871 if (GET_CODE (operands[1]) == MEM
5872 && !memory_address_p (GET_MODE (operands[1]),
5873 XEXP (operands[1], 0)))
5874 operands[1]
5875 = replace_equiv_address (operands[1],
5876 copy_to_reg (XEXP (operands[1], 0)));
5877 }
5878
5879 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5880 {
5881 rtx reg = gen_reg_rtx (SImode);
5882
5883 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5884 operands[1] = gen_lowpart (QImode, reg);
5885 }
5886
5887 if (GET_CODE (operands[0]) == MEM)
5888 operands[1] = force_reg (QImode, operands[1]);
5889 }
5890 else if (TARGET_THUMB
5891 && GET_CODE (operands[1]) == CONST_INT
5892 && !satisfies_constraint_I (operands[1]))
5893 {
5894 /* Handle loading a large integer during reload. */
5895
5896 /* Writing a constant to memory needs a scratch, which should
5897 be handled with SECONDARY_RELOADs. */
5898 gcc_assert (GET_CODE (operands[0]) == REG);
5899
5900 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5901 emit_insn (gen_movsi (operands[0], operands[1]));
5902 DONE;
5903 }
5904 "
5905 )
5906
5907
5908 (define_insn "*arm_movqi_insn"
5909 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5910 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5911 "TARGET_32BIT
5912 && ( register_operand (operands[0], QImode)
5913 || register_operand (operands[1], QImode))"
5914 "@
5915 mov%?\\t%0, %1
5916 mvn%?\\t%0, #%B1
5917 ldr%(b%)\\t%0, %1
5918 str%(b%)\\t%1, %0"
5919 [(set_attr "type" "*,*,load1,store1")
5920 (set_attr "predicable" "yes")]
5921 )
5922
5923 (define_insn "*thumb1_movqi_insn"
5924 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5925 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5926 "TARGET_THUMB1
5927 && ( register_operand (operands[0], QImode)
5928 || register_operand (operands[1], QImode))"
5929 "@
5930 add\\t%0, %1, #0
5931 ldrb\\t%0, %1
5932 strb\\t%1, %0
5933 mov\\t%0, %1
5934 mov\\t%0, %1
5935 mov\\t%0, %1"
5936 [(set_attr "length" "2")
5937 (set_attr "type" "*,load1,store1,*,*,*")
5938 (set_attr "pool_range" "*,32,*,*,*,*")]
5939 )
5940
5941 ;; HFmode moves
5942 (define_expand "movhf"
5943 [(set (match_operand:HF 0 "general_operand" "")
5944 (match_operand:HF 1 "general_operand" ""))]
5945 "TARGET_EITHER"
5946 "
5947 if (TARGET_32BIT)
5948 {
5949 if (GET_CODE (operands[0]) == MEM)
5950 operands[1] = force_reg (HFmode, operands[1]);
5951 }
5952 else /* TARGET_THUMB1 */
5953 {
5954 if (can_create_pseudo_p ())
5955 {
5956 if (GET_CODE (operands[0]) != REG)
5957 operands[1] = force_reg (HFmode, operands[1]);
5958 }
5959 }
5960 "
5961 )
5962
5963 (define_insn "*arm32_movhf"
5964 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5965 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5966 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5967 && ( s_register_operand (operands[0], HFmode)
5968 || s_register_operand (operands[1], HFmode))"
5969 "*
5970 switch (which_alternative)
5971 {
5972 case 0: /* ARM register from memory */
5973 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5974 case 1: /* memory from ARM register */
5975 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5976 case 2: /* ARM register from ARM register */
5977 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5978 case 3: /* ARM register from constant */
5979 {
5980 REAL_VALUE_TYPE r;
5981 long bits;
5982 rtx ops[4];
5983
5984 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5985 bits = real_to_target (NULL, &r, HFmode);
5986 ops[0] = operands[0];
5987 ops[1] = GEN_INT (bits);
5988 ops[2] = GEN_INT (bits & 0xff00);
5989 ops[3] = GEN_INT (bits & 0x00ff);
5990
5991 if (arm_arch_thumb2)
5992 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5993 else
5994 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5995 return \"\";
5996 }
5997 default:
5998 gcc_unreachable ();
5999 }
6000 "
6001 [(set_attr "conds" "unconditional")
6002 (set_attr "type" "load1,store1,*,*")
6003 (set_attr "length" "4,4,4,8")
6004 (set_attr "predicable" "yes")
6005 ]
6006 )
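;; e.g. the __fp16 constant 1.0 has the bit pattern 0x3c00, so alternative 3
;; emits roughly "movw r0, #0x3c00" on Thumb-2 capable cores, or builds the
;; value from its high and low bytes with a mov/orr pair otherwise
;; (register number illustrative).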
6007
6008 (define_insn "*thumb1_movhf"
6009 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
6010 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
6011 "TARGET_THUMB1
6012 && ( s_register_operand (operands[0], HFmode)
6013 || s_register_operand (operands[1], HFmode))"
6014 "*
6015 switch (which_alternative)
6016 {
6017 case 1:
6018 {
6019 rtx addr;
6020 gcc_assert (GET_CODE(operands[1]) == MEM);
6021 addr = XEXP (operands[1], 0);
6022 if (GET_CODE (addr) == LABEL_REF
6023 || (GET_CODE (addr) == CONST
6024 && GET_CODE (XEXP (addr, 0)) == PLUS
6025 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6026 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6027 {
6028 /* Constant pool entry. */
6029 return \"ldr\\t%0, %1\";
6030 }
6031 return \"ldrh\\t%0, %1\";
6032 }
6033 case 2: return \"strh\\t%1, %0\";
6034 default: return \"mov\\t%0, %1\";
6035 }
6036 "
6037 [(set_attr "length" "2")
6038 (set_attr "type" "*,load1,store1,*,*")
6039 (set_attr "pool_range" "*,1020,*,*,*")]
6040 )
6041
6042 (define_expand "movsf"
6043 [(set (match_operand:SF 0 "general_operand" "")
6044 (match_operand:SF 1 "general_operand" ""))]
6045 "TARGET_EITHER"
6046 "
6047 if (TARGET_32BIT)
6048 {
6049 if (GET_CODE (operands[0]) == MEM)
6050 operands[1] = force_reg (SFmode, operands[1]);
6051 }
6052 else /* TARGET_THUMB1 */
6053 {
6054 if (can_create_pseudo_p ())
6055 {
6056 if (GET_CODE (operands[0]) != REG)
6057 operands[1] = force_reg (SFmode, operands[1]);
6058 }
6059 }
6060 "
6061 )
6062
6063 ;; Transform a move of a floating-point constant into a core register
6064 ;; into the equivalent SImode constant move.
6065 (define_split
6066 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6067 (match_operand:SF 1 "immediate_operand" ""))]
6068 "TARGET_EITHER
6069 && reload_completed
6070 && GET_CODE (operands[1]) == CONST_DOUBLE"
6071 [(set (match_dup 2) (match_dup 3))]
6072 "
6073 operands[2] = gen_lowpart (SImode, operands[0]);
6074 operands[3] = gen_lowpart (SImode, operands[1]);
6075 if (operands[2] == 0 || operands[3] == 0)
6076 FAIL;
6077 "
6078 )
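;; e.g. after reload "r0 = 2.0f" becomes a plain SImode move of the IEEE bit
;; pattern, roughly "mov r0, #0x40000000" (register number illustrative).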
6079
6080 (define_insn "*arm_movsf_soft_insn"
6081 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6082 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6083 "TARGET_ARM
6084 && TARGET_SOFT_FLOAT
6085 && (GET_CODE (operands[0]) != MEM
6086 || register_operand (operands[1], SFmode))"
6087 "@
6088 mov%?\\t%0, %1
6089 ldr%?\\t%0, %1\\t%@ float
6090 str%?\\t%1, %0\\t%@ float"
6091 [(set_attr "length" "4,4,4")
6092 (set_attr "predicable" "yes")
6093 (set_attr "type" "*,load1,store1")
6094 (set_attr "pool_range" "*,4096,*")
6095 (set_attr "neg_pool_range" "*,4084,*")]
6096 )
6097
6098 ;;; ??? This should have alternatives for constants.
6099 (define_insn "*thumb1_movsf_insn"
6100 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6101 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
6102 "TARGET_THUMB1
6103 && ( register_operand (operands[0], SFmode)
6104 || register_operand (operands[1], SFmode))"
6105 "@
6106 add\\t%0, %1, #0
6107 ldmia\\t%1, {%0}
6108 stmia\\t%0, {%1}
6109 ldr\\t%0, %1
6110 str\\t%1, %0
6111 mov\\t%0, %1
6112 mov\\t%0, %1"
6113 [(set_attr "length" "2")
6114 (set_attr "type" "*,load1,store1,load1,store1,*,*")
6115 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6116 )
6117
6118 (define_expand "movdf"
6119 [(set (match_operand:DF 0 "general_operand" "")
6120 (match_operand:DF 1 "general_operand" ""))]
6121 "TARGET_EITHER"
6122 "
6123 if (TARGET_32BIT)
6124 {
6125 if (GET_CODE (operands[0]) == MEM)
6126 operands[1] = force_reg (DFmode, operands[1]);
6127 }
6128 else /* TARGET_THUMB */
6129 {
6130 if (can_create_pseudo_p ())
6131 {
6132 if (GET_CODE (operands[0]) != REG)
6133 operands[1] = force_reg (DFmode, operands[1]);
6134 }
6135 }
6136 "
6137 )
6138
6139 ;; Reloading a DFmode value stored in integer regs to memory can require a
6140 ;; scratch reg.
6141 (define_expand "reload_outdf"
6142 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6143 (match_operand:DF 1 "s_register_operand" "r")
6144 (match_operand:SI 2 "s_register_operand" "=&r")]
6145 "TARGET_32BIT"
6146 "
6147 {
6148 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6149
6150 if (code == REG)
6151 operands[2] = XEXP (operands[0], 0);
6152 else if (code == POST_INC || code == PRE_DEC)
6153 {
6154 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6155 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6156 emit_insn (gen_movdi (operands[0], operands[1]));
6157 DONE;
6158 }
6159 else if (code == PRE_INC)
6160 {
6161 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6162
6163 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6164 operands[2] = reg;
6165 }
6166 else if (code == POST_DEC)
6167 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6168 else
6169 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6170 XEXP (XEXP (operands[0], 0), 1)));
6171
6172 emit_insn (gen_rtx_SET (VOIDmode,
6173 replace_equiv_address (operands[0], operands[2]),
6174 operands[1]));
6175
6176 if (code == POST_DEC)
6177 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6178
6179 DONE;
6180 }"
6181 )
6182
6183 (define_insn "*movdf_soft_insn"
6184 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6185 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6186 "TARGET_ARM && TARGET_SOFT_FLOAT
6187 && ( register_operand (operands[0], DFmode)
6188 || register_operand (operands[1], DFmode))"
6189 "*
6190 switch (which_alternative)
6191 {
6192 case 0:
6193 case 1:
6194 case 2:
6195 return \"#\";
6196 default:
6197 return output_move_double (operands);
6198 }
6199 "
6200 [(set_attr "length" "8,12,16,8,8")
6201 (set_attr "type" "*,*,*,load2,store2")
6202 (set_attr "pool_range" "1020")
6203 (set_attr "neg_pool_range" "1008")]
6204 )
6205
6206 ;;; ??? This should have alternatives for constants.
6207 ;;; ??? This was originally identical to the movdi_insn pattern.
6208 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6209 ;;; thumb_reorg with a memory reference.
6210 (define_insn "*thumb_movdf_insn"
6211 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6212 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
6213 "TARGET_THUMB1
6214 && ( register_operand (operands[0], DFmode)
6215 || register_operand (operands[1], DFmode))"
6216 "*
6217 switch (which_alternative)
6218 {
6219 default:
6220 case 0:
6221 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6222 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6223 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6224 case 1:
6225 return \"ldmia\\t%1, {%0, %H0}\";
6226 case 2:
6227 return \"stmia\\t%0, {%1, %H1}\";
6228 case 3:
6229 return thumb_load_double_from_address (operands);
6230 case 4:
6231 operands[2] = gen_rtx_MEM (SImode,
6232 plus_constant (XEXP (operands[0], 0), 4));
6233 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6234 return \"\";
6235 case 5:
6236 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6237 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6238 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6239 }
6240 "
6241 [(set_attr "length" "4,2,2,6,4,4")
6242 (set_attr "type" "*,load2,store2,load2,store2,*")
6243 (set_attr "pool_range" "*,*,*,1020,*,*")]
6244 )
6245
6246 (define_expand "movxf"
6247 [(set (match_operand:XF 0 "general_operand" "")
6248 (match_operand:XF 1 "general_operand" ""))]
6249 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6250 "
6251 if (GET_CODE (operands[0]) == MEM)
6252 operands[1] = force_reg (XFmode, operands[1]);
6253 "
6254 )
6255
6256 \f
6257
6258 ;; load- and store-multiple insns
6259 ;; The ARM can load/store any set of registers, provided that they are in
6260 ;; ascending order, but that generality is beyond GCC, so stick with what it knows.
6261
6262 (define_expand "load_multiple"
6263 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6264 (match_operand:SI 1 "" ""))
6265 (use (match_operand:SI 2 "" ""))])]
6266 "TARGET_32BIT"
6267 {
6268 HOST_WIDE_INT offset = 0;
6269
6270 /* Support only fixed point registers. */
6271 if (GET_CODE (operands[2]) != CONST_INT
6272 || INTVAL (operands[2]) > 14
6273 || INTVAL (operands[2]) < 2
6274 || GET_CODE (operands[1]) != MEM
6275 || GET_CODE (operands[0]) != REG
6276 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6277 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6278 FAIL;
6279
6280 operands[3]
6281 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6282 force_reg (SImode, XEXP (operands[1], 0)),
6283 TRUE, FALSE, operands[1], &offset);
6284 })
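;; e.g. loading four consecutive registers starting at r0 from the address in
;; r4 yields the parallel matched by "*ldmsi4" below, which assembles to
;;     ldmia   r4, {r0, r1, r2, r3}
;; (register numbers illustrative).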
6285
6286 ;; Load multiple with write-back
6287
6288 (define_insn "*ldmsi_postinc4"
6289 [(match_parallel 0 "load_multiple_operation"
6290 [(set (match_operand:SI 1 "s_register_operand" "=r")
6291 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6292 (const_int 16)))
6293 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6294 (mem:SI (match_dup 2)))
6295 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6296 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6297 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6298 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6299 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6300 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6301 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6302 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6303 [(set_attr "type" "load4")
6304 (set_attr "predicable" "yes")]
6305 )
6306
6307 (define_insn "*ldmsi_postinc4_thumb1"
6308 [(match_parallel 0 "load_multiple_operation"
6309 [(set (match_operand:SI 1 "s_register_operand" "=l")
6310 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6311 (const_int 16)))
6312 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6313 (mem:SI (match_dup 2)))
6314 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6315 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6316 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6317 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6318 (set (match_operand:SI 6 "arm_hard_register_operand" "")
6319 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6320 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6321 "ldmia\\t%1!, {%3, %4, %5, %6}"
6322 [(set_attr "type" "load4")]
6323 )
6324
6325 (define_insn "*ldmsi_postinc3"
6326 [(match_parallel 0 "load_multiple_operation"
6327 [(set (match_operand:SI 1 "s_register_operand" "=r")
6328 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6329 (const_int 12)))
6330 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6331 (mem:SI (match_dup 2)))
6332 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6333 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6334 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6335 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6336 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6337 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6338 [(set_attr "type" "load3")
6339 (set_attr "predicable" "yes")]
6340 )
6341
6342 (define_insn "*ldmsi_postinc2"
6343 [(match_parallel 0 "load_multiple_operation"
6344 [(set (match_operand:SI 1 "s_register_operand" "=r")
6345 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6346 (const_int 8)))
6347 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6348 (mem:SI (match_dup 2)))
6349 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6350 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6351 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6352 "ldm%(ia%)\\t%1!, {%3, %4}"
6353 [(set_attr "type" "load2")
6354 (set_attr "predicable" "yes")]
6355 )
6356
6357 ;; Ordinary load multiple
6358
6359 (define_insn "*ldmsi4"
6360 [(match_parallel 0 "load_multiple_operation"
6361 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6362 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6363 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6364 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6365 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6366 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6367 (set (match_operand:SI 5 "arm_hard_register_operand" "")
6368 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6369 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6370 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6371 [(set_attr "type" "load4")
6372 (set_attr "predicable" "yes")]
6373 )
6374
6375 (define_insn "*ldmsi3"
6376 [(match_parallel 0 "load_multiple_operation"
6377 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6378 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6379 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6380 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6381 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6382 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6383 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6384 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6385 [(set_attr "type" "load3")
6386 (set_attr "predicable" "yes")]
6387 )
6388
6389 (define_insn "*ldmsi2"
6390 [(match_parallel 0 "load_multiple_operation"
6391 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6392 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6393 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6394 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6395 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6396 "ldm%(ia%)\\t%1, {%2, %3}"
6397 [(set_attr "type" "load2")
6398 (set_attr "predicable" "yes")]
6399 )
6400
6401 (define_expand "store_multiple"
6402 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6403 (match_operand:SI 1 "" ""))
6404 (use (match_operand:SI 2 "" ""))])]
6405 "TARGET_32BIT"
6406 {
6407 HOST_WIDE_INT offset = 0;
6408
6409 /* Support only fixed point registers. */
6410 if (GET_CODE (operands[2]) != CONST_INT
6411 || INTVAL (operands[2]) > 14
6412 || INTVAL (operands[2]) < 2
6413 || GET_CODE (operands[1]) != REG
6414 || GET_CODE (operands[0]) != MEM
6415 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6416 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6417 FAIL;
6418
6419 operands[3]
6420 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6421 force_reg (SImode, XEXP (operands[0], 0)),
6422 TRUE, FALSE, operands[0], &offset);
6423 })
6424
6425 ;; Store multiple with write-back
6426
6427 (define_insn "*stmsi_postinc4"
6428 [(match_parallel 0 "store_multiple_operation"
6429 [(set (match_operand:SI 1 "s_register_operand" "=r")
6430 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6431 (const_int 16)))
6432 (set (mem:SI (match_dup 2))
6433 (match_operand:SI 3 "arm_hard_register_operand" ""))
6434 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6435 (match_operand:SI 4 "arm_hard_register_operand" ""))
6436 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6437 (match_operand:SI 5 "arm_hard_register_operand" ""))
6438 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6439 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6440 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6441 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6442 [(set_attr "predicable" "yes")
6443 (set_attr "type" "store4")]
6444 )
6445
6446 (define_insn "*stmsi_postinc4_thumb1"
6447 [(match_parallel 0 "store_multiple_operation"
6448 [(set (match_operand:SI 1 "s_register_operand" "=l")
6449 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6450 (const_int 16)))
6451 (set (mem:SI (match_dup 2))
6452 (match_operand:SI 3 "arm_hard_register_operand" ""))
6453 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6454 (match_operand:SI 4 "arm_hard_register_operand" ""))
6455 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6456 (match_operand:SI 5 "arm_hard_register_operand" ""))
6457 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6458 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6459 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6460 "stmia\\t%1!, {%3, %4, %5, %6}"
6461 [(set_attr "type" "store4")]
6462 )
6463
6464 (define_insn "*stmsi_postinc3"
6465 [(match_parallel 0 "store_multiple_operation"
6466 [(set (match_operand:SI 1 "s_register_operand" "=r")
6467 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6468 (const_int 12)))
6469 (set (mem:SI (match_dup 2))
6470 (match_operand:SI 3 "arm_hard_register_operand" ""))
6471 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6472 (match_operand:SI 4 "arm_hard_register_operand" ""))
6473 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6474 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6475 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6476 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6477 [(set_attr "predicable" "yes")
6478 (set_attr "type" "store3")]
6479 )
6480
6481 (define_insn "*stmsi_postinc2"
6482 [(match_parallel 0 "store_multiple_operation"
6483 [(set (match_operand:SI 1 "s_register_operand" "=r")
6484 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6485 (const_int 8)))
6486 (set (mem:SI (match_dup 2))
6487 (match_operand:SI 3 "arm_hard_register_operand" ""))
6488 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6489 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6490 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6491 "stm%(ia%)\\t%1!, {%3, %4}"
6492 [(set_attr "predicable" "yes")
6493 (set_attr "type" "store2")]
6494 )
6495
6496 ;; Ordinary store multiple
6497
6498 (define_insn "*stmsi4"
6499 [(match_parallel 0 "store_multiple_operation"
6500 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6501 (match_operand:SI 2 "arm_hard_register_operand" ""))
6502 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6503 (match_operand:SI 3 "arm_hard_register_operand" ""))
6504 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6505 (match_operand:SI 4 "arm_hard_register_operand" ""))
6506 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6507 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6508 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6509 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6510 [(set_attr "predicable" "yes")
6511 (set_attr "type" "store4")]
6512 )
6513
6514 (define_insn "*stmsi3"
6515 [(match_parallel 0 "store_multiple_operation"
6516 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6517 (match_operand:SI 2 "arm_hard_register_operand" ""))
6518 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6519 (match_operand:SI 3 "arm_hard_register_operand" ""))
6520 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6521 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6522 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6523 "stm%(ia%)\\t%1, {%2, %3, %4}"
6524 [(set_attr "predicable" "yes")
6525 (set_attr "type" "store3")]
6526 )
6527
6528 (define_insn "*stmsi2"
6529 [(match_parallel 0 "store_multiple_operation"
6530 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6531 (match_operand:SI 2 "arm_hard_register_operand" ""))
6532 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6533 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6534 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6535 "stm%(ia%)\\t%1, {%2, %3}"
6536 [(set_attr "predicable" "yes")
6537 (set_attr "type" "store2")]
6538 )
6539
6540 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6541 ;; We could let this apply for blocks of less than this, but it clobbers so
6542 ;; many registers that there is then probably a better way.
6543
6544 (define_expand "movmemqi"
6545 [(match_operand:BLK 0 "general_operand" "")
6546 (match_operand:BLK 1 "general_operand" "")
6547 (match_operand:SI 2 "const_int_operand" "")
6548 (match_operand:SI 3 "const_int_operand" "")]
6549 "TARGET_EITHER"
6550 "
6551 if (TARGET_32BIT)
6552 {
6553 if (arm_gen_movmemqi (operands))
6554 DONE;
6555 FAIL;
6556 }
6557 else /* TARGET_THUMB1 */
6558 {
6559 if ( INTVAL (operands[3]) != 4
6560 || INTVAL (operands[2]) > 48)
6561 FAIL;
6562
6563 thumb_expand_movmemqi (operands);
6564 DONE;
6565 }
6566 "
6567 )
6568
6569 ;; Thumb block-move insns
6570
6571 (define_insn "movmem12b"
6572 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6573 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6574 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6575 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6576 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6577 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6578 (set (match_operand:SI 0 "register_operand" "=l")
6579 (plus:SI (match_dup 2) (const_int 12)))
6580 (set (match_operand:SI 1 "register_operand" "=l")
6581 (plus:SI (match_dup 3) (const_int 12)))
6582 (clobber (match_scratch:SI 4 "=&l"))
6583 (clobber (match_scratch:SI 5 "=&l"))
6584 (clobber (match_scratch:SI 6 "=&l"))]
6585 "TARGET_THUMB1"
6586 "* return thumb_output_move_mem_multiple (3, operands);"
6587 [(set_attr "length" "4")
6588 ; This isn't entirely accurate... It loads as well, but in terms of
6589 ; scheduling the following insn it is better to consider it as a store
6590 (set_attr "type" "store3")]
6591 )
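;; thumb_output_move_mem_multiple (3, ...) is expected to emit roughly
;;     ldmia   r1!, {r4, r5, r6}
;;     stmia   r0!, {r4, r5, r6}
;; using the three scratch registers clobbered above (register numbers
;; illustrative).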
6592
6593 (define_insn "movmem8b"
6594 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6595 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6596 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6597 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6598 (set (match_operand:SI 0 "register_operand" "=l")
6599 (plus:SI (match_dup 2) (const_int 8)))
6600 (set (match_operand:SI 1 "register_operand" "=l")
6601 (plus:SI (match_dup 3) (const_int 8)))
6602 (clobber (match_scratch:SI 4 "=&l"))
6603 (clobber (match_scratch:SI 5 "=&l"))]
6604 "TARGET_THUMB1"
6605 "* return thumb_output_move_mem_multiple (2, operands);"
6606 [(set_attr "length" "4")
6607 ; This isn't entirely accurate... It loads as well, but in terms of
6608 ; scheduling the following insn it is better to consider it as a store
6609 (set_attr "type" "store2")]
6610 )
6611
6612 \f
6613
6614 ;; Compare & branch insns
6615 ;; The range calculations are as follows:
6616 ;; For forward branches, the address calculation returns the address of
6617 ;; the next instruction. This is 2 beyond the branch instruction.
6618 ;; For backward branches, the address calculation returns the address of
6619 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6620 ;; instruction for the shortest sequence, and 4 before the branch instruction
6621 ;; if we have to jump around an unconditional branch.
6622 ;; To the basic branch range the PC offset must be added (this is +4).
6623 ;; So for forward branches we have
6624 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6625 ;; And for backward branches we have
6626 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6627 ;;
6628 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6629 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
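;; (Worked example for 'b<cond>': forward 254 - 2 + 4 = 256, backward
;; -256 - (-2) + 4 = -250, giving the -250..256 range quoted above.)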
6630
6631 (define_expand "cbranchsi4"
6632 [(set (pc) (if_then_else
6633 (match_operator 0 "arm_comparison_operator"
6634 [(match_operand:SI 1 "s_register_operand" "")
6635 (match_operand:SI 2 "nonmemory_operand" "")])
6636 (label_ref (match_operand 3 "" ""))
6637 (pc)))]
6638 "TARGET_THUMB1 || TARGET_32BIT"
6639 "
6640 if (!TARGET_THUMB1)
6641 {
6642 if (!arm_add_operand (operands[2], SImode))
6643 operands[2] = force_reg (SImode, operands[2]);
6644 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6645 operands[3]));
6646 DONE;
6647 }
6648 if (thumb1_cmpneg_operand (operands[2], SImode))
6649 {
6650 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6651 operands[3], operands[0]));
6652 DONE;
6653 }
6654 if (!thumb1_cmp_operand (operands[2], SImode))
6655 operands[2] = force_reg (SImode, operands[2]);
6656 ")
6657
6658 ;; A pattern to recognize a special situation and optimize for it.
6659 ;; On the thumb, zero-extension from memory is preferable to sign-extension
6660 ;; due to the available addressing modes. Hence, convert a signed comparison
6661 ;; with zero into an unsigned comparison with 127 if possible.
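;; e.g. "(signed char) x >= 0" becomes "(unsigned char) x <= 127" and
;; "(signed char) x < 0" becomes "(unsigned char) x > 127", so the operand
;; can be fetched with ldrb rather than ldrsb.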
6662 (define_expand "cbranchqi4"
6663 [(set (pc) (if_then_else
6664 (match_operator 0 "lt_ge_comparison_operator"
6665 [(match_operand:QI 1 "memory_operand" "")
6666 (match_operand:QI 2 "const0_operand" "")])
6667 (label_ref (match_operand 3 "" ""))
6668 (pc)))]
6669 "TARGET_THUMB1"
6670 {
6671 rtx xops[3];
6672 xops[1] = gen_reg_rtx (SImode);
6673 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6674 xops[2] = GEN_INT (127);
6675 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6676 VOIDmode, xops[1], xops[2]);
6677 xops[3] = operands[3];
6678 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
6679 DONE;
6680 })
6681
6682 (define_expand "cbranchsf4"
6683 [(set (pc) (if_then_else
6684 (match_operator 0 "arm_comparison_operator"
6685 [(match_operand:SF 1 "s_register_operand" "")
6686 (match_operand:SF 2 "arm_float_compare_operand" "")])
6687 (label_ref (match_operand 3 "" ""))
6688 (pc)))]
6689 "TARGET_32BIT && TARGET_HARD_FLOAT"
6690 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6691 operands[3])); DONE;"
6692 )
6693
6694 (define_expand "cbranchdf4"
6695 [(set (pc) (if_then_else
6696 (match_operator 0 "arm_comparison_operator"
6697 [(match_operand:DF 1 "s_register_operand" "")
6698 (match_operand:DF 2 "arm_float_compare_operand" "")])
6699 (label_ref (match_operand 3 "" ""))
6700 (pc)))]
6701 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6702 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6703 operands[3])); DONE;"
6704 )
6705
6706 ;; This uses the Cirrus DI compare instruction.
6707 (define_expand "cbranchdi4"
6708 [(set (pc) (if_then_else
6709 (match_operator 0 "arm_comparison_operator"
6710 [(match_operand:DI 1 "cirrus_fp_register" "")
6711 (match_operand:DI 2 "cirrus_fp_register" "")])
6712 (label_ref (match_operand 3 "" ""))
6713 (pc)))]
6714 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6715 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6716 operands[3])); DONE;"
6717 )
6718
6719 (define_insn "cbranchsi4_insn"
6720 [(set (pc) (if_then_else
6721 (match_operator 0 "arm_comparison_operator"
6722 [(match_operand:SI 1 "s_register_operand" "l,*h")
6723 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6724 (label_ref (match_operand 3 "" ""))
6725 (pc)))]
6726 "TARGET_THUMB1"
6727 "*
6728 rtx t = prev_nonnote_insn (insn);
6729 if (t != NULL_RTX
6730 && INSN_P (t)
6731 && INSN_CODE (t) == CODE_FOR_cbranchsi4_insn)
6732 {
6733 t = XEXP (SET_SRC (PATTERN (t)), 0);
6734 if (!rtx_equal_p (XEXP (t, 0), operands[1])
6735 || !rtx_equal_p (XEXP (t, 1), operands[2]))
6736 t = NULL_RTX;
6737 }
6738 else
6739 t = NULL_RTX;
6740 if (t == NULL_RTX)
6741 output_asm_insn (\"cmp\\t%1, %2\", operands);
6742
6743 switch (get_attr_length (insn))
6744 {
6745 case 4: return \"b%d0\\t%l3\";
6746 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6747 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6748 }
6749 "
6750 [(set (attr "far_jump")
6751 (if_then_else
6752 (eq_attr "length" "8")
6753 (const_string "yes")
6754 (const_string "no")))
6755 (set (attr "length")
6756 (if_then_else
6757 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6758 (le (minus (match_dup 3) (pc)) (const_int 256)))
6759 (const_int 4)
6760 (if_then_else
6761 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6762 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6763 (const_int 6)
6764 (const_int 8))))]
6765 )
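;; Illustrative only: depending on the branch distance computed by the
;; "length" attribute above, this insn is output in one of three forms,
;; approximately
;;    cmp r0, r1 ; beq .Ltarget                          (length 4)
;;    cmp r0, r1 ; bne .LCBn ; b  .Ltarget ; .LCBn:      (length 6)
;;    cmp r0, r1 ; bne .LCBn ; bl .Ltarget ; .LCBn:      (length 8, far_jump)
;; where the condition (here eq) comes from operand 0, and the compare is
;; omitted when the previous insn already performed the identical compare.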
6766
6767 (define_insn "cbranchsi4_scratch"
6768 [(set (pc) (if_then_else
6769 (match_operator 4 "arm_comparison_operator"
6770 [(match_operand:SI 1 "s_register_operand" "l,0")
6771 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6772 (label_ref (match_operand 3 "" ""))
6773 (pc)))
6774 (clobber (match_scratch:SI 0 "=l,l"))]
6775 "TARGET_THUMB1"
6776 "*
6777 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6778
6779 switch (get_attr_length (insn))
6780 {
6781 case 4: return \"b%d4\\t%l3\";
6782 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6783 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6784 }
6785 "
6786 [(set (attr "far_jump")
6787 (if_then_else
6788 (eq_attr "length" "8")
6789 (const_string "yes")
6790 (const_string "no")))
6791 (set (attr "length")
6792 (if_then_else
6793 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6794 (le (minus (match_dup 3) (pc)) (const_int 256)))
6795 (const_int 4)
6796 (if_then_else
6797 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6798 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6799 (const_int 6)
6800 (const_int 8))))]
6801 )
6802
6803 (define_insn "*movsi_cbranchsi4"
6804 [(set (pc)
6805 (if_then_else
6806 (match_operator 3 "arm_comparison_operator"
6807 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6808 (const_int 0)])
6809 (label_ref (match_operand 2 "" ""))
6810 (pc)))
6811 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6812 (match_dup 1))]
6813 "TARGET_THUMB1"
6814 "*{
6815 if (which_alternative == 0)
6816 output_asm_insn (\"cmp\t%0, #0\", operands);
6817 else if (which_alternative == 1)
6818 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6819 else
6820 {
6821 output_asm_insn (\"cmp\t%1, #0\", operands);
6822 if (which_alternative == 2)
6823 output_asm_insn (\"mov\t%0, %1\", operands);
6824 else
6825 output_asm_insn (\"str\t%1, %0\", operands);
6826 }
6827 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6828 {
6829 case 4: return \"b%d3\\t%l2\";
6830 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6831 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6832 }
6833 }"
6834 [(set (attr "far_jump")
6835 (if_then_else
6836 (ior (and (gt (symbol_ref ("which_alternative"))
6837 (const_int 1))
6838 (eq_attr "length" "8"))
6839 (eq_attr "length" "10"))
6840 (const_string "yes")
6841 (const_string "no")))
6842 (set (attr "length")
6843 (if_then_else
6844 (le (symbol_ref ("which_alternative"))
6845 (const_int 1))
6846 (if_then_else
6847 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6848 (le (minus (match_dup 2) (pc)) (const_int 256)))
6849 (const_int 4)
6850 (if_then_else
6851 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6852 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6853 (const_int 6)
6854 (const_int 8)))
6855 (if_then_else
6856 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6857 (le (minus (match_dup 2) (pc)) (const_int 256)))
6858 (const_int 6)
6859 (if_then_else
6860 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6861 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6862 (const_int 8)
6863 (const_int 10)))))]
6864 )
6865
6866 (define_peephole2
6867 [(set (match_operand:SI 0 "low_register_operand" "")
6868 (match_operand:SI 1 "low_register_operand" ""))
6869 (set (pc)
6870 (if_then_else (match_operator 2 "arm_comparison_operator"
6871 [(match_dup 1) (const_int 0)])
6872 (label_ref (match_operand 3 "" ""))
6873 (pc)))]
6874 "TARGET_THUMB1"
6875 [(parallel
6876 [(set (pc)
6877 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6878 (label_ref (match_dup 3))
6879 (pc)))
6880 (set (match_dup 0) (match_dup 1))])]
6881 ""
6882 )
6883
6884 ;; Sigh! This variant shouldn't be needed, but combine often fails to
6885 ;; merge cases like this because the op1 is a hard register in
6886 ;; CLASS_LIKELY_SPILLED_P.
6887 (define_peephole2
6888 [(set (match_operand:SI 0 "low_register_operand" "")
6889 (match_operand:SI 1 "low_register_operand" ""))
6890 (set (pc)
6891 (if_then_else (match_operator 2 "arm_comparison_operator"
6892 [(match_dup 0) (const_int 0)])
6893 (label_ref (match_operand 3 "" ""))
6894 (pc)))]
6895 "TARGET_THUMB1"
6896 [(parallel
6897 [(set (pc)
6898 (if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6899 (label_ref (match_dup 3))
6900 (pc)))
6901 (set (match_dup 0) (match_dup 1))])]
6902 ""
6903 )
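;; Illustrative effect of the two peepholes above: a register copy followed
;; by a branch on (the copy of) that value, e.g.
;;    mov r1, r0 ; cmp r0, #0 ; beq .L1
;; is folded into the *movsi_cbranchsi4 pattern, whose "sub r1, r0, #0"
;; alternative performs the copy and sets the flags in one instruction.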
6904
6905 (define_insn "*negated_cbranchsi4"
6906 [(set (pc)
6907 (if_then_else
6908 (match_operator 0 "equality_operator"
6909 [(match_operand:SI 1 "s_register_operand" "l")
6910 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6911 (label_ref (match_operand 3 "" ""))
6912 (pc)))]
6913 "TARGET_THUMB1"
6914 "*
6915 output_asm_insn (\"cmn\\t%1, %2\", operands);
6916 switch (get_attr_length (insn))
6917 {
6918 case 4: return \"b%d0\\t%l3\";
6919 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6920 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6921 }
6922 "
6923 [(set (attr "far_jump")
6924 (if_then_else
6925 (eq_attr "length" "8")
6926 (const_string "yes")
6927 (const_string "no")))
6928 (set (attr "length")
6929 (if_then_else
6930 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6931 (le (minus (match_dup 3) (pc)) (const_int 256)))
6932 (const_int 4)
6933 (if_then_else
6934 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6935 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6936 (const_int 6)
6937 (const_int 8))))]
6938 )
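;; For illustration: an equality test against a negated value,
;; "if (x == -y)", is handled here with CMN, roughly
;;    cmn   r0, r1        @ flags from r0 + r1; Z set iff r0 == -r1
;;    beq   .Ltarget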
6939
6940 (define_insn "*tbit_cbranch"
6941 [(set (pc)
6942 (if_then_else
6943 (match_operator 0 "equality_operator"
6944 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6945 (const_int 1)
6946 (match_operand:SI 2 "const_int_operand" "i"))
6947 (const_int 0)])
6948 (label_ref (match_operand 3 "" ""))
6949 (pc)))
6950 (clobber (match_scratch:SI 4 "=l"))]
6951 "TARGET_THUMB1"
6952 "*
6953 {
6954 rtx op[3];
6955 op[0] = operands[4];
6956 op[1] = operands[1];
6957 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6958
6959 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6960 switch (get_attr_length (insn))
6961 {
6962 case 4: return \"b%d0\\t%l3\";
6963 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6964 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6965 }
6966 }"
6967 [(set (attr "far_jump")
6968 (if_then_else
6969 (eq_attr "length" "8")
6970 (const_string "yes")
6971 (const_string "no")))
6972 (set (attr "length")
6973 (if_then_else
6974 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6975 (le (minus (match_dup 3) (pc)) (const_int 256)))
6976 (const_int 4)
6977 (if_then_else
6978 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6979 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6980 (const_int 6)
6981 (const_int 8))))]
6982 )
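;; For illustration: a single-bit test such as "if (x & (1 << 5))" can be
;; matched here; the bit is shifted up into the sign bit of the scratch
;; register and the branch then tests N, roughly
;;    lsl   r3, r0, #26   @ 32 - 1 - 5; bit 5 becomes bit 31
;;    bmi   .Ltarget      @ taken when the bit was set
;; (the eq/ne operator is printed as a pl/mi condition for this form).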
6983
6984 (define_insn "*tlobits_cbranch"
6985 [(set (pc)
6986 (if_then_else
6987 (match_operator 0 "equality_operator"
6988 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6989 (match_operand:SI 2 "const_int_operand" "i")
6990 (const_int 0))
6991 (const_int 0)])
6992 (label_ref (match_operand 3 "" ""))
6993 (pc)))
6994 (clobber (match_scratch:SI 4 "=l"))]
6995 "TARGET_THUMB1"
6996 "*
6997 {
6998 rtx op[3];
6999 op[0] = operands[4];
7000 op[1] = operands[1];
7001 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7002
7003 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7004 switch (get_attr_length (insn))
7005 {
7006 case 4: return \"b%d0\\t%l3\";
7007 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7008 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7009 }
7010 }"
7011 [(set (attr "far_jump")
7012 (if_then_else
7013 (eq_attr "length" "8")
7014 (const_string "yes")
7015 (const_string "no")))
7016 (set (attr "length")
7017 (if_then_else
7018 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7019 (le (minus (match_dup 3) (pc)) (const_int 256)))
7020 (const_int 4)
7021 (if_then_else
7022 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7023 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7024 (const_int 6)
7025 (const_int 8))))]
7026 )
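;; For illustration: this form tests whether the low N bits are all zero,
;; e.g. "if ((x & 0xff) == 0)" with N = 8 becomes roughly
;;    lsl   r3, r0, #24   @ 32 - 8; only bits 7..0 remain, at the top
;;    beq   .Ltarget      @ Z set iff the low 8 bits were all zero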
7027
7028 (define_insn "*tstsi3_cbranch"
7029 [(set (pc)
7030 (if_then_else
7031 (match_operator 3 "equality_operator"
7032 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7033 (match_operand:SI 1 "s_register_operand" "l"))
7034 (const_int 0)])
7035 (label_ref (match_operand 2 "" ""))
7036 (pc)))]
7037 "TARGET_THUMB1"
7038 "*
7039 {
7040 output_asm_insn (\"tst\\t%0, %1\", operands);
7041 switch (get_attr_length (insn))
7042 {
7043 case 4: return \"b%d3\\t%l2\";
7044 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7045 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7046 }
7047 }"
7048 [(set (attr "far_jump")
7049 (if_then_else
7050 (eq_attr "length" "8")
7051 (const_string "yes")
7052 (const_string "no")))
7053 (set (attr "length")
7054 (if_then_else
7055 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7056 (le (minus (match_dup 2) (pc)) (const_int 256)))
7057 (const_int 4)
7058 (if_then_else
7059 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7060 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7061 (const_int 6)
7062 (const_int 8))))]
7063 )
7064
7065 (define_insn "*andsi3_cbranch"
7066 [(set (pc)
7067 (if_then_else
7068 (match_operator 5 "equality_operator"
7069 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7070 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7071 (const_int 0)])
7072 (label_ref (match_operand 4 "" ""))
7073 (pc)))
7074 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7075 (and:SI (match_dup 2) (match_dup 3)))
7076 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7077 "TARGET_THUMB1"
7078 "*
7079 {
7080 if (which_alternative == 0)
7081 output_asm_insn (\"and\\t%0, %3\", operands);
7082 else if (which_alternative == 1)
7083 {
7084 output_asm_insn (\"and\\t%1, %3\", operands);
7085 output_asm_insn (\"mov\\t%0, %1\", operands);
7086 }
7087 else
7088 {
7089 output_asm_insn (\"and\\t%1, %3\", operands);
7090 output_asm_insn (\"str\\t%1, %0\", operands);
7091 }
7092
7093 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7094 {
7095 case 4: return \"b%d5\\t%l4\";
7096 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7097 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7098 }
7099 }"
7100 [(set (attr "far_jump")
7101 (if_then_else
7102 (ior (and (eq (symbol_ref ("which_alternative"))
7103 (const_int 0))
7104 (eq_attr "length" "8"))
7105 (eq_attr "length" "10"))
7106 (const_string "yes")
7107 (const_string "no")))
7108 (set (attr "length")
7109 (if_then_else
7110 (eq (symbol_ref ("which_alternative"))
7111 (const_int 0))
7112 (if_then_else
7113 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7114 (le (minus (match_dup 4) (pc)) (const_int 256)))
7115 (const_int 4)
7116 (if_then_else
7117 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7118 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7119 (const_int 6)
7120 (const_int 8)))
7121 (if_then_else
7122 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7123 (le (minus (match_dup 4) (pc)) (const_int 256)))
7124 (const_int 6)
7125 (if_then_else
7126 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7127 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7128 (const_int 8)
7129 (const_int 10)))))]
7130 )
7131
7132 (define_insn "*orrsi3_cbranch_scratch"
7133 [(set (pc)
7134 (if_then_else
7135 (match_operator 4 "equality_operator"
7136 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7137 (match_operand:SI 2 "s_register_operand" "l"))
7138 (const_int 0)])
7139 (label_ref (match_operand 3 "" ""))
7140 (pc)))
7141 (clobber (match_scratch:SI 0 "=l"))]
7142 "TARGET_THUMB1"
7143 "*
7144 {
7145 output_asm_insn (\"orr\\t%0, %2\", operands);
7146 switch (get_attr_length (insn))
7147 {
7148 case 4: return \"b%d4\\t%l3\";
7149 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7150 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7151 }
7152 }"
7153 [(set (attr "far_jump")
7154 (if_then_else
7155 (eq_attr "length" "8")
7156 (const_string "yes")
7157 (const_string "no")))
7158 (set (attr "length")
7159 (if_then_else
7160 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7161 (le (minus (match_dup 3) (pc)) (const_int 256)))
7162 (const_int 4)
7163 (if_then_else
7164 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7165 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7166 (const_int 6)
7167 (const_int 8))))]
7168 )
7169
7170 (define_insn "*orrsi3_cbranch"
7171 [(set (pc)
7172 (if_then_else
7173 (match_operator 5 "equality_operator"
7174 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7175 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7176 (const_int 0)])
7177 (label_ref (match_operand 4 "" ""))
7178 (pc)))
7179 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7180 (ior:SI (match_dup 2) (match_dup 3)))
7181 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7182 "TARGET_THUMB1"
7183 "*
7184 {
7185 if (which_alternative == 0)
7186 output_asm_insn (\"orr\\t%0, %3\", operands);
7187 else if (which_alternative == 1)
7188 {
7189 output_asm_insn (\"orr\\t%1, %3\", operands);
7190 output_asm_insn (\"mov\\t%0, %1\", operands);
7191 }
7192 else
7193 {
7194 output_asm_insn (\"orr\\t%1, %3\", operands);
7195 output_asm_insn (\"str\\t%1, %0\", operands);
7196 }
7197
7198 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7199 {
7200 case 4: return \"b%d5\\t%l4\";
7201 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7202 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7203 }
7204 }"
7205 [(set (attr "far_jump")
7206 (if_then_else
7207 (ior (and (eq (symbol_ref ("which_alternative"))
7208 (const_int 0))
7209 (eq_attr "length" "8"))
7210 (eq_attr "length" "10"))
7211 (const_string "yes")
7212 (const_string "no")))
7213 (set (attr "length")
7214 (if_then_else
7215 (eq (symbol_ref ("which_alternative"))
7216 (const_int 0))
7217 (if_then_else
7218 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7219 (le (minus (match_dup 4) (pc)) (const_int 256)))
7220 (const_int 4)
7221 (if_then_else
7222 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7223 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7224 (const_int 6)
7225 (const_int 8)))
7226 (if_then_else
7227 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7228 (le (minus (match_dup 4) (pc)) (const_int 256)))
7229 (const_int 6)
7230 (if_then_else
7231 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7232 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7233 (const_int 8)
7234 (const_int 10)))))]
7235 )
7236
7237 (define_insn "*xorsi3_cbranch_scratch"
7238 [(set (pc)
7239 (if_then_else
7240 (match_operator 4 "equality_operator"
7241 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7242 (match_operand:SI 2 "s_register_operand" "l"))
7243 (const_int 0)])
7244 (label_ref (match_operand 3 "" ""))
7245 (pc)))
7246 (clobber (match_scratch:SI 0 "=l"))]
7247 "TARGET_THUMB1"
7248 "*
7249 {
7250 output_asm_insn (\"eor\\t%0, %2\", operands);
7251 switch (get_attr_length (insn))
7252 {
7253 case 4: return \"b%d4\\t%l3\";
7254 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7255 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7256 }
7257 }"
7258 [(set (attr "far_jump")
7259 (if_then_else
7260 (eq_attr "length" "8")
7261 (const_string "yes")
7262 (const_string "no")))
7263 (set (attr "length")
7264 (if_then_else
7265 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7266 (le (minus (match_dup 3) (pc)) (const_int 256)))
7267 (const_int 4)
7268 (if_then_else
7269 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7270 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7271 (const_int 6)
7272 (const_int 8))))]
7273 )
7274
7275 (define_insn "*xorsi3_cbranch"
7276 [(set (pc)
7277 (if_then_else
7278 (match_operator 5 "equality_operator"
7279 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7280 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7281 (const_int 0)])
7282 (label_ref (match_operand 4 "" ""))
7283 (pc)))
7284 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7285 (xor:SI (match_dup 2) (match_dup 3)))
7286 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7287 "TARGET_THUMB1"
7288 "*
7289 {
7290 if (which_alternative == 0)
7291 output_asm_insn (\"eor\\t%0, %3\", operands);
7292 else if (which_alternative == 1)
7293 {
7294 output_asm_insn (\"eor\\t%1, %3\", operands);
7295 output_asm_insn (\"mov\\t%0, %1\", operands);
7296 }
7297 else
7298 {
7299 output_asm_insn (\"eor\\t%1, %3\", operands);
7300 output_asm_insn (\"str\\t%1, %0\", operands);
7301 }
7302
7303 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7304 {
7305 case 4: return \"b%d5\\t%l4\";
7306 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7307 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7308 }
7309 }"
7310 [(set (attr "far_jump")
7311 (if_then_else
7312 (ior (and (eq (symbol_ref ("which_alternative"))
7313 (const_int 0))
7314 (eq_attr "length" "8"))
7315 (eq_attr "length" "10"))
7316 (const_string "yes")
7317 (const_string "no")))
7318 (set (attr "length")
7319 (if_then_else
7320 (eq (symbol_ref ("which_alternative"))
7321 (const_int 0))
7322 (if_then_else
7323 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7324 (le (minus (match_dup 4) (pc)) (const_int 256)))
7325 (const_int 4)
7326 (if_then_else
7327 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7328 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7329 (const_int 6)
7330 (const_int 8)))
7331 (if_then_else
7332 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7333 (le (minus (match_dup 4) (pc)) (const_int 256)))
7334 (const_int 6)
7335 (if_then_else
7336 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7337 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7338 (const_int 8)
7339 (const_int 10)))))]
7340 )
7341
7342 (define_insn "*bicsi3_cbranch_scratch"
7343 [(set (pc)
7344 (if_then_else
7345 (match_operator 4 "equality_operator"
7346 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7347 (match_operand:SI 1 "s_register_operand" "0"))
7348 (const_int 0)])
7349 (label_ref (match_operand 3 "" ""))
7350 (pc)))
7351 (clobber (match_scratch:SI 0 "=l"))]
7352 "TARGET_THUMB1"
7353 "*
7354 {
7355 output_asm_insn (\"bic\\t%0, %2\", operands);
7356 switch (get_attr_length (insn))
7357 {
7358 case 4: return \"b%d4\\t%l3\";
7359 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7360 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7361 }
7362 }"
7363 [(set (attr "far_jump")
7364 (if_then_else
7365 (eq_attr "length" "8")
7366 (const_string "yes")
7367 (const_string "no")))
7368 (set (attr "length")
7369 (if_then_else
7370 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7371 (le (minus (match_dup 3) (pc)) (const_int 256)))
7372 (const_int 4)
7373 (if_then_else
7374 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7375 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7376 (const_int 6)
7377 (const_int 8))))]
7378 )
7379
7380 (define_insn "*bicsi3_cbranch"
7381 [(set (pc)
7382 (if_then_else
7383 (match_operator 5 "equality_operator"
7384 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7385 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7386 (const_int 0)])
7387 (label_ref (match_operand 4 "" ""))
7388 (pc)))
7389 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7390 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
7391 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7392 "TARGET_THUMB1"
7393 "*
7394 {
7395 if (which_alternative == 0)
7396 output_asm_insn (\"bic\\t%0, %3\", operands);
7397 else if (which_alternative <= 2)
7398 {
7399 output_asm_insn (\"bic\\t%1, %3\", operands);
7400 /* It's ok if OP0 is a lo-reg, even though the mov will set the
7401 conditions again, since we're only testing for equality. */
7402 output_asm_insn (\"mov\\t%0, %1\", operands);
7403 }
7404 else
7405 {
7406 output_asm_insn (\"bic\\t%1, %3\", operands);
7407 output_asm_insn (\"str\\t%1, %0\", operands);
7408 }
7409
7410 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7411 {
7412 case 4: return \"b%d5\\t%l4\";
7413 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7414 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7415 }
7416 }"
7417 [(set (attr "far_jump")
7418 (if_then_else
7419 (ior (and (eq (symbol_ref ("which_alternative"))
7420 (const_int 0))
7421 (eq_attr "length" "8"))
7422 (eq_attr "length" "10"))
7423 (const_string "yes")
7424 (const_string "no")))
7425 (set (attr "length")
7426 (if_then_else
7427 (eq (symbol_ref ("which_alternative"))
7428 (const_int 0))
7429 (if_then_else
7430 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7431 (le (minus (match_dup 4) (pc)) (const_int 256)))
7432 (const_int 4)
7433 (if_then_else
7434 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7435 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7436 (const_int 6)
7437 (const_int 8)))
7438 (if_then_else
7439 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7440 (le (minus (match_dup 4) (pc)) (const_int 256)))
7441 (const_int 6)
7442 (if_then_else
7443 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7444 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7445 (const_int 8)
7446 (const_int 10)))))]
7447 )
7448
7449 (define_insn "*cbranchne_decr1"
7450 [(set (pc)
7451 (if_then_else (match_operator 3 "equality_operator"
7452 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7453 (const_int 0)])
7454 (label_ref (match_operand 4 "" ""))
7455 (pc)))
7456 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7457 (plus:SI (match_dup 2) (const_int -1)))
7458 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7459 "TARGET_THUMB1"
7460 "*
7461 {
7462 rtx cond[2];
7463 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7464 ? GEU : LTU),
7465 VOIDmode, operands[2], const1_rtx);
7466 cond[1] = operands[4];
7467
7468 if (which_alternative == 0)
7469 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7470 else if (which_alternative == 1)
7471 {
7472 /* We must provide an alternative for a hi reg because reload
7473 cannot handle output reloads on a jump instruction, but we
7474 can't subtract into that. Fortunately a mov from lo to hi
7475 does not clobber the condition codes. */
7476 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7477 output_asm_insn (\"mov\\t%0, %1\", operands);
7478 }
7479 else
7480 {
7481 /* Similarly, but the target is memory. */
7482 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7483 output_asm_insn (\"str\\t%1, %0\", operands);
7484 }
7485
7486 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7487 {
7488 case 4:
7489 output_asm_insn (\"b%d0\\t%l1\", cond);
7490 return \"\";
7491 case 6:
7492 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7493 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7494 default:
7495 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7496 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7497 }
7498 }
7499 "
7500 [(set (attr "far_jump")
7501 (if_then_else
7502 (ior (and (eq (symbol_ref ("which_alternative"))
7503 (const_int 0))
7504 (eq_attr "length" "8"))
7505 (eq_attr "length" "10"))
7506 (const_string "yes")
7507 (const_string "no")))
7508 (set_attr_alternative "length"
7509 [
7510 ;; Alternative 0
7511 (if_then_else
7512 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7513 (le (minus (match_dup 4) (pc)) (const_int 256)))
7514 (const_int 4)
7515 (if_then_else
7516 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7517 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7518 (const_int 6)
7519 (const_int 8)))
7520 ;; Alternative 1
7521 (if_then_else
7522 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7523 (le (minus (match_dup 4) (pc)) (const_int 256)))
7524 (const_int 6)
7525 (if_then_else
7526 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7527 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7528 (const_int 8)
7529 (const_int 10)))
7530 ;; Alternative 2
7531 (if_then_else
7532 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7533 (le (minus (match_dup 4) (pc)) (const_int 256)))
7534 (const_int 6)
7535 (if_then_else
7536 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7537 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7538 (const_int 8)
7539 (const_int 10)))
7540 ;; Alternative 3
7541 (if_then_else
7542 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7543 (le (minus (match_dup 4) (pc)) (const_int 256)))
7544 (const_int 6)
7545 (if_then_else
7546 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7547 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7548 (const_int 8)
7549 (const_int 10)))])]
7550 )
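;; For illustration: a decrement-and-branch tail such as
;; "if (--n != 0) goto top" can match here; the subtraction itself leaves
;; the needed flag, roughly
;;    sub   r0, r0, #1    @ carry = (old n >= 1), i.e. n was non-zero
;;    bcs   .Ltop         @ the NE test is rewritten as GEU (n, 1)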
7551
7552 (define_insn "*addsi3_cbranch"
7553 [(set (pc)
7554 (if_then_else
7555 (match_operator 4 "arm_comparison_operator"
7556 [(plus:SI
7557 (match_operand:SI 2 "s_register_operand" "%l,0,*l,1,1,1")
7558 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*l,lIJ,lIJ,lIJ"))
7559 (const_int 0)])
7560 (label_ref (match_operand 5 "" ""))
7561 (pc)))
7562 (set
7563 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7564 (plus:SI (match_dup 2) (match_dup 3)))
7565 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7566 "TARGET_THUMB1
7567 && (GET_CODE (operands[4]) == EQ
7568 || GET_CODE (operands[4]) == NE
7569 || GET_CODE (operands[4]) == GE
7570 || GET_CODE (operands[4]) == LT)"
7571 "*
7572 {
7573 rtx cond[3];
7574
7575 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7576 cond[1] = operands[2];
7577 cond[2] = operands[3];
7578
7579 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7580 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7581 else
7582 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7583
7584 if (which_alternative >= 2
7585 && which_alternative < 4)
7586 output_asm_insn (\"mov\\t%0, %1\", operands);
7587 else if (which_alternative >= 4)
7588 output_asm_insn (\"str\\t%1, %0\", operands);
7589
7590 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7591 {
7592 case 4:
7593 return \"b%d4\\t%l5\";
7594 case 6:
7595 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7596 default:
7597 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7598 }
7599 }
7600 "
7601 [(set (attr "far_jump")
7602 (if_then_else
7603 (ior (and (lt (symbol_ref ("which_alternative"))
7604 (const_int 3))
7605 (eq_attr "length" "8"))
7606 (eq_attr "length" "10"))
7607 (const_string "yes")
7608 (const_string "no")))
7609 (set (attr "length")
7610 (if_then_else
7611 (lt (symbol_ref ("which_alternative"))
7612 (const_int 3))
7613 (if_then_else
7614 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7615 (le (minus (match_dup 5) (pc)) (const_int 256)))
7616 (const_int 4)
7617 (if_then_else
7618 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7619 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7620 (const_int 6)
7621 (const_int 8)))
7622 (if_then_else
7623 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7624 (le (minus (match_dup 5) (pc)) (const_int 256)))
7625 (const_int 6)
7626 (if_then_else
7627 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7628 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7629 (const_int 8)
7630 (const_int 10)))))]
7631 )
7632
7633 (define_insn "*addsi3_cbranch_scratch"
7634 [(set (pc)
7635 (if_then_else
7636 (match_operator 3 "arm_comparison_operator"
7637 [(plus:SI
7638 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7639 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7640 (const_int 0)])
7641 (label_ref (match_operand 4 "" ""))
7642 (pc)))
7643 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7644 "TARGET_THUMB1
7645 && (GET_CODE (operands[3]) == EQ
7646 || GET_CODE (operands[3]) == NE
7647 || GET_CODE (operands[3]) == GE
7648 || GET_CODE (operands[3]) == LT)"
7649 "*
7650 {
7651 switch (which_alternative)
7652 {
7653 case 0:
7654 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7655 break;
7656 case 1:
7657 output_asm_insn (\"cmn\t%1, %2\", operands);
7658 break;
7659 case 2:
7660 if (INTVAL (operands[2]) < 0)
7661 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7662 else
7663 output_asm_insn (\"add\t%0, %1, %2\", operands);
7664 break;
7665 case 3:
7666 if (INTVAL (operands[2]) < 0)
7667 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7668 else
7669 output_asm_insn (\"add\t%0, %0, %2\", operands);
7670 break;
7671 }
7672
7673 switch (get_attr_length (insn))
7674 {
7675 case 4:
7676 return \"b%d3\\t%l4\";
7677 case 6:
7678 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7679 default:
7680 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7681 }
7682 }
7683 "
7684 [(set (attr "far_jump")
7685 (if_then_else
7686 (eq_attr "length" "8")
7687 (const_string "yes")
7688 (const_string "no")))
7689 (set (attr "length")
7690 (if_then_else
7691 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7692 (le (minus (match_dup 4) (pc)) (const_int 256)))
7693 (const_int 4)
7694 (if_then_else
7695 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7696 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7697 (const_int 6)
7698 (const_int 8))))]
7699 )
7700
7701 (define_insn "*subsi3_cbranch"
7702 [(set (pc)
7703 (if_then_else
7704 (match_operator 4 "arm_comparison_operator"
7705 [(minus:SI
7706 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7707 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7708 (const_int 0)])
7709 (label_ref (match_operand 5 "" ""))
7710 (pc)))
7711 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7712 (minus:SI (match_dup 2) (match_dup 3)))
7713 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7714 "TARGET_THUMB1
7715 && (GET_CODE (operands[4]) == EQ
7716 || GET_CODE (operands[4]) == NE
7717 || GET_CODE (operands[4]) == GE
7718 || GET_CODE (operands[4]) == LT)"
7719 "*
7720 {
7721 if (which_alternative == 0)
7722 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7723 else if (which_alternative == 1)
7724 {
7725 /* We must provide an alternative for a hi reg because reload
7726 cannot handle output reloads on a jump instruction, but we
7727 can't subtract into that. Fortunately a mov from lo to hi
7728 does not clobber the condition codes. */
7729 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7730 output_asm_insn (\"mov\\t%0, %1\", operands);
7731 }
7732 else
7733 {
7734 /* Similarly, but the target is memory. */
7735 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7736 output_asm_insn (\"str\\t%1, %0\", operands);
7737 }
7738
7739 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7740 {
7741 case 4:
7742 return \"b%d4\\t%l5\";
7743 case 6:
7744 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7745 default:
7746 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7747 }
7748 }
7749 "
7750 [(set (attr "far_jump")
7751 (if_then_else
7752 (ior (and (eq (symbol_ref ("which_alternative"))
7753 (const_int 0))
7754 (eq_attr "length" "8"))
7755 (eq_attr "length" "10"))
7756 (const_string "yes")
7757 (const_string "no")))
7758 (set (attr "length")
7759 (if_then_else
7760 (eq (symbol_ref ("which_alternative"))
7761 (const_int 0))
7762 (if_then_else
7763 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7764 (le (minus (match_dup 5) (pc)) (const_int 256)))
7765 (const_int 4)
7766 (if_then_else
7767 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7768 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7769 (const_int 6)
7770 (const_int 8)))
7771 (if_then_else
7772 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7773 (le (minus (match_dup 5) (pc)) (const_int 256)))
7774 (const_int 6)
7775 (if_then_else
7776 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7777 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7778 (const_int 8)
7779 (const_int 10)))))]
7780 )
7781
7782 (define_insn "*subsi3_cbranch_scratch"
7783 [(set (pc)
7784 (if_then_else
7785 (match_operator 0 "arm_comparison_operator"
7786 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7787 (match_operand:SI 2 "nonmemory_operand" "l"))
7788 (const_int 0)])
7789 (label_ref (match_operand 3 "" ""))
7790 (pc)))]
7791 "TARGET_THUMB1
7792 && (GET_CODE (operands[0]) == EQ
7793 || GET_CODE (operands[0]) == NE
7794 || GET_CODE (operands[0]) == GE
7795 || GET_CODE (operands[0]) == LT)"
7796 "*
7797 output_asm_insn (\"cmp\\t%1, %2\", operands);
7798 switch (get_attr_length (insn))
7799 {
7800 case 4: return \"b%d0\\t%l3\";
7801 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7802 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7803 }
7804 "
7805 [(set (attr "far_jump")
7806 (if_then_else
7807 (eq_attr "length" "8")
7808 (const_string "yes")
7809 (const_string "no")))
7810 (set (attr "length")
7811 (if_then_else
7812 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7813 (le (minus (match_dup 3) (pc)) (const_int 256)))
7814 (const_int 4)
7815 (if_then_else
7816 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7817 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7818 (const_int 6)
7819 (const_int 8))))]
7820 )
7821
7822 ;; Comparison and test insns
7823
7824 (define_insn "*arm_cmpsi_insn"
7825 [(set (reg:CC CC_REGNUM)
7826 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7827 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7828 "TARGET_32BIT"
7829 "@
7830 cmp%?\\t%0, %1
7831 cmn%?\\t%0, #%n1"
7832 [(set_attr "conds" "set")]
7833 )
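;; For illustration: "r0 == 10" uses the first alternative, "cmp r0, #10",
;; while a constant such as -10 that is not a valid ARM immediate (but whose
;; negation is) uses the second, "cmn r0, #10", setting the flags from
;; r0 + 10 instead.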
7834
7835 (define_insn "*arm_cmpsi_shiftsi"
7836 [(set (reg:CC CC_REGNUM)
7837 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7838 (match_operator:SI 3 "shift_operator"
7839 [(match_operand:SI 1 "s_register_operand" "r")
7840 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7841 "TARGET_ARM"
7842 "cmp%?\\t%0, %1%S3"
7843 [(set_attr "conds" "set")
7844 (set_attr "shift" "1")
7845 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7846 (const_string "alu_shift")
7847 (const_string "alu_shift_reg")))]
7848 )
7849
7850 (define_insn "*arm_cmpsi_shiftsi_swp"
7851 [(set (reg:CC_SWP CC_REGNUM)
7852 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7853 [(match_operand:SI 1 "s_register_operand" "r")
7854 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7855 (match_operand:SI 0 "s_register_operand" "r")))]
7856 "TARGET_ARM"
7857 "cmp%?\\t%0, %1%S3"
7858 [(set_attr "conds" "set")
7859 (set_attr "shift" "1")
7860 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7861 (const_string "alu_shift")
7862 (const_string "alu_shift_reg")))]
7863 )
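;; Note (illustrative): in the two shifted-compare patterns above the
;; assembly always places the plain register first ("cmp %0, %1%S3"); when
;; the RTL has the shifted operand first, the result is recorded in
;; CC_SWPmode so that later users of the flags apply the swapped condition.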
7864
7865 (define_insn "*arm_cmpsi_negshiftsi_si"
7866 [(set (reg:CC_Z CC_REGNUM)
7867 (compare:CC_Z
7868 (neg:SI (match_operator:SI 1 "shift_operator"
7869 [(match_operand:SI 2 "s_register_operand" "r")
7870 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7871 (match_operand:SI 0 "s_register_operand" "r")))]
7872 "TARGET_ARM"
7873 "cmn%?\\t%0, %2%S1"
7874 [(set_attr "conds" "set")
7875 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7876 (const_string "alu_shift")
7877 (const_string "alu_shift_reg")))]
7878 )
7879
7880 ;; Cirrus SF compare instruction
7881 (define_insn "*cirrus_cmpsf"
7882 [(set (reg:CCFP CC_REGNUM)
7883 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7884 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7885 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7886 "cfcmps%?\\tr15, %V0, %V1"
7887 [(set_attr "type" "mav_farith")
7888 (set_attr "cirrus" "compare")]
7889 )
7890
7891 ;; Cirrus DF compare instruction
7892 (define_insn "*cirrus_cmpdf"
7893 [(set (reg:CCFP CC_REGNUM)
7894 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7895 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7896 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7897 "cfcmpd%?\\tr15, %V0, %V1"
7898 [(set_attr "type" "mav_farith")
7899 (set_attr "cirrus" "compare")]
7900 )
7901
7902 (define_insn "*cirrus_cmpdi"
7903 [(set (reg:CC CC_REGNUM)
7904 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7905 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7906 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7907 "cfcmp64%?\\tr15, %V0, %V1"
7908 [(set_attr "type" "mav_farith")
7909 (set_attr "cirrus" "compare")]
7910 )
7911
7912 ; This insn allows redundant compares to be removed by cse; nothing should
7913 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7914 ; is deleted later on. The match_dup will match the mode here, so that
7915 ; mode changes of the condition codes aren't lost by this even though we don't
7916 ; specify what they are.
7917
7918 (define_insn "*deleted_compare"
7919 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7920 "TARGET_32BIT"
7921 "\\t%@ deleted compare"
7922 [(set_attr "conds" "set")
7923 (set_attr "length" "0")]
7924 )
7925
7926 \f
7927 ;; Conditional branch insns
7928
7929 (define_expand "cbranch_cc"
7930 [(set (pc)
7931 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7932 (match_operand 2 "" "")])
7933 (label_ref (match_operand 3 "" ""))
7934 (pc)))]
7935 "TARGET_32BIT"
7936 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7937 operands[1], operands[2]);
7938 operands[2] = const0_rtx;"
7939 )
7940
7941 ;;
7942 ;; Patterns to match conditional branch insns.
7943 ;;
7944
7945 (define_insn "*arm_cond_branch"
7946 [(set (pc)
7947 (if_then_else (match_operator 1 "arm_comparison_operator"
7948 [(match_operand 2 "cc_register" "") (const_int 0)])
7949 (label_ref (match_operand 0 "" ""))
7950 (pc)))]
7951 "TARGET_32BIT"
7952 "*
7953 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7954 {
7955 arm_ccfsm_state += 2;
7956 return \"\";
7957 }
7958 return \"b%d1\\t%l0\";
7959 "
7960 [(set_attr "conds" "use")
7961 (set_attr "type" "branch")]
7962 )
7963
7964 (define_insn "*arm_cond_branch_reversed"
7965 [(set (pc)
7966 (if_then_else (match_operator 1 "arm_comparison_operator"
7967 [(match_operand 2 "cc_register" "") (const_int 0)])
7968 (pc)
7969 (label_ref (match_operand 0 "" ""))))]
7970 "TARGET_32BIT"
7971 "*
7972 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7973 {
7974 arm_ccfsm_state += 2;
7975 return \"\";
7976 }
7977 return \"b%D1\\t%l0\";
7978 "
7979 [(set_attr "conds" "use")
7980 (set_attr "type" "branch")]
7981 )
7982
7983 \f
7984
7985 ; scc insns
7986
7987 (define_expand "cstore_cc"
7988 [(set (match_operand:SI 0 "s_register_operand" "")
7989 (match_operator:SI 1 "" [(match_operand 2 "" "")
7990 (match_operand 3 "" "")]))]
7991 "TARGET_32BIT"
7992 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7993 operands[2], operands[3]);
7994 operands[3] = const0_rtx;"
7995 )
7996
7997 (define_insn "*mov_scc"
7998 [(set (match_operand:SI 0 "s_register_operand" "=r")
7999 (match_operator:SI 1 "arm_comparison_operator"
8000 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8001 "TARGET_ARM"
8002 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8003 [(set_attr "conds" "use")
8004 (set_attr "length" "8")]
8005 )
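;; For illustration: with the flags already set by a compare, this pattern
;; materialises a boolean, e.g. "r0 = (r1 > r2)" as roughly
;;    cmp    r1, r2
;;    movle  r0, #0
;;    movgt  r0, #1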
8006
8007 (define_insn "*mov_negscc"
8008 [(set (match_operand:SI 0 "s_register_operand" "=r")
8009 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8010 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8011 "TARGET_ARM"
8012 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8013 [(set_attr "conds" "use")
8014 (set_attr "length" "8")]
8015 )
8016
8017 (define_insn "*mov_notscc"
8018 [(set (match_operand:SI 0 "s_register_operand" "=r")
8019 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8020 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8021 "TARGET_ARM"
8022 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8023 [(set_attr "conds" "use")
8024 (set_attr "length" "8")]
8025 )
8026
8027 (define_expand "cstoresi4"
8028 [(set (match_operand:SI 0 "s_register_operand" "")
8029 (match_operator:SI 1 "arm_comparison_operator"
8030 [(match_operand:SI 2 "s_register_operand" "")
8031 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8032 "TARGET_32BIT || TARGET_THUMB1"
8033 "{
8034 rtx op3, scratch, scratch2;
8035
8036 if (!TARGET_THUMB1)
8037 {
8038 if (!arm_add_operand (operands[3], SImode))
8039 operands[3] = force_reg (SImode, operands[3]);
8040 emit_insn (gen_cstore_cc (operands[0], operands[1],
8041 operands[2], operands[3]));
8042 DONE;
8043 }
8044
8045 if (operands[3] == const0_rtx)
8046 {
8047 switch (GET_CODE (operands[1]))
8048 {
8049 case EQ:
8050 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8051 break;
8052
8053 case NE:
8054 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8055 break;
8056
8057 case LE:
8058 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8059 NULL_RTX, 0, OPTAB_WIDEN);
8060 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8061 NULL_RTX, 0, OPTAB_WIDEN);
8062 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8063 operands[0], 1, OPTAB_WIDEN);
8064 break;
8065
8066 case GE:
8067 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8068 NULL_RTX, 1);
8069 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8070 			operands[0], 1, OPTAB_WIDEN);
8071 break;
8072
8073 case GT:
8074 scratch = expand_binop (SImode, ashr_optab, operands[2],
8075 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8076 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8077 NULL_RTX, 0, OPTAB_WIDEN);
8078 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8079 0, OPTAB_WIDEN);
8080 break;
8081
8082 /* LT is handled by generic code. No need for unsigned with 0. */
8083 default:
8084 FAIL;
8085 }
8086 DONE;
8087 }
8088
8089 switch (GET_CODE (operands[1]))
8090 {
8091 case EQ:
8092 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8093 NULL_RTX, 0, OPTAB_WIDEN);
8094 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8095 break;
8096
8097 case NE:
8098 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8099 NULL_RTX, 0, OPTAB_WIDEN);
8100 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8101 break;
8102
8103 case LE:
8104 op3 = force_reg (SImode, operands[3]);
8105
8106 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8107 NULL_RTX, 1, OPTAB_WIDEN);
8108 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8109 NULL_RTX, 0, OPTAB_WIDEN);
8110 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8111 op3, operands[2]));
8112 break;
8113
8114 case GE:
8115 op3 = operands[3];
8116 if (!thumb1_cmp_operand (op3, SImode))
8117 op3 = force_reg (SImode, op3);
8118 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8119 NULL_RTX, 0, OPTAB_WIDEN);
8120 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8121 NULL_RTX, 1, OPTAB_WIDEN);
8122 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8123 operands[2], op3));
8124 break;
8125
8126 case LEU:
8127 op3 = force_reg (SImode, operands[3]);
8128 scratch = force_reg (SImode, const0_rtx);
8129 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8130 op3, operands[2]));
8131 break;
8132
8133 case GEU:
8134 op3 = operands[3];
8135 if (!thumb1_cmp_operand (op3, SImode))
8136 op3 = force_reg (SImode, op3);
8137 scratch = force_reg (SImode, const0_rtx);
8138 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8139 operands[2], op3));
8140 break;
8141
8142 case LTU:
8143 op3 = operands[3];
8144 if (!thumb1_cmp_operand (op3, SImode))
8145 op3 = force_reg (SImode, op3);
8146 scratch = gen_reg_rtx (SImode);
8147 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8148 break;
8149
8150 case GTU:
8151 op3 = force_reg (SImode, operands[3]);
8152 scratch = gen_reg_rtx (SImode);
8153 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8154 break;
8155
8156 /* No good sequences for GT, LT. */
8157 default:
8158 FAIL;
8159 }
8160 DONE;
8161 }")
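;; Worked example (illustrative) of the branch-free Thumb-1 sequences
;; above, for the LE-against-zero case: "r = (x <= 0)" is expanded as
;;    t1 = x + (-1);           /* x - 1 */
;;    t2 = x | t1;             /* sign bit set iff x < 0 or x == 0 */
;;    r  = (unsigned) t2 >> 31;
;; because x == 0 makes x - 1 negative, x < 0 already has the sign bit
;; set, and a positive x leaves both sign bits clear.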
8162
8163 (define_expand "cstoresf4"
8164 [(set (match_operand:SI 0 "s_register_operand" "")
8165 (match_operator:SI 1 "arm_comparison_operator"
8166 [(match_operand:SF 2 "s_register_operand" "")
8167 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8168 "TARGET_32BIT && TARGET_HARD_FLOAT"
8169 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8170 operands[2], operands[3])); DONE;"
8171 )
8172
8173 (define_expand "cstoredf4"
8174 [(set (match_operand:SI 0 "s_register_operand" "")
8175 (match_operator:SI 1 "arm_comparison_operator"
8176 [(match_operand:DF 2 "s_register_operand" "")
8177 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8178 "TARGET_32BIT && TARGET_HARD_FLOAT"
8179 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8180 operands[2], operands[3])); DONE;"
8181 )
8182
8183 ;; This uses the Cirrus DI compare instruction
8184 (define_expand "cstoredi4"
8185 [(set (match_operand:SI 0 "s_register_operand" "")
8186 (match_operator:SI 1 "arm_comparison_operator"
8187 [(match_operand:DI 2 "cirrus_fp_register" "")
8188 (match_operand:DI 3 "cirrus_fp_register" "")]))]
8189 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8190 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8191 operands[2], operands[3])); DONE;"
8192 )
8193
8194
8195 (define_expand "cstoresi_eq0_thumb1"
8196 [(parallel
8197 [(set (match_operand:SI 0 "s_register_operand" "")
8198 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8199 (const_int 0)))
8200 (clobber (match_dup:SI 2))])]
8201 "TARGET_THUMB1"
8202 "operands[2] = gen_reg_rtx (SImode);"
8203 )
8204
8205 (define_expand "cstoresi_ne0_thumb1"
8206 [(parallel
8207 [(set (match_operand:SI 0 "s_register_operand" "")
8208 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8209 (const_int 0)))
8210 (clobber (match_dup:SI 2))])]
8211 "TARGET_THUMB1"
8212 "operands[2] = gen_reg_rtx (SImode);"
8213 )
8214
8215 (define_insn "*cstoresi_eq0_thumb1_insn"
8216 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8217 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8218 (const_int 0)))
8219 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8220 "TARGET_THUMB1"
8221 "@
8222 neg\\t%0, %1\;adc\\t%0, %0, %1
8223 neg\\t%2, %1\;adc\\t%0, %1, %2"
8224 [(set_attr "length" "4")]
8225 )
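;; For illustration: "neg %0, %1 ; adc %0, %0, %1" computes (%1 == 0).
;; NEG (0 - %1) sets the carry flag exactly when %1 is zero (no borrow),
;; and ADC then produces -%1 + %1 + C = C.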
8226
8227 (define_insn "*cstoresi_ne0_thumb1_insn"
8228 [(set (match_operand:SI 0 "s_register_operand" "=l")
8229 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8230 (const_int 0)))
8231 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8232 "TARGET_THUMB1"
8233 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8234 [(set_attr "length" "4")]
8235 )
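;; Similarly (illustrative): "sub %2, %1, #1 ; sbc %0, %1, %2" computes
;; (%1 != 0).  The subtraction leaves C = 1 unless %1 was zero, and SBC
;; gives %1 - (%1 - 1) - !C = C.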
8236
8237 ;; Used as part of the expansion of thumb ltu and gtu sequences
8238 (define_insn "cstoresi_nltu_thumb1"
8239 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8240 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8241 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8242 "TARGET_THUMB1"
8243 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8244 [(set_attr "length" "4")]
8245 )
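;; For illustration: "cmp %1, %2 ; sbc %0, %0, %0" leaves %0 = 0 when
;; %1 >= %2 (unsigned, C = 1) and %0 = -1 when %1 < %2 (C = 0), matching
;; the (neg:SI (ltu:SI ...)) form of the pattern.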
8246
8247 (define_insn_and_split "cstoresi_ltu_thumb1"
8248 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8249 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8250 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8251 "TARGET_THUMB1"
8252 "#"
8253 "TARGET_THUMB1"
8254 [(set (match_dup 3)
8255 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8256 (set (match_dup 0) (neg:SI (match_dup 3)))]
8257 "operands[3] = gen_reg_rtx (SImode);"
8258 [(set_attr "length" "4")]
8259 )
8260
8261 ;; Used as part of the expansion of thumb les sequence.
8262 (define_insn "thumb1_addsi3_addgeu"
8263 [(set (match_operand:SI 0 "s_register_operand" "=l")
8264 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8265 (match_operand:SI 2 "s_register_operand" "l"))
8266 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8267 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8268 "TARGET_THUMB1"
8269 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8270 [(set_attr "length" "4")]
8271 )
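;; For illustration: "cmp %3, %4 ; adc %0, %1, %2" computes
;; %1 + %2 + (%3 >= %4 unsigned); the (geu ...) term arrives through the
;; carry flag set by the compare.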
8272
8273 \f
8274 ;; Conditional move insns
8275
8276 (define_expand "movsicc"
8277 [(set (match_operand:SI 0 "s_register_operand" "")
8278 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8279 (match_operand:SI 2 "arm_not_operand" "")
8280 (match_operand:SI 3 "arm_not_operand" "")))]
8281 "TARGET_32BIT"
8282 "
8283 {
8284 enum rtx_code code = GET_CODE (operands[1]);
8285 rtx ccreg;
8286
8287 if (code == UNEQ || code == LTGT)
8288 FAIL;
8289
8290 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8291 XEXP (operands[1], 1));
8292 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8293 }"
8294 )
8295
8296 (define_expand "movsfcc"
8297 [(set (match_operand:SF 0 "s_register_operand" "")
8298 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8299 (match_operand:SF 2 "s_register_operand" "")
8300 (match_operand:SF 3 "nonmemory_operand" "")))]
8301 "TARGET_32BIT && TARGET_HARD_FLOAT"
8302 "
8303 {
8304 enum rtx_code code = GET_CODE (operands[1]);
8305 rtx ccreg;
8306
8307 if (code == UNEQ || code == LTGT)
8308 FAIL;
8309
8310 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8311      Otherwise, ensure it is a valid FP add operand.  */
8312 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8313 || (!arm_float_add_operand (operands[3], SFmode)))
8314 operands[3] = force_reg (SFmode, operands[3]);
8315
8316 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8317 XEXP (operands[1], 1));
8318 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8319 }"
8320 )
8321
8322 (define_expand "movdfcc"
8323 [(set (match_operand:DF 0 "s_register_operand" "")
8324 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8325 (match_operand:DF 2 "s_register_operand" "")
8326 (match_operand:DF 3 "arm_float_add_operand" "")))]
8327 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8328 "
8329 {
8330 enum rtx_code code = GET_CODE (operands[1]);
8331 rtx ccreg;
8332
8333 if (code == UNEQ || code == LTGT)
8334 FAIL;
8335
8336 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8337 XEXP (operands[1], 1));
8338 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8339 }"
8340 )
8341
8342 (define_insn "*movsicc_insn"
8343 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8344 (if_then_else:SI
8345 (match_operator 3 "arm_comparison_operator"
8346 [(match_operand 4 "cc_register" "") (const_int 0)])
8347 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8348 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8349 "TARGET_ARM"
8350 "@
8351 mov%D3\\t%0, %2
8352 mvn%D3\\t%0, #%B2
8353 mov%d3\\t%0, %1
8354 mvn%d3\\t%0, #%B1
8355 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8356 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8357 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8358 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8359 [(set_attr "length" "4,4,4,4,8,8,8,8")
8360 (set_attr "conds" "use")]
8361 )
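;; For illustration: with the flags set for "a != b", a conditional move
;; "r0 = (a != b) ? r1 : r2" uses one of the alternatives above, e.g.
;;    movne  r0, r1
;;    moveq  r0, r2
;; while the 4-byte alternatives apply when one arm is already in r0 or
;; is a (possibly inverted) immediate.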
8362
8363 (define_insn "*movsfcc_soft_insn"
8364 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8365 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8366 [(match_operand 4 "cc_register" "") (const_int 0)])
8367 (match_operand:SF 1 "s_register_operand" "0,r")
8368 (match_operand:SF 2 "s_register_operand" "r,0")))]
8369 "TARGET_ARM && TARGET_SOFT_FLOAT"
8370 "@
8371 mov%D3\\t%0, %2
8372 mov%d3\\t%0, %1"
8373 [(set_attr "conds" "use")]
8374 )
8375
8376 \f
8377 ;; Jump and linkage insns
8378
8379 (define_expand "jump"
8380 [(set (pc)
8381 (label_ref (match_operand 0 "" "")))]
8382 "TARGET_EITHER"
8383 ""
8384 )
8385
8386 (define_insn "*arm_jump"
8387 [(set (pc)
8388 (label_ref (match_operand 0 "" "")))]
8389 "TARGET_32BIT"
8390 "*
8391 {
8392 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8393 {
8394 arm_ccfsm_state += 2;
8395 return \"\";
8396 }
8397 return \"b%?\\t%l0\";
8398 }
8399 "
8400 [(set_attr "predicable" "yes")]
8401 )
8402
8403 (define_insn "*thumb_jump"
8404 [(set (pc)
8405 (label_ref (match_operand 0 "" "")))]
8406 "TARGET_THUMB1"
8407 "*
8408 if (get_attr_length (insn) == 2)
8409 return \"b\\t%l0\";
8410 return \"bl\\t%l0\\t%@ far jump\";
8411 "
8412 [(set (attr "far_jump")
8413 (if_then_else
8414 (eq_attr "length" "4")
8415 (const_string "yes")
8416 (const_string "no")))
8417 (set (attr "length")
8418 (if_then_else
8419 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8420 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8421 (const_int 2)
8422 (const_int 4)))]
8423 )
8424
8425 (define_expand "call"
8426 [(parallel [(call (match_operand 0 "memory_operand" "")
8427 (match_operand 1 "general_operand" ""))
8428 (use (match_operand 2 "" ""))
8429 (clobber (reg:SI LR_REGNUM))])]
8430 "TARGET_EITHER"
8431 "
8432 {
8433 rtx callee, pat;
8434
8435 /* In an untyped call, we can get NULL for operand 2. */
8436 if (operands[2] == NULL_RTX)
8437 operands[2] = const0_rtx;
8438
8439 /* Decide if we should generate indirect calls by loading the
8440 32-bit address of the callee into a register before performing the
8441 branch and link. */
8442 callee = XEXP (operands[0], 0);
8443 if (GET_CODE (callee) == SYMBOL_REF
8444 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8445 : !REG_P (callee))
8446 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8447
8448 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8449 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8450 DONE;
8451 }"
8452 )
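;; For illustration: a callee marked __attribute__((long_call)), or an
;; indirect call through a non-register address, is not emitted as a plain
;; "bl"; the expander above first forces the address into a register, so
;; the call comes out roughly as
;;    ldr   r3, =foo      @ 32-bit address loaded first (illustrative)
;;    blx   r3            @ or the mov lr,pc / mov pc,r3 pair before ARMv5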
8453
8454 (define_expand "call_internal"
8455 [(parallel [(call (match_operand 0 "memory_operand" "")
8456 (match_operand 1 "general_operand" ""))
8457 (use (match_operand 2 "" ""))
8458 (clobber (reg:SI LR_REGNUM))])])
8459
8460 (define_insn "*call_reg_armv5"
8461 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8462 (match_operand 1 "" ""))
8463 (use (match_operand 2 "" ""))
8464 (clobber (reg:SI LR_REGNUM))]
8465 "TARGET_ARM && arm_arch5"
8466 "blx%?\\t%0"
8467 [(set_attr "type" "call")]
8468 )
8469
8470 (define_insn "*call_reg_arm"
8471 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8472 (match_operand 1 "" ""))
8473 (use (match_operand 2 "" ""))
8474 (clobber (reg:SI LR_REGNUM))]
8475 "TARGET_ARM && !arm_arch5"
8476 "*
8477 return output_call (operands);
8478 "
8479 ;; The length is the worst case; normally only two instructions are needed.
8480 [(set_attr "length" "12")
8481 (set_attr "type" "call")]
8482 )
8483
8484
8485 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8486 ;; considered a function call by the branch predictor of some cores (PR40887).
8487 ;; Falls back to blx rN (*call_reg_armv5).
8488
8489 (define_insn "*call_mem"
8490 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8491 (match_operand 1 "" ""))
8492 (use (match_operand 2 "" ""))
8493 (clobber (reg:SI LR_REGNUM))]
8494 "TARGET_ARM && !arm_arch5"
8495 "*
8496 return output_call_mem (operands);
8497 "
8498 [(set_attr "length" "12")
8499 (set_attr "type" "call")]
8500 )
8501
8502 (define_insn "*call_reg_thumb1_v5"
8503 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8504 (match_operand 1 "" ""))
8505 (use (match_operand 2 "" ""))
8506 (clobber (reg:SI LR_REGNUM))]
8507 "TARGET_THUMB1 && arm_arch5"
8508 "blx\\t%0"
8509 [(set_attr "length" "2")
8510 (set_attr "type" "call")]
8511 )
8512
8513 (define_insn "*call_reg_thumb1"
8514 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8515 (match_operand 1 "" ""))
8516 (use (match_operand 2 "" ""))
8517 (clobber (reg:SI LR_REGNUM))]
8518 "TARGET_THUMB1 && !arm_arch5"
8519 "*
8520 {
8521 if (!TARGET_CALLER_INTERWORKING)
8522 return thumb_call_via_reg (operands[0]);
8523 else if (operands[1] == const0_rtx)
8524 return \"bl\\t%__interwork_call_via_%0\";
8525 else if (frame_pointer_needed)
8526 return \"bl\\t%__interwork_r7_call_via_%0\";
8527 else
8528 return \"bl\\t%__interwork_r11_call_via_%0\";
8529 }"
8530 [(set_attr "type" "call")]
8531 )
8532
8533 (define_expand "call_value"
8534 [(parallel [(set (match_operand 0 "" "")
8535 (call (match_operand 1 "memory_operand" "")
8536 (match_operand 2 "general_operand" "")))
8537 (use (match_operand 3 "" ""))
8538 (clobber (reg:SI LR_REGNUM))])]
8539 "TARGET_EITHER"
8540 "
8541 {
8542 rtx pat, callee;
8543
8544   /* In an untyped call, we can get NULL for operand 3.  */
8545 if (operands[3] == 0)
8546 operands[3] = const0_rtx;
8547
8548 /* Decide if we should generate indirect calls by loading the
8549 32-bit address of the callee into a register before performing the
8550 branch and link. */
8551 callee = XEXP (operands[1], 0);
8552 if (GET_CODE (callee) == SYMBOL_REF
8553 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8554 : !REG_P (callee))
8555 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8556
8557 pat = gen_call_value_internal (operands[0], operands[1],
8558 operands[2], operands[3]);
8559 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8560 DONE;
8561 }"
8562 )
8563
8564 (define_expand "call_value_internal"
8565 [(parallel [(set (match_operand 0 "" "")
8566 (call (match_operand 1 "memory_operand" "")
8567 (match_operand 2 "general_operand" "")))
8568 (use (match_operand 3 "" ""))
8569 (clobber (reg:SI LR_REGNUM))])])
8570
8571 (define_insn "*call_value_reg_armv5"
8572 [(set (match_operand 0 "" "")
8573 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8574 (match_operand 2 "" "")))
8575 (use (match_operand 3 "" ""))
8576 (clobber (reg:SI LR_REGNUM))]
8577 "TARGET_ARM && arm_arch5"
8578 "blx%?\\t%1"
8579 [(set_attr "type" "call")]
8580 )
8581
8582 (define_insn "*call_value_reg_arm"
8583 [(set (match_operand 0 "" "")
8584 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8585 (match_operand 2 "" "")))
8586 (use (match_operand 3 "" ""))
8587 (clobber (reg:SI LR_REGNUM))]
8588 "TARGET_ARM && !arm_arch5"
8589 "*
8590 return output_call (&operands[1]);
8591 "
8592 [(set_attr "length" "12")
8593 (set_attr "type" "call")]
8594 )
8595
8596 ;; Note: see *call_mem
8597
8598 (define_insn "*call_value_mem"
8599 [(set (match_operand 0 "" "")
8600 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8601 (match_operand 2 "" "")))
8602 (use (match_operand 3 "" ""))
8603 (clobber (reg:SI LR_REGNUM))]
8604 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8605 "*
8606 return output_call_mem (&operands[1]);
8607 "
8608 [(set_attr "length" "12")
8609 (set_attr "type" "call")]
8610 )
8611
8612 (define_insn "*call_value_reg_thumb1_v5"
8613 [(set (match_operand 0 "" "")
8614 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8615 (match_operand 2 "" "")))
8616 (use (match_operand 3 "" ""))
8617 (clobber (reg:SI LR_REGNUM))]
8618 "TARGET_THUMB1 && arm_arch5"
8619 "blx\\t%1"
8620 [(set_attr "length" "2")
8621 (set_attr "type" "call")]
8622 )
8623
8624 (define_insn "*call_value_reg_thumb1"
8625 [(set (match_operand 0 "" "")
8626 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8627 (match_operand 2 "" "")))
8628 (use (match_operand 3 "" ""))
8629 (clobber (reg:SI LR_REGNUM))]
8630 "TARGET_THUMB1 && !arm_arch5"
8631 "*
8632 {
8633 if (!TARGET_CALLER_INTERWORKING)
8634 return thumb_call_via_reg (operands[1]);
8635 else if (operands[2] == const0_rtx)
8636 return \"bl\\t%__interwork_call_via_%1\";
8637 else if (frame_pointer_needed)
8638 return \"bl\\t%__interwork_r7_call_via_%1\";
8639 else
8640 return \"bl\\t%__interwork_r11_call_via_%1\";
8641 }"
8642 [(set_attr "type" "call")]
8643 )
8644
8645 ;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8646 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
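;; For instance, with operand 0 being (symbol_ref "foo"), "%a0" prints plain
;; "foo", so the pattern below emits "bl foo" (or "bl foo(PLT)" when a PLT
;; relocation is needed); "foo" is just an illustrative symbol name.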
8647
8648 (define_insn "*call_symbol"
8649 [(call (mem:SI (match_operand:SI 0 "" ""))
8650 (match_operand 1 "" ""))
8651 (use (match_operand 2 "" ""))
8652 (clobber (reg:SI LR_REGNUM))]
8653 "TARGET_ARM
8654 && (GET_CODE (operands[0]) == SYMBOL_REF)
8655 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8656 "*
8657 {
8658 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8659 }"
8660 [(set_attr "type" "call")]
8661 )
8662
8663 (define_insn "*call_value_symbol"
8664 [(set (match_operand 0 "" "")
8665 (call (mem:SI (match_operand:SI 1 "" ""))
8666 (match_operand:SI 2 "" "")))
8667 (use (match_operand 3 "" ""))
8668 (clobber (reg:SI LR_REGNUM))]
8669 "TARGET_ARM
8670 && (GET_CODE (operands[1]) == SYMBOL_REF)
8671 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8672 "*
8673 {
8674 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8675 }"
8676 [(set_attr "type" "call")]
8677 )
8678
8679 (define_insn "*call_insn"
8680 [(call (mem:SI (match_operand:SI 0 "" ""))
8681 (match_operand:SI 1 "" ""))
8682 (use (match_operand 2 "" ""))
8683 (clobber (reg:SI LR_REGNUM))]
8684 "TARGET_THUMB
8685 && GET_CODE (operands[0]) == SYMBOL_REF
8686 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8687 "bl\\t%a0"
8688 [(set_attr "length" "4")
8689 (set_attr "type" "call")]
8690 )
8691
8692 (define_insn "*call_value_insn"
8693 [(set (match_operand 0 "" "")
8694 (call (mem:SI (match_operand 1 "" ""))
8695 (match_operand 2 "" "")))
8696 (use (match_operand 3 "" ""))
8697 (clobber (reg:SI LR_REGNUM))]
8698 "TARGET_THUMB
8699 && GET_CODE (operands[1]) == SYMBOL_REF
8700 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8701 "bl\\t%a1"
8702 [(set_attr "length" "4")
8703 (set_attr "type" "call")]
8704 )
8705
8706 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
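;; A sibling call reuses the caller's return address: instead of "bl foo"
;; followed later by a return, *sibcall_insn below emits a plain "b foo"
;; (or "b foo(PLT)"), so control returns directly to our own caller.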
8707 (define_expand "sibcall"
8708 [(parallel [(call (match_operand 0 "memory_operand" "")
8709 (match_operand 1 "general_operand" ""))
8710 (return)
8711 (use (match_operand 2 "" ""))])]
8712 "TARGET_ARM"
8713 "
8714 {
8715 if (operands[2] == NULL_RTX)
8716 operands[2] = const0_rtx;
8717 }"
8718 )
8719
8720 (define_expand "sibcall_value"
8721 [(parallel [(set (match_operand 0 "" "")
8722 (call (match_operand 1 "memory_operand" "")
8723 (match_operand 2 "general_operand" "")))
8724 (return)
8725 (use (match_operand 3 "" ""))])]
8726 "TARGET_ARM"
8727 "
8728 {
8729 if (operands[3] == NULL_RTX)
8730 operands[3] = const0_rtx;
8731 }"
8732 )
8733
8734 (define_insn "*sibcall_insn"
8735 [(call (mem:SI (match_operand:SI 0 "" "X"))
8736 (match_operand 1 "" ""))
8737 (return)
8738 (use (match_operand 2 "" ""))]
8739 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8740 "*
8741 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8742 "
8743 [(set_attr "type" "call")]
8744 )
8745
8746 (define_insn "*sibcall_value_insn"
8747 [(set (match_operand 0 "" "")
8748 (call (mem:SI (match_operand:SI 1 "" "X"))
8749 (match_operand 2 "" "")))
8750 (return)
8751 (use (match_operand 3 "" ""))]
8752 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8753 "*
8754 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8755 "
8756 [(set_attr "type" "call")]
8757 )
8758
8759 ;; Often the return insn will be the same as loading from memory, so set the type attribute to load1.
8760 (define_insn "return"
8761 [(return)]
8762 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8763 "*
8764 {
8765 if (arm_ccfsm_state == 2)
8766 {
8767 arm_ccfsm_state += 2;
8768 return \"\";
8769 }
8770 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8771 }"
8772 [(set_attr "type" "load1")
8773 (set_attr "length" "12")
8774 (set_attr "predicable" "yes")]
8775 )
8776
8777 (define_insn "*cond_return"
8778 [(set (pc)
8779 (if_then_else (match_operator 0 "arm_comparison_operator"
8780 [(match_operand 1 "cc_register" "") (const_int 0)])
8781 (return)
8782 (pc)))]
8783 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8784 "*
8785 {
8786 if (arm_ccfsm_state == 2)
8787 {
8788 arm_ccfsm_state += 2;
8789 return \"\";
8790 }
8791 return output_return_instruction (operands[0], TRUE, FALSE);
8792 }"
8793 [(set_attr "conds" "use")
8794 (set_attr "length" "12")
8795 (set_attr "type" "load1")]
8796 )
8797
8798 (define_insn "*cond_return_inverted"
8799 [(set (pc)
8800 (if_then_else (match_operator 0 "arm_comparison_operator"
8801 [(match_operand 1 "cc_register" "") (const_int 0)])
8802 (pc)
8803 (return)))]
8804 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8805 "*
8806 {
8807 if (arm_ccfsm_state == 2)
8808 {
8809 arm_ccfsm_state += 2;
8810 return \"\";
8811 }
8812 return output_return_instruction (operands[0], TRUE, TRUE);
8813 }"
8814 [(set_attr "conds" "use")
8815 (set_attr "length" "12")
8816 (set_attr "type" "load1")]
8817 )
8818
8819 ;; Generate a sequence of instructions to determine if the processor is
8820 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8821 ;; mask.
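;; The constants are the interesting part: 0x03fffffc keeps the word-aligned
;; address bits [25:2] of a 26-bit return address, while -1 passes the whole of
;; LR through on a 32-bit processor.  The check itself is the classic
;; "teq r0, r0; teq pc, pc" trick in *check_arch2 below: the first TEQ sets Z,
;; and on a 26-bit core one of the PC reads in the second TEQ also returns the
;; PSR flags, so the operands differ and Z ends up clear; on a 32-bit core both
;; reads are equal and Z stays set.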
8822
8823 (define_expand "return_addr_mask"
8824 [(set (match_dup 1)
8825 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8826 (const_int 0)))
8827 (set (match_operand:SI 0 "s_register_operand" "")
8828 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8829 (const_int -1)
8830 (const_int 67108860)))] ; 0x03fffffc
8831 "TARGET_ARM"
8832 "
8833 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8834 ")
8835
8836 (define_insn "*check_arch2"
8837 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8838 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8839 (const_int 0)))]
8840 "TARGET_ARM"
8841 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8842 [(set_attr "length" "8")
8843 (set_attr "conds" "set")]
8844 )
8845
8846 ;; Call subroutine returning any type.
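;; This expander is what __builtin_apply uses: the callee's result may occupy
;; any of the value-return registers, so after the call every possible result
;; register is copied out into the result block addressed by operand 1.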
8847
8848 (define_expand "untyped_call"
8849 [(parallel [(call (match_operand 0 "" "")
8850 (const_int 0))
8851 (match_operand 1 "" "")
8852 (match_operand 2 "" "")])]
8853 "TARGET_EITHER"
8854 "
8855 {
8856 int i;
8857 rtx par = gen_rtx_PARALLEL (VOIDmode,
8858 rtvec_alloc (XVECLEN (operands[2], 0)));
8859 rtx addr = gen_reg_rtx (Pmode);
8860 rtx mem;
8861 int size = 0;
8862
8863 emit_move_insn (addr, XEXP (operands[1], 0));
8864 mem = change_address (operands[1], BLKmode, addr);
8865
8866 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8867 {
8868 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8869
8870 /* Default code only uses r0 as a return value, but we could
8871 be using anything up to 4 registers. */
8872 if (REGNO (src) == R0_REGNUM)
8873 src = gen_rtx_REG (TImode, R0_REGNUM);
8874
8875 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8876 GEN_INT (size));
8877 size += GET_MODE_SIZE (GET_MODE (src));
8878 }
8879
8880 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8881 const0_rtx));
8882
8883 size = 0;
8884
8885 for (i = 0; i < XVECLEN (par, 0); i++)
8886 {
8887 HOST_WIDE_INT offset = 0;
8888 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8889
8890 if (size != 0)
8891 emit_move_insn (addr, plus_constant (addr, size));
8892
8893 mem = change_address (mem, GET_MODE (reg), NULL);
8894 if (REGNO (reg) == R0_REGNUM)
8895 {
8896 /* On Thumb we have to use a write-back instruction. */
8897 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8898 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8899 size = TARGET_ARM ? 16 : 0;
8900 }
8901 else
8902 {
8903 emit_move_insn (mem, reg);
8904 size = GET_MODE_SIZE (GET_MODE (reg));
8905 }
8906 }
8907
8908 /* The optimizer does not know that the call sets the function value
8909 registers we stored in the result block. We avoid problems by
8910 claiming that all hard registers are used and clobbered at this
8911 point. */
8912 emit_insn (gen_blockage ());
8913
8914 DONE;
8915 }"
8916 )
8917
8918 (define_expand "untyped_return"
8919 [(match_operand:BLK 0 "memory_operand" "")
8920 (match_operand 1 "" "")]
8921 "TARGET_EITHER"
8922 "
8923 {
8924 int i;
8925 rtx addr = gen_reg_rtx (Pmode);
8926 rtx mem;
8927 int size = 0;
8928
8929 emit_move_insn (addr, XEXP (operands[0], 0));
8930 mem = change_address (operands[0], BLKmode, addr);
8931
8932 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8933 {
8934 HOST_WIDE_INT offset = 0;
8935 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8936
8937 if (size != 0)
8938 emit_move_insn (addr, plus_constant (addr, size));
8939
8940 mem = change_address (mem, GET_MODE (reg), NULL);
8941 if (REGNO (reg) == R0_REGNUM)
8942 {
8943 /* On Thumb we have to use a write-back instruction. */
8944 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8945 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8946 size = TARGET_ARM ? 16 : 0;
8947 }
8948 else
8949 {
8950 emit_move_insn (reg, mem);
8951 size = GET_MODE_SIZE (GET_MODE (reg));
8952 }
8953 }
8954
8955 /* Emit USE insns before the return. */
8956 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8957 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8958
8959 /* Construct the return. */
8960 expand_naked_return ();
8961
8962 DONE;
8963 }"
8964 )
8965
8966 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8967 ;; all of memory. This blocks insns from being moved across this point.
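;; (The untyped_call expander above, for example, emits a blockage after
;; copying the result registers out to memory.)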
8968
8969 (define_insn "blockage"
8970 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8971 "TARGET_EITHER"
8972 ""
8973 [(set_attr "length" "0")
8974 (set_attr "type" "block")]
8975 )
8976
8977 (define_expand "casesi"
8978 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8979 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8980 (match_operand:SI 2 "const_int_operand" "") ; total range
8981 (match_operand:SI 3 "" "") ; table label
8982 (match_operand:SI 4 "" "")] ; Out of range label
8983 "TARGET_32BIT || optimize_size || flag_pic"
8984 "
8985 {
8986 enum insn_code code;
8987 if (operands[1] != const0_rtx)
8988 {
8989 rtx reg = gen_reg_rtx (SImode);
8990
8991 emit_insn (gen_addsi3 (reg, operands[0],
8992 GEN_INT (-INTVAL (operands[1]))));
8993 operands[0] = reg;
8994 }
8995
8996 if (TARGET_ARM)
8997 code = CODE_FOR_arm_casesi_internal;
8998 else if (TARGET_THUMB1)
8999 code = CODE_FOR_thumb1_casesi_internal_pic;
9000 else if (flag_pic)
9001 code = CODE_FOR_thumb2_casesi_internal_pic;
9002 else
9003 code = CODE_FOR_thumb2_casesi_internal;
9004
9005 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9006 operands[2] = force_reg (SImode, operands[2]);
9007
9008 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9009 operands[3], operands[4]));
9010 DONE;
9011 }"
9012 )
9013
9014 ;; The USE in this pattern is needed to tell flow analysis that this is
9015 ;; a CASESI insn. It has no other purpose.
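;; The non-PIC template relies on the PC reading as the current address plus 8:
;; in "cmp %0, %1; ldrls pc, [pc, %0, asl #2]; b default" the PC operand of the
;; LDR already points just past the unconditional branch, i.e. at the dispatch
;; table that follows, so table entry %0 is loaded straight into the PC.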
9016 (define_insn "arm_casesi_internal"
9017 [(parallel [(set (pc)
9018 (if_then_else
9019 (leu (match_operand:SI 0 "s_register_operand" "r")
9020 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9021 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9022 (label_ref (match_operand 2 "" ""))))
9023 (label_ref (match_operand 3 "" ""))))
9024 (clobber (reg:CC CC_REGNUM))
9025 (use (label_ref (match_dup 2)))])]
9026 "TARGET_ARM"
9027 "*
9028 if (flag_pic)
9029 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9030 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9031 "
9032 [(set_attr "conds" "clob")
9033 (set_attr "length" "12")]
9034 )
9035
9036 (define_expand "thumb1_casesi_internal_pic"
9037 [(match_operand:SI 0 "s_register_operand" "")
9038 (match_operand:SI 1 "thumb1_cmp_operand" "")
9039 (match_operand 2 "" "")
9040 (match_operand 3 "" "")]
9041 "TARGET_THUMB1"
9042 {
9043 rtx reg0;
9044 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9045 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9046 operands[3]));
9047 reg0 = gen_rtx_REG (SImode, 0);
9048 emit_move_insn (reg0, operands[0]);
9049 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9050 DONE;
9051 }
9052 )
9053
9054 (define_insn "thumb1_casesi_dispatch"
9055 [(parallel [(set (pc) (unspec [(reg:SI 0)
9056 (label_ref (match_operand 0 "" ""))
9057 ;; (label_ref (match_operand 1 "" ""))
9058 ]
9059 UNSPEC_THUMB1_CASESI))
9060 (clobber (reg:SI IP_REGNUM))
9061 (clobber (reg:SI LR_REGNUM))])]
9062 "TARGET_THUMB1"
9063 "* return thumb1_output_casesi(operands);"
9064 [(set_attr "length" "4")]
9065 )
9066
9067 (define_expand "indirect_jump"
9068 [(set (pc)
9069 (match_operand:SI 0 "s_register_operand" ""))]
9070 "TARGET_EITHER"
9071 "
9072 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9073 address and use bx. */
9074 if (TARGET_THUMB2)
9075 {
9076 rtx tmp;
9077 tmp = gen_reg_rtx (SImode);
9078 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9079 operands[0] = tmp;
9080 }
9081 "
9082 )
9083
9084 ;; NB Never uses BX.
9085 (define_insn "*arm_indirect_jump"
9086 [(set (pc)
9087 (match_operand:SI 0 "s_register_operand" "r"))]
9088 "TARGET_ARM"
9089 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9090 [(set_attr "predicable" "yes")]
9091 )
9092
9093 (define_insn "*load_indirect_jump"
9094 [(set (pc)
9095 (match_operand:SI 0 "memory_operand" "m"))]
9096 "TARGET_ARM"
9097 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9098 [(set_attr "type" "load1")
9099 (set_attr "pool_range" "4096")
9100 (set_attr "neg_pool_range" "4084")
9101 (set_attr "predicable" "yes")]
9102 )
9103
9104 ;; NB Never uses BX.
9105 (define_insn "*thumb1_indirect_jump"
9106 [(set (pc)
9107 (match_operand:SI 0 "register_operand" "l*r"))]
9108 "TARGET_THUMB1"
9109 "mov\\tpc, %0"
9110 [(set_attr "conds" "clob")
9111 (set_attr "length" "2")]
9112 )
9113
9114 \f
9115 ;; Misc insns
9116
9117 (define_insn "nop"
9118 [(const_int 0)]
9119 "TARGET_EITHER"
9120 "*
9121 if (TARGET_UNIFIED_ASM)
9122 return \"nop\";
9123 if (TARGET_ARM)
9124 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9125 return \"mov\\tr8, r8\";
9126 "
9127 [(set (attr "length")
9128 (if_then_else (eq_attr "is_thumb" "yes")
9129 (const_int 2)
9130 (const_int 4)))]
9131 )
9132
9133 \f
9134 ;; Patterns to allow combination of arithmetic, cond code and shifts
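;; For example, (set (reg r0) (plus:SI (ashift:SI (reg r1) (const_int 3))
;; (reg r2))) matches *arith_shiftsi below and is emitted as the single
;; instruction "add r0, r2, r1, asl #3" (register names are illustrative).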
9135
9136 (define_insn "*arith_shiftsi"
9137 [(set (match_operand:SI 0 "s_register_operand" "=r")
9138 (match_operator:SI 1 "shiftable_operator"
9139 [(match_operator:SI 3 "shift_operator"
9140 [(match_operand:SI 4 "s_register_operand" "r")
9141 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9142 (match_operand:SI 2 "s_register_operand" "r")]))]
9143 "TARGET_ARM"
9144 "%i1%?\\t%0, %2, %4%S3"
9145 [(set_attr "predicable" "yes")
9146 (set_attr "shift" "4")
9147 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9148 (const_string "alu_shift")
9149 (const_string "alu_shift_reg")))]
9150 )
9151
9152 (define_split
9153 [(set (match_operand:SI 0 "s_register_operand" "")
9154 (match_operator:SI 1 "shiftable_operator"
9155 [(match_operator:SI 2 "shiftable_operator"
9156 [(match_operator:SI 3 "shift_operator"
9157 [(match_operand:SI 4 "s_register_operand" "")
9158 (match_operand:SI 5 "reg_or_int_operand" "")])
9159 (match_operand:SI 6 "s_register_operand" "")])
9160 (match_operand:SI 7 "arm_rhs_operand" "")]))
9161 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9162 "TARGET_ARM"
9163 [(set (match_dup 8)
9164 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9165 (match_dup 6)]))
9166 (set (match_dup 0)
9167 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9168 "")
9169
9170 (define_insn "*arith_shiftsi_compare0"
9171 [(set (reg:CC_NOOV CC_REGNUM)
9172 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9173 [(match_operator:SI 3 "shift_operator"
9174 [(match_operand:SI 4 "s_register_operand" "r")
9175 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9176 (match_operand:SI 2 "s_register_operand" "r")])
9177 (const_int 0)))
9178 (set (match_operand:SI 0 "s_register_operand" "=r")
9179 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9180 (match_dup 2)]))]
9181 "TARGET_ARM"
9182 "%i1%.\\t%0, %2, %4%S3"
9183 [(set_attr "conds" "set")
9184 (set_attr "shift" "4")
9185 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9186 (const_string "alu_shift")
9187 (const_string "alu_shift_reg")))]
9188 )
9189
9190 (define_insn "*arith_shiftsi_compare0_scratch"
9191 [(set (reg:CC_NOOV CC_REGNUM)
9192 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9193 [(match_operator:SI 3 "shift_operator"
9194 [(match_operand:SI 4 "s_register_operand" "r")
9195 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9196 (match_operand:SI 2 "s_register_operand" "r")])
9197 (const_int 0)))
9198 (clobber (match_scratch:SI 0 "=r"))]
9199 "TARGET_ARM"
9200 "%i1%.\\t%0, %2, %4%S3"
9201 [(set_attr "conds" "set")
9202 (set_attr "shift" "4")
9203 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9204 (const_string "alu_shift")
9205 (const_string "alu_shift_reg")))]
9206 )
9207
9208 (define_insn "*sub_shiftsi"
9209 [(set (match_operand:SI 0 "s_register_operand" "=r")
9210 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9211 (match_operator:SI 2 "shift_operator"
9212 [(match_operand:SI 3 "s_register_operand" "r")
9213 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9214 "TARGET_ARM"
9215 "sub%?\\t%0, %1, %3%S2"
9216 [(set_attr "predicable" "yes")
9217 (set_attr "shift" "3")
9218 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9219 (const_string "alu_shift")
9220 (const_string "alu_shift_reg")))]
9221 )
9222
9223 (define_insn "*sub_shiftsi_compare0"
9224 [(set (reg:CC_NOOV CC_REGNUM)
9225 (compare:CC_NOOV
9226 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9227 (match_operator:SI 2 "shift_operator"
9228 [(match_operand:SI 3 "s_register_operand" "r")
9229 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9230 (const_int 0)))
9231 (set (match_operand:SI 0 "s_register_operand" "=r")
9232 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9233 (match_dup 4)])))]
9234 "TARGET_ARM"
9235 "sub%.\\t%0, %1, %3%S2"
9236 [(set_attr "conds" "set")
9237 (set_attr "shift" "3")
9238 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9239 (const_string "alu_shift")
9240 (const_string "alu_shift_reg")))]
9241 )
9242
9243 (define_insn "*sub_shiftsi_compare0_scratch"
9244 [(set (reg:CC_NOOV CC_REGNUM)
9245 (compare:CC_NOOV
9246 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9247 (match_operator:SI 2 "shift_operator"
9248 [(match_operand:SI 3 "s_register_operand" "r")
9249 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9250 (const_int 0)))
9251 (clobber (match_scratch:SI 0 "=r"))]
9252 "TARGET_ARM"
9253 "sub%.\\t%0, %1, %3%S2"
9254 [(set_attr "conds" "set")
9255 (set_attr "shift" "3")
9256 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9257 (const_string "alu_shift")
9258 (const_string "alu_shift_reg")))]
9259 )
9260
9261 \f
9262
9263 (define_insn "*and_scc"
9264 [(set (match_operand:SI 0 "s_register_operand" "=r")
9265 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9266 [(match_operand 3 "cc_register" "") (const_int 0)])
9267 (match_operand:SI 2 "s_register_operand" "r")))]
9268 "TARGET_ARM"
9269 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9270 [(set_attr "conds" "use")
9271 (set_attr "length" "8")]
9272 )
9273
9274 (define_insn "*ior_scc"
9275 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9276 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9277 [(match_operand 3 "cc_register" "") (const_int 0)])
9278 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9279 "TARGET_ARM"
9280 "@
9281 orr%d2\\t%0, %1, #1
9282 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9283 [(set_attr "conds" "use")
9284 (set_attr "length" "4,8")]
9285 )
9286
9287 (define_insn "*compare_scc"
9288 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9289 (match_operator:SI 1 "arm_comparison_operator"
9290 [(match_operand:SI 2 "s_register_operand" "r,r")
9291 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9292 (clobber (reg:CC CC_REGNUM))]
9293 "TARGET_ARM"
9294 "*
9295 if (operands[3] == const0_rtx)
9296 {
9297 if (GET_CODE (operands[1]) == LT)
9298 return \"mov\\t%0, %2, lsr #31\";
9299
9300 if (GET_CODE (operands[1]) == GE)
9301 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9302
9303 if (GET_CODE (operands[1]) == EQ)
9304 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9305 }
9306
9307 if (GET_CODE (operands[1]) == NE)
9308 {
9309 if (which_alternative == 1)
9310 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9311 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9312 }
9313 if (which_alternative == 1)
9314 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9315 else
9316 output_asm_insn (\"cmp\\t%2, %3\", operands);
9317 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9318 "
9319 [(set_attr "conds" "clob")
9320 (set_attr "length" "12")]
9321 )
9322
9323 (define_insn "*cond_move"
9324 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9325 (if_then_else:SI (match_operator 3 "equality_operator"
9326 [(match_operator 4 "arm_comparison_operator"
9327 [(match_operand 5 "cc_register" "") (const_int 0)])
9328 (const_int 0)])
9329 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9330 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9331 "TARGET_ARM"
9332 "*
9333 if (GET_CODE (operands[3]) == NE)
9334 {
9335 if (which_alternative != 1)
9336 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9337 if (which_alternative != 0)
9338 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9339 return \"\";
9340 }
9341 if (which_alternative != 0)
9342 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9343 if (which_alternative != 1)
9344 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9345 return \"\";
9346 "
9347 [(set_attr "conds" "use")
9348 (set_attr "length" "4,4,8")]
9349 )
9350
9351 (define_insn "*cond_arith"
9352 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9353 (match_operator:SI 5 "shiftable_operator"
9354 [(match_operator:SI 4 "arm_comparison_operator"
9355 [(match_operand:SI 2 "s_register_operand" "r,r")
9356 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9357 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9358 (clobber (reg:CC CC_REGNUM))]
9359 "TARGET_ARM"
9360 "*
9361 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9362 return \"%i5\\t%0, %1, %2, lsr #31\";
9363
9364 output_asm_insn (\"cmp\\t%2, %3\", operands);
9365 if (GET_CODE (operands[5]) == AND)
9366 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9367 else if (GET_CODE (operands[5]) == MINUS)
9368 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9369 else if (which_alternative != 0)
9370 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9371 return \"%i5%d4\\t%0, %1, #1\";
9372 "
9373 [(set_attr "conds" "clob")
9374 (set_attr "length" "12")]
9375 )
9376
9377 (define_insn "*cond_sub"
9378 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9379 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9380 (match_operator:SI 4 "arm_comparison_operator"
9381 [(match_operand:SI 2 "s_register_operand" "r,r")
9382 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9383 (clobber (reg:CC CC_REGNUM))]
9384 "TARGET_ARM"
9385 "*
9386 output_asm_insn (\"cmp\\t%2, %3\", operands);
9387 if (which_alternative != 0)
9388 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9389 return \"sub%d4\\t%0, %1, #1\";
9390 "
9391 [(set_attr "conds" "clob")
9392 (set_attr "length" "8,12")]
9393 )
9394
9395 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
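;; These dominance patterns collapse two comparisons joined by a logical
;; operator into a "cmp/cmn" followed by a conditionally executed "cmp/cmn",
;; with comparison_dominates_p deciding which comparison has to be done first
;; so that a single condition-code test covers both.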
9396 (define_insn "*cmp_ite0"
9397 [(set (match_operand 6 "dominant_cc_register" "")
9398 (compare
9399 (if_then_else:SI
9400 (match_operator 4 "arm_comparison_operator"
9401 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9402 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9403 (match_operator:SI 5 "arm_comparison_operator"
9404 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9405 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9406 (const_int 0))
9407 (const_int 0)))]
9408 "TARGET_ARM"
9409 "*
9410 {
9411 static const char * const opcodes[4][2] =
9412 {
9413 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9414 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9415 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9416 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9417 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9418 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9419 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9420 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9421 };
9422 int swap =
9423 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9424
9425 return opcodes[which_alternative][swap];
9426 }"
9427 [(set_attr "conds" "set")
9428 (set_attr "length" "8")]
9429 )
9430
9431 (define_insn "*cmp_ite1"
9432 [(set (match_operand 6 "dominant_cc_register" "")
9433 (compare
9434 (if_then_else:SI
9435 (match_operator 4 "arm_comparison_operator"
9436 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9437 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9438 (match_operator:SI 5 "arm_comparison_operator"
9439 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9440 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9441 (const_int 1))
9442 (const_int 0)))]
9443 "TARGET_ARM"
9444 "*
9445 {
9446 static const char * const opcodes[4][2] =
9447 {
9448 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9449 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9450 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9451 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9452 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9453 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9454 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9455 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9456 };
9457 int swap =
9458 comparison_dominates_p (GET_CODE (operands[5]),
9459 reverse_condition (GET_CODE (operands[4])));
9460
9461 return opcodes[which_alternative][swap];
9462 }"
9463 [(set_attr "conds" "set")
9464 (set_attr "length" "8")]
9465 )
9466
9467 (define_insn "*cmp_and"
9468 [(set (match_operand 6 "dominant_cc_register" "")
9469 (compare
9470 (and:SI
9471 (match_operator 4 "arm_comparison_operator"
9472 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9473 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9474 (match_operator:SI 5 "arm_comparison_operator"
9475 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9476 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9477 (const_int 0)))]
9478 "TARGET_ARM"
9479 "*
9480 {
9481 static const char *const opcodes[4][2] =
9482 {
9483 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9484 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9485 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9486 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9487 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9488 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9489 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9490 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9491 };
9492 int swap =
9493 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9494
9495 return opcodes[which_alternative][swap];
9496 }"
9497 [(set_attr "conds" "set")
9498 (set_attr "predicable" "no")
9499 (set_attr "length" "8")]
9500 )
9501
9502 (define_insn "*cmp_ior"
9503 [(set (match_operand 6 "dominant_cc_register" "")
9504 (compare
9505 (ior:SI
9506 (match_operator 4 "arm_comparison_operator"
9507 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9508 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9509 (match_operator:SI 5 "arm_comparison_operator"
9510 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9511 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9512 (const_int 0)))]
9513 "TARGET_ARM"
9514 "*
9515 {
9516 static const char *const opcodes[4][2] =
9517 {
9518 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9519 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9520 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9521 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9522 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9523 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9524 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9525 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9526 };
9527 int swap =
9528 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9529
9530 return opcodes[which_alternative][swap];
9531 }
9532 "
9533 [(set_attr "conds" "set")
9534 (set_attr "length" "8")]
9535 )
9536
9537 (define_insn_and_split "*ior_scc_scc"
9538 [(set (match_operand:SI 0 "s_register_operand" "=r")
9539 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9540 [(match_operand:SI 1 "s_register_operand" "r")
9541 (match_operand:SI 2 "arm_add_operand" "rIL")])
9542 (match_operator:SI 6 "arm_comparison_operator"
9543 [(match_operand:SI 4 "s_register_operand" "r")
9544 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9545 (clobber (reg:CC CC_REGNUM))]
9546 "TARGET_ARM
9547 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9548 != CCmode)"
9549 "#"
9550 "TARGET_ARM && reload_completed"
9551 [(set (match_dup 7)
9552 (compare
9553 (ior:SI
9554 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9555 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9556 (const_int 0)))
9557 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9558 "operands[7]
9559 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9560 DOM_CC_X_OR_Y),
9561 CC_REGNUM);"
9562 [(set_attr "conds" "clob")
9563 (set_attr "length" "16")])
9564
9565 ; If the above pattern is followed by a CMP insn, then the compare is
9566 ; redundant, since we can rework the conditional instruction that follows.
9567 (define_insn_and_split "*ior_scc_scc_cmp"
9568 [(set (match_operand 0 "dominant_cc_register" "")
9569 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9570 [(match_operand:SI 1 "s_register_operand" "r")
9571 (match_operand:SI 2 "arm_add_operand" "rIL")])
9572 (match_operator:SI 6 "arm_comparison_operator"
9573 [(match_operand:SI 4 "s_register_operand" "r")
9574 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9575 (const_int 0)))
9576 (set (match_operand:SI 7 "s_register_operand" "=r")
9577 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9578 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9579 "TARGET_ARM"
9580 "#"
9581 "TARGET_ARM && reload_completed"
9582 [(set (match_dup 0)
9583 (compare
9584 (ior:SI
9585 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9586 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9587 (const_int 0)))
9588 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9589 ""
9590 [(set_attr "conds" "set")
9591 (set_attr "length" "16")])
9592
9593 (define_insn_and_split "*and_scc_scc"
9594 [(set (match_operand:SI 0 "s_register_operand" "=r")
9595 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9596 [(match_operand:SI 1 "s_register_operand" "r")
9597 (match_operand:SI 2 "arm_add_operand" "rIL")])
9598 (match_operator:SI 6 "arm_comparison_operator"
9599 [(match_operand:SI 4 "s_register_operand" "r")
9600 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9601 (clobber (reg:CC CC_REGNUM))]
9602 "TARGET_ARM
9603 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9604 != CCmode)"
9605 "#"
9606 "TARGET_ARM && reload_completed
9607 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9608 != CCmode)"
9609 [(set (match_dup 7)
9610 (compare
9611 (and:SI
9612 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9613 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9614 (const_int 0)))
9615 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9616 "operands[7]
9617 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9618 DOM_CC_X_AND_Y),
9619 CC_REGNUM);"
9620 [(set_attr "conds" "clob")
9621 (set_attr "length" "16")])
9622
9623 ; If the above pattern is followed by a CMP insn, then the compare is
9624 ; redundant, since we can rework the conditional instruction that follows.
9625 (define_insn_and_split "*and_scc_scc_cmp"
9626 [(set (match_operand 0 "dominant_cc_register" "")
9627 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9628 [(match_operand:SI 1 "s_register_operand" "r")
9629 (match_operand:SI 2 "arm_add_operand" "rIL")])
9630 (match_operator:SI 6 "arm_comparison_operator"
9631 [(match_operand:SI 4 "s_register_operand" "r")
9632 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9633 (const_int 0)))
9634 (set (match_operand:SI 7 "s_register_operand" "=r")
9635 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9636 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9637 "TARGET_ARM"
9638 "#"
9639 "TARGET_ARM && reload_completed"
9640 [(set (match_dup 0)
9641 (compare
9642 (and:SI
9643 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9644 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9645 (const_int 0)))
9646 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9647 ""
9648 [(set_attr "conds" "set")
9649 (set_attr "length" "16")])
9650
9651 ;; If there is no dominance in the comparison, then we can still save an
9652 ;; instruction in the AND case, since we know that the second compare need
9653 ;; only zero the value when its condition is false (if true, the value is
9654 ;; already correct).
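;; Sketch of the split below for r0 = (x OP1 y) && (w OP2 z) with no dominance:
;; compute the first scc into r0, then compare for the second condition and
;; conditionally move #0 into r0 when that second condition is false.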
9655 (define_insn_and_split "*and_scc_scc_nodom"
9656 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9657 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9658 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9659 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9660 (match_operator:SI 6 "arm_comparison_operator"
9661 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9662 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9663 (clobber (reg:CC CC_REGNUM))]
9664 "TARGET_ARM
9665 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9666 == CCmode)"
9667 "#"
9668 "TARGET_ARM && reload_completed"
9669 [(parallel [(set (match_dup 0)
9670 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9671 (clobber (reg:CC CC_REGNUM))])
9672 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9673 (set (match_dup 0)
9674 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9675 (match_dup 0)
9676 (const_int 0)))]
9677 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9678 operands[4], operands[5]),
9679 CC_REGNUM);
9680 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9681 operands[5]);"
9682 [(set_attr "conds" "clob")
9683 (set_attr "length" "20")])
9684
9685 (define_split
9686 [(set (reg:CC_NOOV CC_REGNUM)
9687 (compare:CC_NOOV (ior:SI
9688 (and:SI (match_operand:SI 0 "s_register_operand" "")
9689 (const_int 1))
9690 (match_operator:SI 1 "arm_comparison_operator"
9691 [(match_operand:SI 2 "s_register_operand" "")
9692 (match_operand:SI 3 "arm_add_operand" "")]))
9693 (const_int 0)))
9694 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9695 "TARGET_ARM"
9696 [(set (match_dup 4)
9697 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9698 (match_dup 0)))
9699 (set (reg:CC_NOOV CC_REGNUM)
9700 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9701 (const_int 0)))]
9702 "")
9703
9704 (define_split
9705 [(set (reg:CC_NOOV CC_REGNUM)
9706 (compare:CC_NOOV (ior:SI
9707 (match_operator:SI 1 "arm_comparison_operator"
9708 [(match_operand:SI 2 "s_register_operand" "")
9709 (match_operand:SI 3 "arm_add_operand" "")])
9710 (and:SI (match_operand:SI 0 "s_register_operand" "")
9711 (const_int 1)))
9712 (const_int 0)))
9713 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9714 "TARGET_ARM"
9715 [(set (match_dup 4)
9716 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9717 (match_dup 0)))
9718 (set (reg:CC_NOOV CC_REGNUM)
9719 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9720 (const_int 0)))]
9721 "")
9722 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9723
9724 (define_insn "*negscc"
9725 [(set (match_operand:SI 0 "s_register_operand" "=r")
9726 (neg:SI (match_operator 3 "arm_comparison_operator"
9727 [(match_operand:SI 1 "s_register_operand" "r")
9728 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9729 (clobber (reg:CC CC_REGNUM))]
9730 "TARGET_ARM"
9731 "*
9732 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9733 return \"mov\\t%0, %1, asr #31\";
9734
9735 if (GET_CODE (operands[3]) == NE)
9736 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9737
9738 output_asm_insn (\"cmp\\t%1, %2\", operands);
9739 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9740 return \"mvn%d3\\t%0, #0\";
9741 "
9742 [(set_attr "conds" "clob")
9743 (set_attr "length" "12")]
9744 )
9745
9746 (define_insn "movcond"
9747 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9748 (if_then_else:SI
9749 (match_operator 5 "arm_comparison_operator"
9750 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9751 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9752 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9753 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9754 (clobber (reg:CC CC_REGNUM))]
9755 "TARGET_ARM"
9756 "*
9757 if (GET_CODE (operands[5]) == LT
9758 && (operands[4] == const0_rtx))
9759 {
9760 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9761 {
9762 if (operands[2] == const0_rtx)
9763 return \"and\\t%0, %1, %3, asr #31\";
9764 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9765 }
9766 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9767 {
9768 if (operands[1] == const0_rtx)
9769 return \"bic\\t%0, %2, %3, asr #31\";
9770 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9771 }
9772 /* The only case that falls through to here is when both ops 1 & 2
9773 are constants. */
9774 }
9775
9776 if (GET_CODE (operands[5]) == GE
9777 && (operands[4] == const0_rtx))
9778 {
9779 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9780 {
9781 if (operands[2] == const0_rtx)
9782 return \"bic\\t%0, %1, %3, asr #31\";
9783 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9784 }
9785 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9786 {
9787 if (operands[1] == const0_rtx)
9788 return \"and\\t%0, %2, %3, asr #31\";
9789 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9790 }
9791 /* The only case that falls through to here is when both ops 1 & 2
9792 are constants. */
9793 }
9794 if (GET_CODE (operands[4]) == CONST_INT
9795 && !const_ok_for_arm (INTVAL (operands[4])))
9796 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9797 else
9798 output_asm_insn (\"cmp\\t%3, %4\", operands);
9799 if (which_alternative != 0)
9800 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9801 if (which_alternative != 1)
9802 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9803 return \"\";
9804 "
9805 [(set_attr "conds" "clob")
9806 (set_attr "length" "8,8,12")]
9807 )
9808
9809 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9810
9811 (define_insn "*ifcompare_plus_move"
9812 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9813 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9814 [(match_operand:SI 4 "s_register_operand" "r,r")
9815 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9816 (plus:SI
9817 (match_operand:SI 2 "s_register_operand" "r,r")
9818 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9819 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9820 (clobber (reg:CC CC_REGNUM))]
9821 "TARGET_ARM"
9822 "#"
9823 [(set_attr "conds" "clob")
9824 (set_attr "length" "8,12")]
9825 )
9826
9827 (define_insn "*if_plus_move"
9828 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9829 (if_then_else:SI
9830 (match_operator 4 "arm_comparison_operator"
9831 [(match_operand 5 "cc_register" "") (const_int 0)])
9832 (plus:SI
9833 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9834 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9835 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9836 "TARGET_ARM"
9837 "@
9838 add%d4\\t%0, %2, %3
9839 sub%d4\\t%0, %2, #%n3
9840 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9841 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9842 [(set_attr "conds" "use")
9843 (set_attr "length" "4,4,8,8")
9844 (set_attr "type" "*,*,*,*")]
9845 )
9846
9847 (define_insn "*ifcompare_move_plus"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9849 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9850 [(match_operand:SI 4 "s_register_operand" "r,r")
9851 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9852 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9853 (plus:SI
9854 (match_operand:SI 2 "s_register_operand" "r,r")
9855 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9856 (clobber (reg:CC CC_REGNUM))]
9857 "TARGET_ARM"
9858 "#"
9859 [(set_attr "conds" "clob")
9860 (set_attr "length" "8,12")]
9861 )
9862
9863 (define_insn "*if_move_plus"
9864 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9865 (if_then_else:SI
9866 (match_operator 4 "arm_comparison_operator"
9867 [(match_operand 5 "cc_register" "") (const_int 0)])
9868 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9869 (plus:SI
9870 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9871 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9872 "TARGET_ARM"
9873 "@
9874 add%D4\\t%0, %2, %3
9875 sub%D4\\t%0, %2, #%n3
9876 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9877 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9878 [(set_attr "conds" "use")
9879 (set_attr "length" "4,4,8,8")
9880 (set_attr "type" "*,*,*,*")]
9881 )
9882
9883 (define_insn "*ifcompare_arith_arith"
9884 [(set (match_operand:SI 0 "s_register_operand" "=r")
9885 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9886 [(match_operand:SI 5 "s_register_operand" "r")
9887 (match_operand:SI 6 "arm_add_operand" "rIL")])
9888 (match_operator:SI 8 "shiftable_operator"
9889 [(match_operand:SI 1 "s_register_operand" "r")
9890 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9891 (match_operator:SI 7 "shiftable_operator"
9892 [(match_operand:SI 3 "s_register_operand" "r")
9893 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9894 (clobber (reg:CC CC_REGNUM))]
9895 "TARGET_ARM"
9896 "#"
9897 [(set_attr "conds" "clob")
9898 (set_attr "length" "12")]
9899 )
9900
9901 (define_insn "*if_arith_arith"
9902 [(set (match_operand:SI 0 "s_register_operand" "=r")
9903 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9904 [(match_operand 8 "cc_register" "") (const_int 0)])
9905 (match_operator:SI 6 "shiftable_operator"
9906 [(match_operand:SI 1 "s_register_operand" "r")
9907 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9908 (match_operator:SI 7 "shiftable_operator"
9909 [(match_operand:SI 3 "s_register_operand" "r")
9910 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9911 "TARGET_ARM"
9912 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9913 [(set_attr "conds" "use")
9914 (set_attr "length" "8")]
9915 )
9916
9917 (define_insn "*ifcompare_arith_move"
9918 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9919 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9920 [(match_operand:SI 2 "s_register_operand" "r,r")
9921 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9922 (match_operator:SI 7 "shiftable_operator"
9923 [(match_operand:SI 4 "s_register_operand" "r,r")
9924 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9925 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9926 (clobber (reg:CC CC_REGNUM))]
9927 "TARGET_ARM"
9928 "*
9929 /* If we have an operation where (op x 0) is the identity operation, the
9930 conditional operator is LT or GE, we are comparing against zero, and
9931 everything is in registers, then we can do this in two instructions. */
9932 if (operands[3] == const0_rtx
9933 && GET_CODE (operands[7]) != AND
9934 && GET_CODE (operands[5]) == REG
9935 && GET_CODE (operands[1]) == REG
9936 && REGNO (operands[1]) == REGNO (operands[4])
9937 && REGNO (operands[4]) != REGNO (operands[0]))
9938 {
9939 if (GET_CODE (operands[6]) == LT)
9940 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9941 else if (GET_CODE (operands[6]) == GE)
9942 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9943 }
9944 if (GET_CODE (operands[3]) == CONST_INT
9945 && !const_ok_for_arm (INTVAL (operands[3])))
9946 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9947 else
9948 output_asm_insn (\"cmp\\t%2, %3\", operands);
9949 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9950 if (which_alternative != 0)
9951 return \"mov%D6\\t%0, %1\";
9952 return \"\";
9953 "
9954 [(set_attr "conds" "clob")
9955 (set_attr "length" "8,12")]
9956 )
9957
9958 (define_insn "*if_arith_move"
9959 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9960 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9961 [(match_operand 6 "cc_register" "") (const_int 0)])
9962 (match_operator:SI 5 "shiftable_operator"
9963 [(match_operand:SI 2 "s_register_operand" "r,r")
9964 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9965 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9966 "TARGET_ARM"
9967 "@
9968 %I5%d4\\t%0, %2, %3
9969 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9970 [(set_attr "conds" "use")
9971 (set_attr "length" "4,8")
9972 (set_attr "type" "*,*")]
9973 )
9974
9975 (define_insn "*ifcompare_move_arith"
9976 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9977 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9978 [(match_operand:SI 4 "s_register_operand" "r,r")
9979 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9980 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9981 (match_operator:SI 7 "shiftable_operator"
9982 [(match_operand:SI 2 "s_register_operand" "r,r")
9983 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9984 (clobber (reg:CC CC_REGNUM))]
9985 "TARGET_ARM"
9986 "*
9987 /* If we have an operation where (op x 0) is the identity operation, the
9988 conditional operator is LT or GE, we are comparing against zero, and
9989 everything is in registers, then we can do this in two instructions. */
9990 if (operands[5] == const0_rtx
9991 && GET_CODE (operands[7]) != AND
9992 && GET_CODE (operands[3]) == REG
9993 && GET_CODE (operands[1]) == REG
9994 && REGNO (operands[1]) == REGNO (operands[2])
9995 && REGNO (operands[2]) != REGNO (operands[0]))
9996 {
9997 if (GET_CODE (operands[6]) == GE)
9998 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9999 else if (GET_CODE (operands[6]) == LT)
10000 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10001 }
10002
10003 if (GET_CODE (operands[5]) == CONST_INT
10004 && !const_ok_for_arm (INTVAL (operands[5])))
10005 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10006 else
10007 output_asm_insn (\"cmp\\t%4, %5\", operands);
10008
10009 if (which_alternative != 0)
10010 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10011 return \"%I7%D6\\t%0, %2, %3\";
10012 "
10013 [(set_attr "conds" "clob")
10014 (set_attr "length" "8,12")]
10015 )
10016
10017 (define_insn "*if_move_arith"
10018 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10019 (if_then_else:SI
10020 (match_operator 4 "arm_comparison_operator"
10021 [(match_operand 6 "cc_register" "") (const_int 0)])
10022 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10023 (match_operator:SI 5 "shiftable_operator"
10024 [(match_operand:SI 2 "s_register_operand" "r,r")
10025 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10026 "TARGET_ARM"
10027 "@
10028 %I5%D4\\t%0, %2, %3
10029 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10030 [(set_attr "conds" "use")
10031 (set_attr "length" "4,8")
10032 (set_attr "type" "*,*")]
10033 )
10034
10035 (define_insn "*ifcompare_move_not"
10036 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10037 (if_then_else:SI
10038 (match_operator 5 "arm_comparison_operator"
10039 [(match_operand:SI 3 "s_register_operand" "r,r")
10040 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10041 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10042 (not:SI
10043 (match_operand:SI 2 "s_register_operand" "r,r"))))
10044 (clobber (reg:CC CC_REGNUM))]
10045 "TARGET_ARM"
10046 "#"
10047 [(set_attr "conds" "clob")
10048 (set_attr "length" "8,12")]
10049 )
10050
10051 (define_insn "*if_move_not"
10052 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10053 (if_then_else:SI
10054 (match_operator 4 "arm_comparison_operator"
10055 [(match_operand 3 "cc_register" "") (const_int 0)])
10056 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10057 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10058 "TARGET_ARM"
10059 "@
10060 mvn%D4\\t%0, %2
10061 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10062 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10063 [(set_attr "conds" "use")
10064 (set_attr "length" "4,8,8")]
10065 )
10066
10067 (define_insn "*ifcompare_not_move"
10068 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10069 (if_then_else:SI
10070 (match_operator 5 "arm_comparison_operator"
10071 [(match_operand:SI 3 "s_register_operand" "r,r")
10072 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10073 (not:SI
10074 (match_operand:SI 2 "s_register_operand" "r,r"))
10075 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10076 (clobber (reg:CC CC_REGNUM))]
10077 "TARGET_ARM"
10078 "#"
10079 [(set_attr "conds" "clob")
10080 (set_attr "length" "8,12")]
10081 )
10082
10083 (define_insn "*if_not_move"
10084 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10085 (if_then_else:SI
10086 (match_operator 4 "arm_comparison_operator"
10087 [(match_operand 3 "cc_register" "") (const_int 0)])
10088 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10089 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10090 "TARGET_ARM"
10091 "@
10092 mvn%d4\\t%0, %2
10093 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10094 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10095 [(set_attr "conds" "use")
10096 (set_attr "length" "4,8,8")]
10097 )
10098
10099 (define_insn "*ifcompare_shift_move"
10100 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10101 (if_then_else:SI
10102 (match_operator 6 "arm_comparison_operator"
10103 [(match_operand:SI 4 "s_register_operand" "r,r")
10104 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10105 (match_operator:SI 7 "shift_operator"
10106 [(match_operand:SI 2 "s_register_operand" "r,r")
10107 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10108 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10109 (clobber (reg:CC CC_REGNUM))]
10110 "TARGET_ARM"
10111 "#"
10112 [(set_attr "conds" "clob")
10113 (set_attr "length" "8,12")]
10114 )
10115
10116 (define_insn "*if_shift_move"
10117 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10118 (if_then_else:SI
10119 (match_operator 5 "arm_comparison_operator"
10120 [(match_operand 6 "cc_register" "") (const_int 0)])
10121 (match_operator:SI 4 "shift_operator"
10122 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10123 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10124 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10125 "TARGET_ARM"
10126 "@
10127 mov%d5\\t%0, %2%S4
10128 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10129 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10130 [(set_attr "conds" "use")
10131 (set_attr "shift" "2")
10132 (set_attr "length" "4,8,8")
10133 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10134 (const_string "alu_shift")
10135 (const_string "alu_shift_reg")))]
10136 )
10137
10138 (define_insn "*ifcompare_move_shift"
10139 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10140 (if_then_else:SI
10141 (match_operator 6 "arm_comparison_operator"
10142 [(match_operand:SI 4 "s_register_operand" "r,r")
10143 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10144 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10145 (match_operator:SI 7 "shift_operator"
10146 [(match_operand:SI 2 "s_register_operand" "r,r")
10147 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10148 (clobber (reg:CC CC_REGNUM))]
10149 "TARGET_ARM"
10150 "#"
10151 [(set_attr "conds" "clob")
10152 (set_attr "length" "8,12")]
10153 )
10154
10155 (define_insn "*if_move_shift"
10156 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10157 (if_then_else:SI
10158 (match_operator 5 "arm_comparison_operator"
10159 [(match_operand 6 "cc_register" "") (const_int 0)])
10160 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10161 (match_operator:SI 4 "shift_operator"
10162 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10163 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10164 "TARGET_ARM"
10165 "@
10166 mov%D5\\t%0, %2%S4
10167 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10168 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10169 [(set_attr "conds" "use")
10170 (set_attr "shift" "2")
10171 (set_attr "length" "4,8,8")
10172 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10173 (const_string "alu_shift")
10174 (const_string "alu_shift_reg")))]
10175 )
10176
10177 (define_insn "*ifcompare_shift_shift"
10178 [(set (match_operand:SI 0 "s_register_operand" "=r")
10179 (if_then_else:SI
10180 (match_operator 7 "arm_comparison_operator"
10181 [(match_operand:SI 5 "s_register_operand" "r")
10182 (match_operand:SI 6 "arm_add_operand" "rIL")])
10183 (match_operator:SI 8 "shift_operator"
10184 [(match_operand:SI 1 "s_register_operand" "r")
10185 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10186 (match_operator:SI 9 "shift_operator"
10187 [(match_operand:SI 3 "s_register_operand" "r")
10188 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10189 (clobber (reg:CC CC_REGNUM))]
10190 "TARGET_ARM"
10191 "#"
10192 [(set_attr "conds" "clob")
10193 (set_attr "length" "12")]
10194 )
10195
10196 (define_insn "*if_shift_shift"
10197 [(set (match_operand:SI 0 "s_register_operand" "=r")
10198 (if_then_else:SI
10199 (match_operator 5 "arm_comparison_operator"
10200 [(match_operand 8 "cc_register" "") (const_int 0)])
10201 (match_operator:SI 6 "shift_operator"
10202 [(match_operand:SI 1 "s_register_operand" "r")
10203 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10204 (match_operator:SI 7 "shift_operator"
10205 [(match_operand:SI 3 "s_register_operand" "r")
10206 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10207 "TARGET_ARM"
10208 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10209 [(set_attr "conds" "use")
10210 (set_attr "shift" "1")
10211 (set_attr "length" "8")
10212 (set (attr "type") (if_then_else
10213 (and (match_operand 2 "const_int_operand" "")
10214 (match_operand 4 "const_int_operand" ""))
10215 (const_string "alu_shift")
10216 (const_string "alu_shift_reg")))]
10217 )
10218
10219 (define_insn "*ifcompare_not_arith"
10220 [(set (match_operand:SI 0 "s_register_operand" "=r")
10221 (if_then_else:SI
10222 (match_operator 6 "arm_comparison_operator"
10223 [(match_operand:SI 4 "s_register_operand" "r")
10224 (match_operand:SI 5 "arm_add_operand" "rIL")])
10225 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10226 (match_operator:SI 7 "shiftable_operator"
10227 [(match_operand:SI 2 "s_register_operand" "r")
10228 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10229 (clobber (reg:CC CC_REGNUM))]
10230 "TARGET_ARM"
10231 "#"
10232 [(set_attr "conds" "clob")
10233 (set_attr "length" "12")]
10234 )
10235
10236 (define_insn "*if_not_arith"
10237 [(set (match_operand:SI 0 "s_register_operand" "=r")
10238 (if_then_else:SI
10239 (match_operator 5 "arm_comparison_operator"
10240 [(match_operand 4 "cc_register" "") (const_int 0)])
10241 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10242 (match_operator:SI 6 "shiftable_operator"
10243 [(match_operand:SI 2 "s_register_operand" "r")
10244 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10245 "TARGET_ARM"
10246 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10247 [(set_attr "conds" "use")
10248 (set_attr "length" "8")]
10249 )
10250
10251 (define_insn "*ifcompare_arith_not"
10252 [(set (match_operand:SI 0 "s_register_operand" "=r")
10253 (if_then_else:SI
10254 (match_operator 6 "arm_comparison_operator"
10255 [(match_operand:SI 4 "s_register_operand" "r")
10256 (match_operand:SI 5 "arm_add_operand" "rIL")])
10257 (match_operator:SI 7 "shiftable_operator"
10258 [(match_operand:SI 2 "s_register_operand" "r")
10259 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10260 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10261 (clobber (reg:CC CC_REGNUM))]
10262 "TARGET_ARM"
10263 "#"
10264 [(set_attr "conds" "clob")
10265 (set_attr "length" "12")]
10266 )
10267
10268 (define_insn "*if_arith_not"
10269 [(set (match_operand:SI 0 "s_register_operand" "=r")
10270 (if_then_else:SI
10271 (match_operator 5 "arm_comparison_operator"
10272 [(match_operand 4 "cc_register" "") (const_int 0)])
10273 (match_operator:SI 6 "shiftable_operator"
10274 [(match_operand:SI 2 "s_register_operand" "r")
10275 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10276 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10277 "TARGET_ARM"
10278 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10279 [(set_attr "conds" "use")
10280 (set_attr "length" "8")]
10281 )
10282
10283 (define_insn "*ifcompare_neg_move"
10284 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10285 (if_then_else:SI
10286 (match_operator 5 "arm_comparison_operator"
10287 [(match_operand:SI 3 "s_register_operand" "r,r")
10288 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10289 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10290 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10291 (clobber (reg:CC CC_REGNUM))]
10292 "TARGET_ARM"
10293 "#"
10294 [(set_attr "conds" "clob")
10295 (set_attr "length" "8,12")]
10296 )
10297
10298 (define_insn "*if_neg_move"
10299 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10300 (if_then_else:SI
10301 (match_operator 4 "arm_comparison_operator"
10302 [(match_operand 3 "cc_register" "") (const_int 0)])
10303 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10304 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10305 "TARGET_ARM"
10306 "@
10307 rsb%d4\\t%0, %2, #0
10308 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10309 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10310 [(set_attr "conds" "use")
10311 (set_attr "length" "4,8,8")]
10312 )
10313
10314 (define_insn "*ifcompare_move_neg"
10315 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10316 (if_then_else:SI
10317 (match_operator 5 "arm_comparison_operator"
10318 [(match_operand:SI 3 "s_register_operand" "r,r")
10319 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10320 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10321 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10322 (clobber (reg:CC CC_REGNUM))]
10323 "TARGET_ARM"
10324 "#"
10325 [(set_attr "conds" "clob")
10326 (set_attr "length" "8,12")]
10327 )
10328
10329 (define_insn "*if_move_neg"
10330 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10331 (if_then_else:SI
10332 (match_operator 4 "arm_comparison_operator"
10333 [(match_operand 3 "cc_register" "") (const_int 0)])
10334 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10335 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10336 "TARGET_ARM"
10337 "@
10338 rsb%D4\\t%0, %2, #0
10339 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10340 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10341 [(set_attr "conds" "use")
10342 (set_attr "length" "4,8,8")]
10343 )
10344
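;; Fold two loads from adjacent words into the arithmetic operation that
;; consumes them.  As a rough, illustrative sketch (not generated output),
;; a computation such as r0 = *p + *(p + 4) can come out as
;;	ldmia	rp, {r0, r4}
;;	add	r0, r0, r4
;; with the ldm addressing mode (ia/ib/da) chosen from the two offsets.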
10345 (define_insn "*arith_adjacentmem"
10346 [(set (match_operand:SI 0 "s_register_operand" "=r")
10347 (match_operator:SI 1 "shiftable_operator"
10348 [(match_operand:SI 2 "memory_operand" "m")
10349 (match_operand:SI 3 "memory_operand" "m")]))
10350 (clobber (match_scratch:SI 4 "=r"))]
10351 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10352 "*
10353 {
10354 rtx ldm[3];
10355 rtx arith[4];
10356 rtx base_reg;
10357 HOST_WIDE_INT val1 = 0, val2 = 0;
10358
10359 if (REGNO (operands[0]) > REGNO (operands[4]))
10360 {
10361 ldm[1] = operands[4];
10362 ldm[2] = operands[0];
10363 }
10364 else
10365 {
10366 ldm[1] = operands[0];
10367 ldm[2] = operands[4];
10368 }
10369
10370 base_reg = XEXP (operands[2], 0);
10371
10372 if (!REG_P (base_reg))
10373 {
10374 val1 = INTVAL (XEXP (base_reg, 1));
10375 base_reg = XEXP (base_reg, 0);
10376 }
10377
10378 if (!REG_P (XEXP (operands[3], 0)))
10379 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10380
10381 arith[0] = operands[0];
10382 arith[3] = operands[1];
10383
10384 if (val1 < val2)
10385 {
10386 arith[1] = ldm[1];
10387 arith[2] = ldm[2];
10388 }
10389 else
10390 {
10391 arith[1] = ldm[2];
10392 arith[2] = ldm[1];
10393 }
10394
10395 ldm[0] = base_reg;
10396   if (val1 != 0 && val2 != 0)
10397 {
10398 rtx ops[3];
10399
10400 if (val1 == 4 || val2 == 4)
10401 /* Other val must be 8, since we know they are adjacent and neither
10402 is zero. */
10403 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10404 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10405 {
10406 ldm[0] = ops[0] = operands[4];
10407 ops[1] = base_reg;
10408 ops[2] = GEN_INT (val1);
10409 output_add_immediate (ops);
10410 if (val1 < val2)
10411 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10412 else
10413 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10414 }
10415 else
10416 {
10417 /* Offset is out of range for a single add, so use two ldr. */
10418 ops[0] = ldm[1];
10419 ops[1] = base_reg;
10420 ops[2] = GEN_INT (val1);
10421 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10422 ops[0] = ldm[2];
10423 ops[2] = GEN_INT (val2);
10424 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10425 }
10426 }
10427 else if (val1 != 0)
10428 {
10429 if (val1 < val2)
10430 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10431 else
10432 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10433 }
10434 else
10435 {
10436 if (val1 < val2)
10437 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10438 else
10439 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10440 }
10441 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10442 return \"\";
10443 }"
10444 [(set_attr "length" "12")
10445 (set_attr "predicable" "yes")
10446 (set_attr "type" "load1")]
10447 )
10448
10449 ; This pattern is never tried by combine, so do it as a peephole
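;; As a rough illustration of what it catches: a copy followed by a compare
;; of the source against zero, e.g.
;;	mov	r3, r1
;;	cmp	r1, #0
;; is rewritten as a single compare-and-move parallel, which can then be
;; emitted as one flag-setting instruction (roughly `subs r3, r1, #0'),
;; assuming the combined move-and-compare pattern elsewhere in this file.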
10450
10451 (define_peephole2
10452 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10453 (match_operand:SI 1 "arm_general_register_operand" ""))
10454 (set (reg:CC CC_REGNUM)
10455 (compare:CC (match_dup 1) (const_int 0)))]
10456 "TARGET_ARM"
10457 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10458 (set (match_dup 0) (match_dup 1))])]
10459 ""
10460 )
10461
10462 ; Peepholes to spot possible load- and store-multiples; if the ordering is
10463 ; reversed, check that the memory references aren't volatile.
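;; As a rough illustration, a run of word loads from consecutive addresses
;; into ascending registers, e.g.
;;	ldr	r4, [r0]
;;	ldr	r5, [r0, #4]
;;	ldr	r6, [r0, #8]
;;	ldr	r7, [r0, #12]
;; can be emitted as a single
;;	ldmia	r0, {r4, r5, r6, r7}
;; and similarly for stores with stmia.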
10464
10465 (define_peephole
10466 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10467 (match_operand:SI 4 "memory_operand" "m"))
10468 (set (match_operand:SI 1 "s_register_operand" "=rk")
10469 (match_operand:SI 5 "memory_operand" "m"))
10470 (set (match_operand:SI 2 "s_register_operand" "=rk")
10471 (match_operand:SI 6 "memory_operand" "m"))
10472 (set (match_operand:SI 3 "s_register_operand" "=rk")
10473 (match_operand:SI 7 "memory_operand" "m"))]
10474 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10475 "*
10476 return emit_ldm_seq (operands, 4);
10477 "
10478 )
10479
10480 (define_peephole
10481 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10482 (match_operand:SI 3 "memory_operand" "m"))
10483 (set (match_operand:SI 1 "s_register_operand" "=rk")
10484 (match_operand:SI 4 "memory_operand" "m"))
10485 (set (match_operand:SI 2 "s_register_operand" "=rk")
10486 (match_operand:SI 5 "memory_operand" "m"))]
10487 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10488 "*
10489 return emit_ldm_seq (operands, 3);
10490 "
10491 )
10492
10493 (define_peephole
10494 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10495 (match_operand:SI 2 "memory_operand" "m"))
10496 (set (match_operand:SI 1 "s_register_operand" "=rk")
10497 (match_operand:SI 3 "memory_operand" "m"))]
10498 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10499 "*
10500 return emit_ldm_seq (operands, 2);
10501 "
10502 )
10503
10504 (define_peephole
10505 [(set (match_operand:SI 4 "memory_operand" "=m")
10506 (match_operand:SI 0 "s_register_operand" "rk"))
10507 (set (match_operand:SI 5 "memory_operand" "=m")
10508 (match_operand:SI 1 "s_register_operand" "rk"))
10509 (set (match_operand:SI 6 "memory_operand" "=m")
10510 (match_operand:SI 2 "s_register_operand" "rk"))
10511 (set (match_operand:SI 7 "memory_operand" "=m")
10512 (match_operand:SI 3 "s_register_operand" "rk"))]
10513 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10514 "*
10515 return emit_stm_seq (operands, 4);
10516 "
10517 )
10518
10519 (define_peephole
10520 [(set (match_operand:SI 3 "memory_operand" "=m")
10521 (match_operand:SI 0 "s_register_operand" "rk"))
10522 (set (match_operand:SI 4 "memory_operand" "=m")
10523 (match_operand:SI 1 "s_register_operand" "rk"))
10524 (set (match_operand:SI 5 "memory_operand" "=m")
10525 (match_operand:SI 2 "s_register_operand" "rk"))]
10526 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10527 "*
10528 return emit_stm_seq (operands, 3);
10529 "
10530 )
10531
10532 (define_peephole
10533 [(set (match_operand:SI 2 "memory_operand" "=m")
10534 (match_operand:SI 0 "s_register_operand" "rk"))
10535 (set (match_operand:SI 3 "memory_operand" "=m")
10536 (match_operand:SI 1 "s_register_operand" "rk"))]
10537 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10538 "*
10539 return emit_stm_seq (operands, 2);
10540 "
10541 )
10542
10543 (define_split
10544 [(set (match_operand:SI 0 "s_register_operand" "")
10545 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10546 (const_int 0))
10547 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10548 [(match_operand:SI 3 "s_register_operand" "")
10549 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10550 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10551 "TARGET_ARM"
10552 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10553 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10554 (match_dup 5)))]
10555 ""
10556 )
10557
10558 ;; This split can be used because CC_Z mode implies that the following
10559 ;; branch will be an equality, or an unsigned inequality, so the sign
10560 ;; extension is not needed.
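;; For example (illustrative only): testing (x << 24) == 0x2a000000, where x
;; is a byte loaded from memory, holds exactly when the zero-extended byte
;; equals 0x2a, so the split below can use
;;	ldrb	r2, [rp]
;;	cmp	r2, #42
;; instead of shifting the loaded byte up by 24 bits before comparing.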
10561
10562 (define_split
10563 [(set (reg:CC_Z CC_REGNUM)
10564 (compare:CC_Z
10565 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10566 (const_int 24))
10567 (match_operand 1 "const_int_operand" "")))
10568 (clobber (match_scratch:SI 2 ""))]
10569 "TARGET_ARM
10570 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10571 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10572 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10573 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10574 "
10575 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10576 "
10577 )
10578 ;; ??? Check the patterns above for Thumb-2 usefulness
10579
10580 (define_expand "prologue"
10581 [(clobber (const_int 0))]
10582 "TARGET_EITHER"
10583 "if (TARGET_32BIT)
10584 arm_expand_prologue ();
10585 else
10586 thumb1_expand_prologue ();
10587 DONE;
10588 "
10589 )
10590
10591 (define_expand "epilogue"
10592 [(clobber (const_int 0))]
10593 "TARGET_EITHER"
10594 "
10595 if (crtl->calls_eh_return)
10596 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10597 if (TARGET_THUMB1)
10598 thumb1_expand_epilogue ();
10599 else if (USE_RETURN_INSN (FALSE))
10600 {
10601 emit_jump_insn (gen_return ());
10602 DONE;
10603 }
10604 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10605 gen_rtvec (1,
10606 gen_rtx_RETURN (VOIDmode)),
10607 VUNSPEC_EPILOGUE));
10608 DONE;
10609 "
10610 )
10611
10612 ;; Note - although unspec_volatiles USE all hard registers,
10613 ;; USEs are ignored after reload has completed. Thus we need
10614 ;; to add an unspec of the link register to ensure that flow
10615 ;; does not think that it is unused by the sibcall branch that
10616 ;; will replace the standard function epilogue.
10617 (define_insn "sibcall_epilogue"
10618 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10619 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10620 "TARGET_32BIT"
10621 "*
10622 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10623 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10624 return arm_output_epilogue (next_nonnote_insn (insn));
10625 "
10626 ;; Length is absolute worst case
10627 [(set_attr "length" "44")
10628 (set_attr "type" "block")
10629 ;; We don't clobber the conditions, but the potential length of this
10630 ;; operation is sufficient to make conditionalizing the sequence
10631 ;; unlikely to be profitable.
10632 (set_attr "conds" "clob")]
10633 )
10634
10635 (define_insn "*epilogue_insns"
10636 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10637 "TARGET_EITHER"
10638 "*
10639 if (TARGET_32BIT)
10640 return arm_output_epilogue (NULL);
10641 else /* TARGET_THUMB1 */
10642 return thumb_unexpanded_epilogue ();
10643 "
10644 ; Length is absolute worst case
10645 [(set_attr "length" "44")
10646 (set_attr "type" "block")
10647 ;; We don't clobber the conditions, but the potential length of this
10648 ;; operation is sufficient to make conditionalizing the sequence
10649 ;; unlikely to be profitable.
10650 (set_attr "conds" "clob")]
10651 )
10652
10653 (define_expand "eh_epilogue"
10654 [(use (match_operand:SI 0 "register_operand" ""))
10655 (use (match_operand:SI 1 "register_operand" ""))
10656 (use (match_operand:SI 2 "register_operand" ""))]
10657 "TARGET_EITHER"
10658 "
10659 {
10660 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10661 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10662 {
10663 rtx ra = gen_rtx_REG (Pmode, 2);
10664
10665 emit_move_insn (ra, operands[2]);
10666 operands[2] = ra;
10667 }
10668 /* This is a hack -- we may have crystallized the function type too
10669 early. */
10670 cfun->machine->func_type = 0;
10671 }"
10672 )
10673
10674 ;; This split is only used during output to reduce the number of patterns
10675 ;; that need assembler instructions added to them.  We allowed the setting
10676 ;; of the conditions to be implicit during rtl generation so that
10677 ;; the conditional compare patterns would work.  However, this conflicts to
10678 ;; some extent with the conditional data operations, so we have to split them
10679 ;; up again here.
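;; As a rough illustration, for something like r0 = (r1 < r2) ? r0 : r3 the
;; first split below produces a compare followed by a conditionally executed
;; move under the reversed condition, i.e. roughly
;;	cmp	r1, r2
;;	movge	r0, r3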
10680
10681 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10682 ;; conditional execution sufficient?
10683
10684 (define_split
10685 [(set (match_operand:SI 0 "s_register_operand" "")
10686 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10687 [(match_operand 2 "" "") (match_operand 3 "" "")])
10688 (match_dup 0)
10689 (match_operand 4 "" "")))
10690 (clobber (reg:CC CC_REGNUM))]
10691 "TARGET_ARM && reload_completed"
10692 [(set (match_dup 5) (match_dup 6))
10693 (cond_exec (match_dup 7)
10694 (set (match_dup 0) (match_dup 4)))]
10695 "
10696 {
10697 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10698 operands[2], operands[3]);
10699 enum rtx_code rc = GET_CODE (operands[1]);
10700
10701 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10702 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10703 if (mode == CCFPmode || mode == CCFPEmode)
10704 rc = reverse_condition_maybe_unordered (rc);
10705 else
10706 rc = reverse_condition (rc);
10707
10708 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10709 }"
10710 )
10711
10712 (define_split
10713 [(set (match_operand:SI 0 "s_register_operand" "")
10714 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10715 [(match_operand 2 "" "") (match_operand 3 "" "")])
10716 (match_operand 4 "" "")
10717 (match_dup 0)))
10718 (clobber (reg:CC CC_REGNUM))]
10719 "TARGET_ARM && reload_completed"
10720 [(set (match_dup 5) (match_dup 6))
10721 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10722 (set (match_dup 0) (match_dup 4)))]
10723 "
10724 {
10725 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10726 operands[2], operands[3]);
10727
10728 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10729 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10730 }"
10731 )
10732
10733 (define_split
10734 [(set (match_operand:SI 0 "s_register_operand" "")
10735 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10736 [(match_operand 2 "" "") (match_operand 3 "" "")])
10737 (match_operand 4 "" "")
10738 (match_operand 5 "" "")))
10739 (clobber (reg:CC CC_REGNUM))]
10740 "TARGET_ARM && reload_completed"
10741 [(set (match_dup 6) (match_dup 7))
10742 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10743 (set (match_dup 0) (match_dup 4)))
10744 (cond_exec (match_dup 8)
10745 (set (match_dup 0) (match_dup 5)))]
10746 "
10747 {
10748 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10749 operands[2], operands[3]);
10750 enum rtx_code rc = GET_CODE (operands[1]);
10751
10752 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10753 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10754 if (mode == CCFPmode || mode == CCFPEmode)
10755 rc = reverse_condition_maybe_unordered (rc);
10756 else
10757 rc = reverse_condition (rc);
10758
10759 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10760 }"
10761 )
10762
10763 (define_split
10764 [(set (match_operand:SI 0 "s_register_operand" "")
10765 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10766 [(match_operand:SI 2 "s_register_operand" "")
10767 (match_operand:SI 3 "arm_add_operand" "")])
10768 (match_operand:SI 4 "arm_rhs_operand" "")
10769 (not:SI
10770 (match_operand:SI 5 "s_register_operand" ""))))
10771 (clobber (reg:CC CC_REGNUM))]
10772 "TARGET_ARM && reload_completed"
10773 [(set (match_dup 6) (match_dup 7))
10774 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10775 (set (match_dup 0) (match_dup 4)))
10776 (cond_exec (match_dup 8)
10777 (set (match_dup 0) (not:SI (match_dup 5))))]
10778 "
10779 {
10780 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10781 operands[2], operands[3]);
10782 enum rtx_code rc = GET_CODE (operands[1]);
10783
10784 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10785 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10786 if (mode == CCFPmode || mode == CCFPEmode)
10787 rc = reverse_condition_maybe_unordered (rc);
10788 else
10789 rc = reverse_condition (rc);
10790
10791 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10792 }"
10793 )
10794
10795 (define_insn "*cond_move_not"
10796 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10797 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10798 [(match_operand 3 "cc_register" "") (const_int 0)])
10799 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10800 (not:SI
10801 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10802 "TARGET_ARM"
10803 "@
10804 mvn%D4\\t%0, %2
10805 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10806 [(set_attr "conds" "use")
10807 (set_attr "length" "4,8")]
10808 )
10809
10810 ;; The next two patterns occur when an AND operation is followed by a
10811 ;; scc insn sequence
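;; As a rough illustration, the first pattern below computes 0 or -1 from a
;; single bit of the operand; extracting bit 3, for instance, comes out as
;;	ands	r0, r1, #8
;;	mvnne	r0, #0
;; which is -((r1 >> 3) & 1) in C terms.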
10812
10813 (define_insn "*sign_extract_onebit"
10814 [(set (match_operand:SI 0 "s_register_operand" "=r")
10815 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10816 (const_int 1)
10817 (match_operand:SI 2 "const_int_operand" "n")))
10818 (clobber (reg:CC CC_REGNUM))]
10819 "TARGET_ARM"
10820 "*
10821 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10822 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10823 return \"mvnne\\t%0, #0\";
10824 "
10825 [(set_attr "conds" "clob")
10826 (set_attr "length" "8")]
10827 )
10828
10829 (define_insn "*not_signextract_onebit"
10830 [(set (match_operand:SI 0 "s_register_operand" "=r")
10831 (not:SI
10832 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10833 (const_int 1)
10834 (match_operand:SI 2 "const_int_operand" "n"))))
10835 (clobber (reg:CC CC_REGNUM))]
10836 "TARGET_ARM"
10837 "*
10838 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10839 output_asm_insn (\"tst\\t%1, %2\", operands);
10840 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10841 return \"movne\\t%0, #0\";
10842 "
10843 [(set_attr "conds" "clob")
10844 (set_attr "length" "12")]
10845 )
10846 ;; ??? The above patterns need auditing for Thumb-2
10847
10848 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10849 ;; expressions. For simplicity, the first register is also in the unspec
10850 ;; part.
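;; As a rough illustration of the output: saving r4, r5 and lr in ARM state,
;; with the stack pointer as the base, gives
;;	stmfd	sp!, {r4, r5, lr}
;; a single register is stored with `str rN, [sp, #-4]!' instead, and in
;; Thumb state a `push' is used so the assembler can pick the encoding.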
10851 (define_insn "*push_multi"
10852 [(match_parallel 2 "multi_register_push"
10853 [(set (match_operand:BLK 0 "memory_operand" "=m")
10854 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10855 UNSPEC_PUSH_MULT))])]
10856 "TARGET_32BIT"
10857 "*
10858 {
10859 int num_saves = XVECLEN (operands[2], 0);
10860
10861 /* For the StrongARM at least it is faster to
10862 use STR to store only a single register.
10863 In Thumb mode always use push, and the assembler will pick
10864 something appropriate. */
10865 if (num_saves == 1 && TARGET_ARM)
10866 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10867 else
10868 {
10869 int i;
10870 char pattern[100];
10871
10872 if (TARGET_ARM)
10873 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10874 else
10875 strcpy (pattern, \"push\\t{%1\");
10876
10877 for (i = 1; i < num_saves; i++)
10878 {
10879 strcat (pattern, \", %|\");
10880 strcat (pattern,
10881 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10882 }
10883
10884 strcat (pattern, \"}\");
10885 output_asm_insn (pattern, operands);
10886 }
10887
10888 return \"\";
10889 }"
10890 [(set_attr "type" "store4")]
10891 )
10892
10893 (define_insn "stack_tie"
10894 [(set (mem:BLK (scratch))
10895 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10896 (match_operand:SI 1 "s_register_operand" "rk")]
10897 UNSPEC_PRLG_STK))]
10898 ""
10899 ""
10900 [(set_attr "length" "0")]
10901 )
10902
10903 ;; Similarly for the floating point registers
10904 (define_insn "*push_fp_multi"
10905 [(match_parallel 2 "multi_register_push"
10906 [(set (match_operand:BLK 0 "memory_operand" "=m")
10907 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10908 UNSPEC_PUSH_MULT))])]
10909 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10910 "*
10911 {
10912 char pattern[100];
10913
10914 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10915 output_asm_insn (pattern, operands);
10916 return \"\";
10917 }"
10918 [(set_attr "type" "f_store")]
10919 )
10920
10921 ;; Special patterns for dealing with the constant pool
10922
10923 (define_insn "align_4"
10924 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10925 "TARGET_EITHER"
10926 "*
10927 assemble_align (32);
10928 return \"\";
10929 "
10930 )
10931
10932 (define_insn "align_8"
10933 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10934 "TARGET_EITHER"
10935 "*
10936 assemble_align (64);
10937 return \"\";
10938 "
10939 )
10940
10941 (define_insn "consttable_end"
10942 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10943 "TARGET_EITHER"
10944 "*
10945 making_const_table = FALSE;
10946 return \"\";
10947 "
10948 )
10949
10950 (define_insn "consttable_1"
10951 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10952 "TARGET_THUMB1"
10953 "*
10954 making_const_table = TRUE;
10955 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10956 assemble_zeros (3);
10957 return \"\";
10958 "
10959 [(set_attr "length" "4")]
10960 )
10961
10962 (define_insn "consttable_2"
10963 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10964 "TARGET_THUMB1"
10965 "*
10966 making_const_table = TRUE;
10967 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10968 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10969 assemble_zeros (2);
10970 return \"\";
10971 "
10972 [(set_attr "length" "4")]
10973 )
10974
10975 (define_insn "consttable_4"
10976 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10977 "TARGET_EITHER"
10978 "*
10979 {
10980 rtx x = operands[0];
10981 making_const_table = TRUE;
10982 switch (GET_MODE_CLASS (GET_MODE (x)))
10983 {
10984 case MODE_FLOAT:
10985 if (GET_MODE (x) == HFmode)
10986 arm_emit_fp16_const (x);
10987 else
10988 {
10989 REAL_VALUE_TYPE r;
10990 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10991 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10992 }
10993 break;
10994 default:
10995 /* XXX: Sometimes gcc does something really dumb and ends up with
10996 a HIGH in a constant pool entry, usually because it's trying to
10997 load into a VFP register. We know this will always be used in
10998 combination with a LO_SUM which ignores the high bits, so just
10999 strip off the HIGH. */
11000 if (GET_CODE (x) == HIGH)
11001 x = XEXP (x, 0);
11002 assemble_integer (x, 4, BITS_PER_WORD, 1);
11003 mark_symbol_refs_as_used (x);
11004 break;
11005 }
11006 return \"\";
11007 }"
11008 [(set_attr "length" "4")]
11009 )
11010
11011 (define_insn "consttable_8"
11012 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11013 "TARGET_EITHER"
11014 "*
11015 {
11016 making_const_table = TRUE;
11017 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11018 {
11019 case MODE_FLOAT:
11020 {
11021 REAL_VALUE_TYPE r;
11022 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11023 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11024 break;
11025 }
11026 default:
11027 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11028 break;
11029 }
11030 return \"\";
11031 }"
11032 [(set_attr "length" "8")]
11033 )
11034
11035 (define_insn "consttable_16"
11036 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11037 "TARGET_EITHER"
11038 "*
11039 {
11040 making_const_table = TRUE;
11041 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11042 {
11043 case MODE_FLOAT:
11044 {
11045 REAL_VALUE_TYPE r;
11046 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11047 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11048 break;
11049 }
11050 default:
11051 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11052 break;
11053 }
11054 return \"\";
11055 }"
11056 [(set_attr "length" "16")]
11057 )
11058
11059 ;; Miscellaneous Thumb patterns
11060
11061 (define_expand "tablejump"
11062 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11063 (use (label_ref (match_operand 1 "" "")))])]
11064 "TARGET_THUMB1"
11065 "
11066 if (flag_pic)
11067 {
11068 /* Hopefully, CSE will eliminate this copy. */
11069 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11070 rtx reg2 = gen_reg_rtx (SImode);
11071
11072 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11073 operands[0] = reg2;
11074 }
11075 "
11076 )
11077
11078 ;; NB never uses BX.
11079 (define_insn "*thumb1_tablejump"
11080 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11081 (use (label_ref (match_operand 1 "" "")))]
11082 "TARGET_THUMB1"
11083 "mov\\t%|pc, %0"
11084 [(set_attr "length" "2")]
11085 )
11086
11087 ;; V5 instructions.
11088
11089 (define_insn "clzsi2"
11090 [(set (match_operand:SI 0 "s_register_operand" "=r")
11091 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11092 "TARGET_32BIT && arm_arch5"
11093 "clz%?\\t%0, %1"
11094 [(set_attr "predicable" "yes")
11095 (set_attr "insn" "clz")])
11096
11097 (define_insn "rbitsi2"
11098 [(set (match_operand:SI 0 "s_register_operand" "=r")
11099 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11100 "TARGET_32BIT && arm_arch_thumb2"
11101 "rbit%?\\t%0, %1"
11102 [(set_attr "predicable" "yes")
11103 (set_attr "insn" "clz")])
11104
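;; ctz is computed as clz (rbit (x)): bit-reversing the operand turns its
;; least significant set bit into the most significant one, i.e. roughly
;;	rbit	tmp, r1
;;	clz	r0, tmp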
11105 (define_expand "ctzsi2"
11106 [(set (match_operand:SI 0 "s_register_operand" "")
11107 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
11108 "TARGET_32BIT && arm_arch_thumb2"
11109 "
11110 {
11111 rtx tmp = gen_reg_rtx (SImode);
11112 emit_insn (gen_rbitsi2 (tmp, operands[1]));
11113 emit_insn (gen_clzsi2 (operands[0], tmp));
11114 }
11115 DONE;
11116 "
11117 )
11118
11119 ;; V5E instructions.
11120
11121 (define_insn "prefetch"
11122 [(prefetch (match_operand:SI 0 "address_operand" "p")
11123 (match_operand:SI 1 "" "")
11124 (match_operand:SI 2 "" ""))]
11125 "TARGET_32BIT && arm_arch5e"
11126 "pld\\t%a0")
11127
11128 ;; General predication pattern
11129
11130 (define_cond_exec
11131 [(match_operator 0 "arm_comparison_operator"
11132 [(match_operand 1 "cc_register" "")
11133 (const_int 0)])]
11134 "TARGET_32BIT"
11135 ""
11136 )
11137
11138 (define_insn "prologue_use"
11139 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11140 ""
11141 "%@ %0 needed for prologue"
11142 [(set_attr "length" "0")]
11143 )
11144
11145
11146 ;; Patterns for exception handling
11147
11148 (define_expand "eh_return"
11149 [(use (match_operand 0 "general_operand" ""))]
11150 "TARGET_EITHER"
11151 "
11152 {
11153 if (TARGET_32BIT)
11154 emit_insn (gen_arm_eh_return (operands[0]));
11155 else
11156 emit_insn (gen_thumb_eh_return (operands[0]));
11157 DONE;
11158 }"
11159 )
11160
11161 ;; We can't expand this before we know where the link register is stored.
11162 (define_insn_and_split "arm_eh_return"
11163 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11164 VUNSPEC_EH_RETURN)
11165 (clobber (match_scratch:SI 1 "=&r"))]
11166 "TARGET_ARM"
11167 "#"
11168 "&& reload_completed"
11169 [(const_int 0)]
11170 "
11171 {
11172 arm_set_return_address (operands[0], operands[1]);
11173 DONE;
11174 }"
11175 )
11176
11177 (define_insn_and_split "thumb_eh_return"
11178 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11179 VUNSPEC_EH_RETURN)
11180 (clobber (match_scratch:SI 1 "=&l"))]
11181 "TARGET_THUMB1"
11182 "#"
11183 "&& reload_completed"
11184 [(const_int 0)]
11185 "
11186 {
11187 thumb_set_return_address (operands[0], operands[1]);
11188 DONE;
11189 }"
11190 )
11191
11192 \f
11193 ;; TLS support
11194
11195 (define_insn "load_tp_hard"
11196 [(set (match_operand:SI 0 "register_operand" "=r")
11197 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11198 "TARGET_HARD_TP"
11199 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11200 [(set_attr "predicable" "yes")]
11201 )
11202
11203 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11204 (define_insn "load_tp_soft"
11205 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11206 (clobber (reg:SI LR_REGNUM))
11207 (clobber (reg:SI IP_REGNUM))
11208 (clobber (reg:CC CC_REGNUM))]
11209 "TARGET_SOFT_TP"
11210 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11211 [(set_attr "conds" "clob")]
11212 )
11213
11214 (define_insn "*arm_movtas_ze"
11215 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11216 (const_int 16)
11217 (const_int 16))
11218 (match_operand:SI 1 "const_int_operand" ""))]
11219 "TARGET_32BIT"
11220 "movt%?\t%0, %c1"
11221 [(set_attr "predicable" "yes")
11222 (set_attr "length" "4")]
11223 )
11224
11225 (define_insn "arm_rev"
11226 [(set (match_operand:SI 0 "s_register_operand" "=r")
11227 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11228 "TARGET_EITHER && arm_arch6"
11229 "rev\t%0, %1"
11230 [(set (attr "length")
11231 (if_then_else (eq_attr "is_thumb" "yes")
11232 (const_int 2)
11233 (const_int 4)))]
11234 )
11235
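;; Byte-reverse a word without the rev instruction, using the classic
;; eor/ror trick.  Writing the input as bytes [a b c d] (a most significant):
;;   t = x ^ ror (x, 16)         = [a^c b^d a^c b^d]
;;   t = (t >> 8) & ~0xff00      = [0   a^c 0   a^c]
;;   result = ror (x, 8) ^ t     = [d   c   b   a]
;; (an illustrative derivation of the five sets below, not generated code).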
11236 (define_expand "arm_legacy_rev"
11237 [(set (match_operand:SI 2 "s_register_operand" "")
11238 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11239 (const_int 16))
11240 (match_dup 1)))
11241 (set (match_dup 2)
11242 (lshiftrt:SI (match_dup 2)
11243 (const_int 8)))
11244 (set (match_operand:SI 3 "s_register_operand" "")
11245 (rotatert:SI (match_dup 1)
11246 (const_int 8)))
11247 (set (match_dup 2)
11248 (and:SI (match_dup 2)
11249 (const_int -65281)))
11250 (set (match_operand:SI 0 "s_register_operand" "")
11251 (xor:SI (match_dup 3)
11252 (match_dup 2)))]
11253 "TARGET_32BIT"
11254 ""
11255 )
11256
11257 ;; Reuse temporaries to keep register pressure down.
11258 (define_expand "thumb_legacy_rev"
11259 [(set (match_operand:SI 2 "s_register_operand" "")
11260 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11261 (const_int 24)))
11262 (set (match_operand:SI 3 "s_register_operand" "")
11263 (lshiftrt:SI (match_dup 1)
11264 (const_int 24)))
11265 (set (match_dup 3)
11266 (ior:SI (match_dup 3)
11267 (match_dup 2)))
11268 (set (match_operand:SI 4 "s_register_operand" "")
11269 (const_int 16))
11270 (set (match_operand:SI 5 "s_register_operand" "")
11271 (rotatert:SI (match_dup 1)
11272 (match_dup 4)))
11273 (set (match_dup 2)
11274 (ashift:SI (match_dup 5)
11275 (const_int 24)))
11276 (set (match_dup 5)
11277 (lshiftrt:SI (match_dup 5)
11278 (const_int 24)))
11279 (set (match_dup 5)
11280 (ior:SI (match_dup 5)
11281 (match_dup 2)))
11282 (set (match_dup 5)
11283 (rotatert:SI (match_dup 5)
11284 (match_dup 4)))
11285 (set (match_operand:SI 0 "s_register_operand" "")
11286 (ior:SI (match_dup 5)
11287 (match_dup 3)))]
11288 "TARGET_THUMB"
11289 ""
11290 )
11291
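;; On arm_arch6 targets the bswap is matched directly by the arm_rev pattern
;; above; otherwise the legacy open-coded sequences are used, except when
;; optimizing for size, where the expander FAILs and the generic fallback
;; (presumably a shift-and-or sequence or a library call) is used instead.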
11292 (define_expand "bswapsi2"
11293 [(set (match_operand:SI 0 "s_register_operand" "=r")
11294 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11295 "TARGET_EITHER"
11296 "
11297 if (!arm_arch6)
11298 {
11299 if (!optimize_size)
11300 {
11301 rtx op2 = gen_reg_rtx (SImode);
11302 rtx op3 = gen_reg_rtx (SImode);
11303
11304 if (TARGET_THUMB)
11305 {
11306 rtx op4 = gen_reg_rtx (SImode);
11307 rtx op5 = gen_reg_rtx (SImode);
11308
11309 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11310 op2, op3, op4, op5));
11311 }
11312 else
11313 {
11314 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11315 op2, op3));
11316 }
11317
11318 DONE;
11319 }
11320 else
11321 FAIL;
11322 }
11323 "
11324 )
11325
11326 ;; Load the FPA co-processor patterns
11327 (include "fpa.md")
11328 ;; Load the Maverick co-processor patterns
11329 (include "cirrus.md")
11330 ;; Vector bits common to IWMMXT and Neon
11331 (include "vec-common.md")
11332 ;; Load the Intel Wireless Multimedia Extension patterns
11333 (include "iwmmxt.md")
11334 ;; Load the VFP co-processor patterns
11335 (include "vfp.md")
11336 ;; Thumb-2 patterns
11337 (include "thumb2.md")
11338 ;; Neon patterns
11339 (include "neon.md")
11340