1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instructions such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
45 ;;---------------------------------------------------------------------------
48 ;; Register numbers -- All machine registers should be defined here
50 [(R0_REGNUM 0) ; First CORE register
51 (R1_REGNUM 1) ; Second CORE register
52 (IP_REGNUM 12) ; Scratch register
53 (SP_REGNUM 13) ; Stack pointer
54 (LR_REGNUM 14) ; Return address register
55 (PC_REGNUM 15) ; Program counter
56 (LAST_ARM_REGNUM 15) ;
57 (CC_REGNUM 100) ; Condition code pseudo register
58 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
61 ;; 3rd operand to select_dominance_cc_mode
68 ;; conditional compare combination
79 ;;---------------------------------------------------------------------------
82 ;; Processor type. This is created automatically from arm-cores.def.
83 (include "arm-tune.md")
85 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
86 ; generating ARM code. This is used to control the length of some insn
87 ; patterns that share the same RTL in both ARM and Thumb code.
88 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
90 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
91 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
93 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
94 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
96 ; We use this attribute to disable alternatives that can produce 32-bit
97 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
98 ; that contain 32-bit instructions.
99 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
101 ; This attribute is used to disable a predicated alternative when we have
103 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
105 ;; Operand number of an input operand that is shifted. Zero if the
106 ;; given instruction does not shift one of its input operands.
107 (define_attr "shift" "" (const_int 0))
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try and group them together).
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
115 (define_attr "predicated" "yes,no" (const_string "no"))
117 ; LENGTH of an instruction (in bytes)
118 (define_attr "length" ""
121 ; The architecture which supports the instruction (or alternative).
122 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
123 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
124 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
125 ; arm_arch6. This attribute is used to compute attribute "enabled",
126 ; use type "any" to enable an alternative in all cases.
127 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
128 (const_string "any"))
130 (define_attr "arch_enabled" "no,yes"
131 (cond [(eq_attr "arch" "any")
134 (and (eq_attr "arch" "a")
135 (match_test "TARGET_ARM"))
138 (and (eq_attr "arch" "t")
139 (match_test "TARGET_THUMB"))
142 (and (eq_attr "arch" "t1")
143 (match_test "TARGET_THUMB1"))
146 (and (eq_attr "arch" "t2")
147 (match_test "TARGET_THUMB2"))
150 (and (eq_attr "arch" "32")
151 (match_test "TARGET_32BIT"))
154 (and (eq_attr "arch" "v6")
155 (match_test "TARGET_32BIT && arm_arch6"))
158 (and (eq_attr "arch" "nov6")
159 (match_test "TARGET_32BIT && !arm_arch6"))
162 (and (eq_attr "arch" "avoid_neon_for_64bits")
163 (match_test "TARGET_NEON")
164 (not (match_test "TARGET_PREFER_NEON_64BITS")))
167 (and (eq_attr "arch" "neon_for_64bits")
168 (match_test "TARGET_NEON")
169 (match_test "TARGET_PREFER_NEON_64BITS"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")]
176 (const_string "no")))
; OPT records which optimization goal an alternative is intended for:
; "speed", "size", or "any" (the default, no restriction).  It is
; combined with the "opt_enabled" attribute below, which tests the
; per-function optimize_function_for_{speed,size}_p predicates.
178 (define_attr "opt" "any,speed,size"
179 (const_string "any"))
181 (define_attr "opt_enabled" "no,yes"
182 (cond [(eq_attr "opt" "any")
185 (and (eq_attr "opt" "speed")
186 (match_test "optimize_function_for_speed_p (cfun)"))
189 (and (eq_attr "opt" "size")
190 (match_test "optimize_function_for_size_p (cfun)"))
191 (const_string "yes")]
192 (const_string "no")))
194 ; Allows an insn to disable certain alternatives for reasons other than
196 (define_attr "insn_enabled" "no,yes"
197 (const_string "yes"))
199 ; Enable all alternatives that are both arch_enabled and insn_enabled.
200 (define_attr "enabled" "no,yes"
201 (cond [(eq_attr "insn_enabled" "no")
204 (and (eq_attr "predicable_short_it" "no")
205 (and (eq_attr "predicated" "yes")
206 (match_test "arm_restrict_it")))
209 (and (eq_attr "enabled_for_depr_it" "no")
210 (match_test "arm_restrict_it"))
213 (eq_attr "arch_enabled" "no")
216 (eq_attr "opt_enabled" "no")
218 (const_string "yes")))
220 ; POOL_RANGE is how far away from a constant pool entry that this insn
221 ; can be placed. If the distance is zero, then this insn will never
222 ; reference the pool.
223 ; Note that for Thumb constant pools the PC value is rounded down to the
224 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
225 ; Thumb insns) should be set to <max_range> - 2.
226 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
227 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Per-target raw ranges; a value of zero means the insn never references
; the constant pool (see the POOL_RANGE/NEG_POOL_RANGE comment above).
228 (define_attr "arm_pool_range" "" (const_int 0))
229 (define_attr "thumb2_pool_range" "" (const_int 0))
230 (define_attr "arm_neg_pool_range" "" (const_int 0))
231 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; POOL_RANGE and NEG_POOL_RANGE select between the ARM and Thumb values
; above based on the "is_thumb" attribute of the insn being emitted.
; NOTE(review): the "is_thumb" test also selects the thumb2_* value for
; Thumb-1 insns -- presumably intentional (Thumb ranges are rounded down,
; per the comment above), but worth confirming.
233 (define_attr "pool_range" ""
234 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
235 (attr "arm_pool_range")))
236 (define_attr "neg_pool_range" ""
237 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
238 (attr "arm_neg_pool_range")))
240 ; An assembler sequence may clobber the condition codes without us knowing.
241 ; If such an insn references the pool, then we have no way of knowing how,
242 ; so use the most conservative value for pool_range.
; Default attributes applied to inline asm statements: assume the
; condition codes are clobbered, a 4-byte length, and a conservative
; pool range (see the comment above on why asm must be pessimistic).
243 (define_asm_attributes
244 [(set_attr "conds" "clob")
245 (set_attr "length" "4")
246 (set_attr "pool_range" "250")])
248 ; TYPE attribute is used to classify instructions for use in scheduling.
250 ; Instruction classification:
252 ; arlo_imm any arithmetic or logical instruction that doesn't have
253 ; a shifted operand and has an immediate operand. This
254 ; excludes MOV, MVN and RSB(S) immediate.
255 ; arlo_reg any arithmetic or logical instruction that doesn't have
256 ; a shifted or an immediate operand. This excludes
257 ; MOV and MVN but includes MOVT. This is also the default.
258 ; arlo_shift any arithmetic or logical instruction that has a source
259 ; operand shifted by a constant. This excludes
261 ; arlo_shift_reg as arlo_shift, with the shift amount specified in a
263 ; block blockage insn, this blocks all functional units.
265 ; call subroutine call.
266 ; clz count leading zeros (CLZ).
267 ; extend extend instruction (SXTB, SXTH, UXTB, UXTH).
268 ; f_2_r transfer from float to core (no memory needed).
269 ; f_cvt conversion between float and integral.
270 ; f_flag transfer of co-processor flags to the CPSR.
271 ; f_load[d,s] double/single load from memory. Used for VFP unit.
272 ; f_minmax[d,s] double/single floating point minimum/maximum.
273 ; f_rint[d,s] double/single floating point round to integral.
274 ; f_sel[d,s] double/single floating byte select.
275 ; f_store[d,s] double/single store to memory. Used for VFP unit.
276 ; fadd[d,s] double/single floating-point scalar addition.
277 ; fcmp[d,s] double/single floating-point compare.
278 ; fconst[d,s] double/single load immediate.
279 ; fcpys single precision floating point cpy.
280 ; fdiv[d,s] double/single precision floating point division.
281 ; ffarith[d,s] double/single floating point abs/neg/cpy.
282 ; ffma[d,s] double/single floating point fused multiply-accumulate.
283 ; float floating point arithmetic operation.
284 ; fmac[d,s] double/single floating point multiply-accumulate.
285 ; fmul[d,s] double/single floating point multiply.
286 ; load_byte load byte(s) from memory to arm registers.
287 ; load1 load 1 word from memory to arm registers.
288 ; load2 load 2 words from memory to arm registers.
289 ; load3 load 3 words from memory to arm registers.
290 ; load4 load 4 words from memory to arm registers.
291 ; mla integer multiply accumulate.
292 ; mlas integer multiply accumulate, flag setting.
293 ; mov_imm simple MOV instruction that moves an immediate to
294 ; register. This includes MOVW, but not MOVT.
295 ; mov_reg simple MOV instruction that moves a register to another
296 ; register. This includes MOVW, but not MOVT.
297 ; mov_shift simple MOV instruction, shifted operand by a constant.
298 ; mov_shift_reg simple MOV instruction, shifted operand by a register.
299 ; mul integer multiply.
300 ; muls integer multiply, flag setting.
301 ; mvn_imm inverting move instruction, immediate.
302 ; mvn_reg inverting move instruction, register.
303 ; mvn_shift inverting move instruction, shifted operand by a constant.
304 ; mvn_shift_reg inverting move instruction, shifted operand by a register.
305 ; r_2_f transfer from core to float.
306 ; sdiv signed division.
307 ; shift simple shift operation (LSL, LSR, ASR, ROR) with an
309 ; shift_reg simple shift by a register.
310 ; smlad signed multiply accumulate dual.
311 ; smladx signed multiply accumulate dual reverse.
312 ; smlal signed multiply accumulate long.
313 ; smlald signed multiply accumulate long dual.
314 ; smlals signed multiply accumulate long, flag setting.
315 ; smlalxy signed multiply accumulate, 16x16-bit, 64-bit accumulate.
316 ; smlawx signed multiply accumulate, 32x16-bit, 32-bit accumulate.
317 ; smlawy signed multiply accumulate wide, 32x16-bit,
319 ; smlaxy signed multiply accumulate, 16x16-bit, 32-bit accumulate.
320 ; smlsd signed multiply subtract dual.
321 ; smlsdx signed multiply subtract dual reverse.
322 ; smlsld signed multiply subtract long dual.
323 ; smmla signed most significant word multiply accumulate.
324 ; smmul signed most significant word multiply.
325 ; smmulr signed most significant word multiply, rounded.
326 ; smuad signed dual multiply add.
327 ; smuadx signed dual multiply add reverse.
328 ; smull signed multiply long.
329 ; smulls signed multiply long, flag setting.
330 ; smulwy signed multiply wide, 32x16-bit, 32-bit accumulate.
331 ; smulxy signed multiply, 16x16-bit, 32-bit accumulate.
332 ; smusd signed dual multiply subtract.
333 ; smusdx signed dual multiply subtract reverse.
334 ; store1 store 1 word to memory from arm registers.
335 ; store2 store 2 words to memory from arm registers.
336 ; store3 store 3 words to memory from arm registers.
337 ; store4 store 4 (or more) words to memory from arm registers.
338 ; udiv unsigned division.
339 ; umaal unsigned multiply accumulate accumulate long.
340 ; umlal unsigned multiply accumulate long.
341 ; umlals unsigned multiply accumulate long, flag setting.
342 ; umull unsigned multiply long.
343 ; umulls unsigned multiply long, flag setting.
345 ; The classification below is for instructions used by the Wireless MMX
346 ; Technology. Each attribute value is used to classify an instruction of the
347 ; same name or family.
563 (const_string "arlo_reg"))
565 ; Is this an (integer side) multiply with a 32-bit (or smaller) result?
566 (define_attr "mul32" "no,yes"
569 "smulxy,smlaxy,smulwy,smlawx,mul,muls,mla,mlas,smlawy,smuad,smuadx,\
570 smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,smlald,smlsld")
572 (const_string "no")))
574 ; Is this an (integer side) multiply with a 64-bit result?
575 (define_attr "mul64" "no,yes"
578 "smlalxy,umull,umulls,umaal,umlal,umlals,smull,smulls,smlal,smlals")
580 (const_string "no")))
582 ; Load scheduling, set from the arm_ld_sched variable
583 ; initialized by arm_option_override()
584 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
586 ;; Classification of NEON instructions for scheduling purposes.
587 (define_attr "neon_type"
598 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
599 neon_mul_qqq_8_16_32_ddd_32,\
600 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
601 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
603 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
604 neon_mla_qqq_32_qqd_32_scalar,\
605 neon_mul_ddd_16_scalar_32_16_long_scalar,\
606 neon_mul_qqd_32_scalar,\
607 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
612 neon_vqshl_vrshl_vqrshl_qqq,\
614 neon_fp_vadd_ddd_vabs_dd,\
615 neon_fp_vadd_qqq_vabs_qq,\
621 neon_fp_vmla_ddd_scalar,\
622 neon_fp_vmla_qqq_scalar,\
623 neon_fp_vrecps_vrsqrts_ddd,\
624 neon_fp_vrecps_vrsqrts_qqq,\
632 neon_vld2_2_regs_vld1_vld2_all_lanes,\
635 neon_vst1_1_2_regs_vst2_2_regs,\
637 neon_vst2_4_regs_vst3_vst4,\
639 neon_vld1_vld2_lane,\
640 neon_vld3_vld4_lane,\
641 neon_vst1_vst2_lane,\
642 neon_vst3_vst4_lane,\
643 neon_vld3_vld4_all_lanes,\
651 (const_string "none"))
653 ; condition codes: this one is used by final_prescan_insn to speed up
654 ; conditionalizing instructions. It saves having to scan the rtl to see if
655 ; it uses or alters the condition codes.
657 ; USE means that the condition codes are used by the insn in the process of
658 ; outputting code, this means (at present) that we can't use the insn in
661 ; SET means that the purpose of the insn is to set the condition codes in a
662 ; well defined manner.
664 ; CLOB means that the condition codes are altered in an undefined manner, if
665 ; they are altered at all
667 ; UNCONDITIONAL means the instruction can not be conditionally executed and
668 ; that the instruction does not use or alter the condition codes.
670 ; NOCOND means that the instruction does not use or alter the condition
671 ; codes but can be converted into a conditionally executed instruction.
673 (define_attr "conds" "use,set,clob,unconditional,nocond"
675 (ior (eq_attr "is_thumb1" "yes")
676 (eq_attr "type" "call"))
677 (const_string "clob")
678 (if_then_else (eq_attr "neon_type" "none")
679 (const_string "nocond")
680 (const_string "unconditional"))))
682 ; Predicable means that the insn can be conditionally executed based on
683 ; an automatically added predicate (additional patterns are generated by
684 ; gen...). We default to 'no' because no Thumb patterns match this rule
685 ; and not all ARM patterns do.
686 (define_attr "predicable" "no,yes" (const_string "no"))
688 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
689 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
690 ; suffer blockages enough to warrant modelling this (and it can adversely
691 ; affect the schedule).
692 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
694 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
695 ; to stall the processor. Used with model_wbuf above.
696 (define_attr "write_conflict" "no,yes"
697 (if_then_else (eq_attr "type"
700 (const_string "no")))
702 ; Classify the insns into those that take one cycle and those that take more
703 ; than one on the main cpu execution unit.
; Types listed below are treated as completing in a single cycle on the
; main execution unit; any type not listed falls through to "multi".
704 (define_attr "core_cycles" "single,multi"
705 (if_then_else (eq_attr "type"
706 "arlo_imm, arlo_reg,\
707 extend, shift, arlo_shift, float, fdivd, fdivs,\
708 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
709 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
710 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
711 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
712 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
713 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
714 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
715 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
716 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
717 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
718 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
719 (const_string "single")
720 (const_string "multi")))
722 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
723 ;; distant label. Only applicable to Thumb code.
724 (define_attr "far_jump" "yes,no" (const_string "no"))
727 ;; The number of machine instructions this pattern expands to.
728 ;; Used for Thumb-2 conditional execution.
729 (define_attr "ce_count" "" (const_int 1))
731 ;;---------------------------------------------------------------------------
734 (include "unspecs.md")
736 ;;---------------------------------------------------------------------------
739 (include "iterators.md")
741 ;;---------------------------------------------------------------------------
744 (include "predicates.md")
745 (include "constraints.md")
747 ;;---------------------------------------------------------------------------
748 ;; Pipeline descriptions
750 (define_attr "tune_cortexr4" "yes,no"
752 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
754 (const_string "no"))))
756 ;; True if the generic scheduling description should be used.
758 (define_attr "generic_sched" "yes,no"
760 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
761 (eq_attr "tune_cortexr4" "yes"))
763 (const_string "yes"))))
765 (define_attr "generic_vfp" "yes,no"
767 (and (eq_attr "fpu" "vfp")
768 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
769 (eq_attr "tune_cortexr4" "no"))
771 (const_string "no"))))
773 (include "marvell-f-iwmmxt.md")
774 (include "arm-generic.md")
775 (include "arm926ejs.md")
776 (include "arm1020e.md")
777 (include "arm1026ejs.md")
778 (include "arm1136jfs.md")
780 (include "fa606te.md")
781 (include "fa626te.md")
782 (include "fmp626.md")
783 (include "fa726te.md")
784 (include "cortex-a5.md")
785 (include "cortex-a7.md")
786 (include "cortex-a8.md")
787 (include "cortex-a9.md")
788 (include "cortex-a15.md")
789 (include "cortex-a53.md")
790 (include "cortex-r4.md")
791 (include "cortex-r4f.md")
792 (include "cortex-m4.md")
793 (include "cortex-m4-fpu.md")
795 (include "marvell-pj4.md")
798 ;;---------------------------------------------------------------------------
803 ;; Note: For DImode insns, there is normally no reason why operands should
804 ;; not be in the same register, what we don't want is for something being
805 ;; written to partially overlap something that is an input.
807 (define_expand "adddi3"
809 [(set (match_operand:DI 0 "s_register_operand" "")
810 (plus:DI (match_operand:DI 1 "s_register_operand" "")
811 (match_operand:DI 2 "arm_adddi_operand" "")))
812 (clobber (reg:CC CC_REGNUM))])]
817 if (!REG_P (operands[1]))
818 operands[1] = force_reg (DImode, operands[1]);
819 if (!REG_P (operands[2]))
820 operands[2] = force_reg (DImode, operands[2]);
825 (define_insn "*thumb1_adddi3"
826 [(set (match_operand:DI 0 "register_operand" "=l")
827 (plus:DI (match_operand:DI 1 "register_operand" "%0")
828 (match_operand:DI 2 "register_operand" "l")))
829 (clobber (reg:CC CC_REGNUM))
832 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
833 [(set_attr "length" "4")]
836 (define_insn_and_split "*arm_adddi3"
837 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
838 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
839 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
840 (clobber (reg:CC CC_REGNUM))]
841 "TARGET_32BIT && !TARGET_NEON"
843 "TARGET_32BIT && reload_completed
844 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
845 [(parallel [(set (reg:CC_C CC_REGNUM)
846 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
848 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
849 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
850 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
853 operands[3] = gen_highpart (SImode, operands[0]);
854 operands[0] = gen_lowpart (SImode, operands[0]);
855 operands[4] = gen_highpart (SImode, operands[1]);
856 operands[1] = gen_lowpart (SImode, operands[1]);
857 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
858 operands[2] = gen_lowpart (SImode, operands[2]);
860 [(set_attr "conds" "clob")
861 (set_attr "length" "8")]
864 (define_insn_and_split "*adddi_sesidi_di"
865 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
866 (plus:DI (sign_extend:DI
867 (match_operand:SI 2 "s_register_operand" "r,r"))
868 (match_operand:DI 1 "s_register_operand" "0,r")))
869 (clobber (reg:CC CC_REGNUM))]
872 "TARGET_32BIT && reload_completed"
873 [(parallel [(set (reg:CC_C CC_REGNUM)
874 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
876 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
877 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
880 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
883 operands[3] = gen_highpart (SImode, operands[0]);
884 operands[0] = gen_lowpart (SImode, operands[0]);
885 operands[4] = gen_highpart (SImode, operands[1]);
886 operands[1] = gen_lowpart (SImode, operands[1]);
887 operands[2] = gen_lowpart (SImode, operands[2]);
889 [(set_attr "conds" "clob")
890 (set_attr "length" "8")]
893 (define_insn_and_split "*adddi_zesidi_di"
894 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
895 (plus:DI (zero_extend:DI
896 (match_operand:SI 2 "s_register_operand" "r,r"))
897 (match_operand:DI 1 "s_register_operand" "0,r")))
898 (clobber (reg:CC CC_REGNUM))]
901 "TARGET_32BIT && reload_completed"
902 [(parallel [(set (reg:CC_C CC_REGNUM)
903 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
905 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
906 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
907 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
910 operands[3] = gen_highpart (SImode, operands[0]);
911 operands[0] = gen_lowpart (SImode, operands[0]);
912 operands[4] = gen_highpart (SImode, operands[1]);
913 operands[1] = gen_lowpart (SImode, operands[1]);
914 operands[2] = gen_lowpart (SImode, operands[2]);
916 [(set_attr "conds" "clob")
917 (set_attr "length" "8")]
920 (define_expand "addsi3"
921 [(set (match_operand:SI 0 "s_register_operand" "")
922 (plus:SI (match_operand:SI 1 "s_register_operand" "")
923 (match_operand:SI 2 "reg_or_int_operand" "")))]
926 if (TARGET_32BIT && CONST_INT_P (operands[2]))
928 arm_split_constant (PLUS, SImode, NULL_RTX,
929 INTVAL (operands[2]), operands[0], operands[1],
930 optimize && can_create_pseudo_p ());
936 ; If there is a scratch available, this will be faster than synthesizing the
939 [(match_scratch:SI 3 "r")
940 (set (match_operand:SI 0 "arm_general_register_operand" "")
941 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
942 (match_operand:SI 2 "const_int_operand" "")))]
944 !(const_ok_for_arm (INTVAL (operands[2]))
945 || const_ok_for_arm (-INTVAL (operands[2])))
946 && const_ok_for_arm (~INTVAL (operands[2]))"
947 [(set (match_dup 3) (match_dup 2))
948 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
952 ;; The r/r/k alternative is required when reloading the address
953 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
954 ;; put the duplicated register first, and not try the commutative version.
955 (define_insn_and_split "*arm_addsi3"
956 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,r ,k ,r ,k,k,r ,k ,r")
957 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,rk,k ,rk,k,r,rk,k ,rk")
958 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
973 subw%?\\t%0, %1, #%n2
974 subw%?\\t%0, %1, #%n2
977 && CONST_INT_P (operands[2])
978 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
979 && (reload_completed || !arm_eliminable_register (operands[1]))"
980 [(clobber (const_int 0))]
982 arm_split_constant (PLUS, SImode, curr_insn,
983 INTVAL (operands[2]), operands[0],
987 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
988 (set_attr "predicable" "yes")
989 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no")
990 (set_attr "arch" "t2,t2,t2,t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
991 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
992 (const_string "arlo_imm")
993 (const_string "arlo_reg")))
997 (define_insn_and_split "*thumb1_addsi3"
998 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
999 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
1000 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
1003 static const char * const asms[] =
1005 \"add\\t%0, %0, %2\",
1006 \"sub\\t%0, %0, #%n2\",
1007 \"add\\t%0, %1, %2\",
1008 \"add\\t%0, %0, %2\",
1009 \"add\\t%0, %0, %2\",
1010 \"add\\t%0, %1, %2\",
1011 \"add\\t%0, %1, %2\",
1016 if ((which_alternative == 2 || which_alternative == 6)
1017 && CONST_INT_P (operands[2])
1018 && INTVAL (operands[2]) < 0)
1019 return \"sub\\t%0, %1, #%n2\";
1020 return asms[which_alternative];
1022 "&& reload_completed && CONST_INT_P (operands[2])
1023 && ((operands[1] != stack_pointer_rtx
1024 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
1025 || (operands[1] == stack_pointer_rtx
1026 && INTVAL (operands[2]) > 1020))"
1027 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
1028 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
1030 HOST_WIDE_INT offset = INTVAL (operands[2]);
1031 if (operands[1] == stack_pointer_rtx)
1037 else if (offset < -255)
1040 operands[3] = GEN_INT (offset);
1041 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
1043 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
1046 ;; Reloading and elimination of the frame pointer can
1047 ;; sometimes cause this optimization to be missed.
1049 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1050 (match_operand:SI 1 "const_int_operand" ""))
1052 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
1054 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
1055 && (INTVAL (operands[1]) & 3) == 0"
1056 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
1060 (define_insn "addsi3_compare0"
1061 [(set (reg:CC_NOOV CC_REGNUM)
1063 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1064 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1066 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1067 (plus:SI (match_dup 1) (match_dup 2)))]
1071 sub%.\\t%0, %1, #%n2
1073 [(set_attr "conds" "set")
1074 (set_attr "type" "arlo_imm,arlo_imm,*")]
1077 (define_insn "*addsi3_compare0_scratch"
1078 [(set (reg:CC_NOOV CC_REGNUM)
1080 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1081 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1088 [(set_attr "conds" "set")
1089 (set_attr "predicable" "yes")
1090 (set_attr "type" "arlo_imm,arlo_imm,*")
1094 (define_insn "*compare_negsi_si"
1095 [(set (reg:CC_Z CC_REGNUM)
1097 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1098 (match_operand:SI 1 "s_register_operand" "l,r")))]
1101 [(set_attr "conds" "set")
1102 (set_attr "predicable" "yes")
1103 (set_attr "arch" "t2,*")
1104 (set_attr "length" "2,4")
1105 (set_attr "predicable_short_it" "yes,no")]
1108 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
1109 ;; addend is a constant.
1110 (define_insn "cmpsi2_addneg"
1111 [(set (reg:CC CC_REGNUM)
1113 (match_operand:SI 1 "s_register_operand" "r,r")
1114 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
1115 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1116 (plus:SI (match_dup 1)
1117 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
1118 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
1121 sub%.\\t%0, %1, #%n3"
1122 [(set_attr "conds" "set")]
1125 ;; Convert the sequence
1127 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1131 ;; bcs dest ((unsigned)rn >= 1)
1132 ;; similarly for the beq variant using bcc.
1133 ;; This is a common looping idiom (while (n--))
1135 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1136 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1138 (set (match_operand 2 "cc_register" "")
1139 (compare (match_dup 0) (const_int -1)))
1141 (if_then_else (match_operator 3 "equality_operator"
1142 [(match_dup 2) (const_int 0)])
1143 (match_operand 4 "" "")
1144 (match_operand 5 "" "")))]
1145 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1149 (match_dup 1) (const_int 1)))
1150 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1152 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1155 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1156 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1159 operands[2], const0_rtx);"
1162 ;; The next four insns work because they compare the result with one of
1163 ;; the operands, and we know that the use of the condition code is
1164 ;; either GEU or LTU, so we can use the carry flag from the addition
1165 ;; instead of doing the compare a second time.
1166 (define_insn "*addsi3_compare_op1"
1167 [(set (reg:CC_C CC_REGNUM)
1169 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1170 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1172 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1173 (plus:SI (match_dup 1) (match_dup 2)))]
1177 sub%.\\t%0, %1, #%n2
1179 [(set_attr "conds" "set")
1180 (set_attr "type" "arlo_imm,arlo_imm,*")]
1183 (define_insn "*addsi3_compare_op2"
1184 [(set (reg:CC_C CC_REGNUM)
1186 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1187 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1189 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1190 (plus:SI (match_dup 1) (match_dup 2)))]
1195 sub%.\\t%0, %1, #%n2"
1196 [(set_attr "conds" "set")
1197 (set_attr "type" "arlo_imm,arlo_imm,*")]
1200 (define_insn "*compare_addsi2_op0"
1201 [(set (reg:CC_C CC_REGNUM)
1203 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
1204 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
1213 [(set_attr "conds" "set")
1214 (set_attr "predicable" "yes")
1215 (set_attr "arch" "t2,t2,*,*,*")
1216 (set_attr "predicable_short_it" "yes,yes,no,no,no")
1217 (set_attr "length" "2,2,4,4,4")
1218 (set_attr "type" "arlo_imm,*,arlo_imm,arlo_imm,*")]
1221 (define_insn "*compare_addsi2_op1"
1222 [(set (reg:CC_C CC_REGNUM)
1224 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
1225 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
1234 [(set_attr "conds" "set")
1235 (set_attr "predicable" "yes")
1236 (set_attr "arch" "t2,t2,*,*,*")
1237 (set_attr "predicable_short_it" "yes,yes,no,no,no")
1238 (set_attr "length" "2,2,4,4,4")
1240 "arlo_imm,*,arlo_imm,arlo_imm,*")]
1243 (define_insn "*addsi3_carryin_<optab>"
1244 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1245 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1246 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1247 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1252 sbc%?\\t%0, %1, #%B2"
1253 [(set_attr "conds" "use")
1254 (set_attr "predicable" "yes")
1255 (set_attr "arch" "t2,*,*")
1256 (set_attr "length" "4")
1257 (set_attr "predicable_short_it" "yes,no,no")]
1260 (define_insn "*addsi3_carryin_alt2_<optab>"
1261 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1262 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1263 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1264 (match_operand:SI 2 "arm_rhs_operand" "l,rI,K")))]
1269 sbc%?\\t%0, %1, #%B2"
1270 [(set_attr "conds" "use")
1271 (set_attr "predicable" "yes")
1272 (set_attr "arch" "t2,*,*")
1273 (set_attr "length" "4")
1274 (set_attr "predicable_short_it" "yes,no,no")]
1277 (define_insn "*addsi3_carryin_shift_<optab>"
1278 [(set (match_operand:SI 0 "s_register_operand" "=r")
1280 (match_operator:SI 2 "shift_operator"
1281 [(match_operand:SI 3 "s_register_operand" "r")
1282 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1283 (match_operand:SI 1 "s_register_operand" "r"))
1284 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1286 "adc%?\\t%0, %1, %3%S2"
1287 [(set_attr "conds" "use")
1288 (set_attr "predicable" "yes")
1289 (set_attr "predicable_short_it" "no")
1290 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1291 (const_string "arlo_shift")
1292 (const_string "arlo_shift_reg")))]
1295 (define_insn "*addsi3_carryin_clobercc_<optab>"
1296 [(set (match_operand:SI 0 "s_register_operand" "=r")
1297 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1298 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1299 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1300 (clobber (reg:CC CC_REGNUM))]
1302 "adc%.\\t%0, %1, %2"
1303 [(set_attr "conds" "set")]
;; *subsi3_carryin: subtract-with-borrow step of a multi-word subtraction.
;; The RTL computes (op1 - op2) - borrow, where the borrow is modelled as
;; (ltu (reg:CC_C CC_REGNUM) 0), i.e. it reads the carry flag left by a
;; preceding flag-setting subtract.  "conds" is "use" because the pattern
;; consumes the flags rather than setting them; the second alternative
;; allows an immediate first operand (constraint "I") and is restricted
;; to the ARM arch variant ("arch" "*,a").
;; NOTE(review): original lines 1311-1314 (insn condition and assembler
;; template) appear to have been dropped by the extraction — restore from
;; the upstream arm.md before relying on this pattern.
1306 (define_insn "*subsi3_carryin"
1307 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1308 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1309 (match_operand:SI 2 "s_register_operand" "r,r"))
1310 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1315 [(set_attr "conds" "use")
1316 (set_attr "arch" "*,a")
1317 (set_attr "predicable" "yes")
1318 (set_attr "predicable_short_it" "no")]
;; *subsi3_carryin_const: add a constant and subtract the borrow in one
;; SBC.  Operand 2 uses constraint "K" (a constant whose bitwise
;; complement is a valid ARM immediate) and the template prints that
;; complement via the %B2 output modifier, so "sbc %0, %1, #%B2"
;; implements (op1 + op2) - borrow from the RTL.  "conds" "use": reads
;; the carry flag (the ltu on CC_C), does not set it.
;; NOTE(review): original line 1326 (the insn condition) appears to have
;; been dropped by the extraction.
1321 (define_insn "*subsi3_carryin_const"
1322 [(set (match_operand:SI 0 "s_register_operand" "=r")
1323 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1324 (match_operand:SI 2 "arm_not_operand" "K"))
1325 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1327 "sbc\\t%0, %1, #%B2"
1328 [(set_attr "conds" "use")]
1331 (define_insn "*subsi3_carryin_compare"
1332 [(set (reg:CC CC_REGNUM)
1333 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1334 (match_operand:SI 2 "s_register_operand" "r")))
1335 (set (match_operand:SI 0 "s_register_operand" "=r")
1336 (minus:SI (minus:SI (match_dup 1)
1338 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1341 [(set_attr "conds" "set")]
1344 (define_insn "*subsi3_carryin_compare_const"
1345 [(set (reg:CC CC_REGNUM)
1346 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1347 (match_operand:SI 2 "arm_not_operand" "K")))
1348 (set (match_operand:SI 0 "s_register_operand" "=r")
1349 (minus:SI (plus:SI (match_dup 1)
1351 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1353 "sbcs\\t%0, %1, #%B2"
1354 [(set_attr "conds" "set")]
1357 (define_insn "*subsi3_carryin_shift"
1358 [(set (match_operand:SI 0 "s_register_operand" "=r")
1360 (match_operand:SI 1 "s_register_operand" "r")
1361 (match_operator:SI 2 "shift_operator"
1362 [(match_operand:SI 3 "s_register_operand" "r")
1363 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1364 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1366 "sbc%?\\t%0, %1, %3%S2"
1367 [(set_attr "conds" "use")
1368 (set_attr "predicable" "yes")
1369 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1370 (const_string "arlo_shift")
1371 (const_string "arlo_shift_reg")))]
1374 (define_insn "*rsbsi3_carryin_shift"
1375 [(set (match_operand:SI 0 "s_register_operand" "=r")
1377 (match_operator:SI 2 "shift_operator"
1378 [(match_operand:SI 3 "s_register_operand" "r")
1379 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1380 (match_operand:SI 1 "s_register_operand" "r"))
1381 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1383 "rsc%?\\t%0, %1, %3%S2"
1384 [(set_attr "conds" "use")
1385 (set_attr "predicable" "yes")
1386 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1387 (const_string "arlo_shift")
1388 (const_string "arlo_shift_reg")))]
1391 ; transform ((x << y) - 1) to ~(~(x-1) << y), where x is a constant.
1393 [(set (match_operand:SI 0 "s_register_operand" "")
1394 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1395 (match_operand:SI 2 "s_register_operand" ""))
1397 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1399 [(set (match_dup 3) (match_dup 1))
1400 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1402 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1405 (define_expand "addsf3"
1406 [(set (match_operand:SF 0 "s_register_operand" "")
1407 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1408 (match_operand:SF 2 "s_register_operand" "")))]
1409 "TARGET_32BIT && TARGET_HARD_FLOAT"
1413 (define_expand "adddf3"
1414 [(set (match_operand:DF 0 "s_register_operand" "")
1415 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1416 (match_operand:DF 2 "s_register_operand" "")))]
1417 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1421 (define_expand "subdi3"
1423 [(set (match_operand:DI 0 "s_register_operand" "")
1424 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1425 (match_operand:DI 2 "s_register_operand" "")))
1426 (clobber (reg:CC CC_REGNUM))])]
1431 if (!REG_P (operands[1]))
1432 operands[1] = force_reg (DImode, operands[1]);
1433 if (!REG_P (operands[2]))
1434 operands[2] = force_reg (DImode, operands[2]);
;; *arm_subdi3: 64-bit subtraction, split after reload into
;;   SUBS low(0), low(1), low(2)   -- compare+minus parallel sets CC
;;   SBC  high(0), high(1), high(2) -- consumes the carry via ltu on CC_C
;; The split is deferred to "&& reload_completed" and the whole pattern
;; clobbers CC ("conds" "clob"), which keeps the SUBS/SBC pair together
;; as discussed in the file-header note about flag-consuming sequences.
;; %Q/%R in the commented-out template select the low/high word of a
;; DImode operand.
;; NOTE(review): original line 1452 (start of the split preparation
;; block) and line 1459 (its terminator) appear to have been dropped by
;; the extraction — the operands[3..5] assignments below are the body of
;; that preparation statement.
1439 (define_insn_and_split "*arm_subdi3"
1440 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1441 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1442 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1443 (clobber (reg:CC CC_REGNUM))]
1444 "TARGET_32BIT && !TARGET_NEON"
1445 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1446 "&& reload_completed"
1447 [(parallel [(set (reg:CC CC_REGNUM)
1448 (compare:CC (match_dup 1) (match_dup 2)))
1449 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1450 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1451 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1453 operands[3] = gen_highpart (SImode, operands[0]);
1454 operands[0] = gen_lowpart (SImode, operands[0]);
1455 operands[4] = gen_highpart (SImode, operands[1]);
1456 operands[1] = gen_lowpart (SImode, operands[1]);
1457 operands[5] = gen_highpart (SImode, operands[2]);
1458 operands[2] = gen_lowpart (SImode, operands[2]);
1460 [(set_attr "conds" "clob")
1461 (set_attr "length" "8")]
1464 (define_insn "*thumb_subdi3"
1465 [(set (match_operand:DI 0 "register_operand" "=l")
1466 (minus:DI (match_operand:DI 1 "register_operand" "0")
1467 (match_operand:DI 2 "register_operand" "l")))
1468 (clobber (reg:CC CC_REGNUM))]
1470 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1471 [(set_attr "length" "4")]
1474 (define_insn_and_split "*subdi_di_zesidi"
1475 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1476 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1478 (match_operand:SI 2 "s_register_operand" "r,r"))))
1479 (clobber (reg:CC CC_REGNUM))]
1481 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1482 "&& reload_completed"
1483 [(parallel [(set (reg:CC CC_REGNUM)
1484 (compare:CC (match_dup 1) (match_dup 2)))
1485 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1486 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1487 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1489 operands[3] = gen_highpart (SImode, operands[0]);
1490 operands[0] = gen_lowpart (SImode, operands[0]);
1491 operands[4] = gen_highpart (SImode, operands[1]);
1492 operands[1] = gen_lowpart (SImode, operands[1]);
1493 operands[5] = GEN_INT (~0);
1495 [(set_attr "conds" "clob")
1496 (set_attr "length" "8")]
1499 (define_insn_and_split "*subdi_di_sesidi"
1500 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1501 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1503 (match_operand:SI 2 "s_register_operand" "r,r"))))
1504 (clobber (reg:CC CC_REGNUM))]
1506 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1507 "&& reload_completed"
1508 [(parallel [(set (reg:CC CC_REGNUM)
1509 (compare:CC (match_dup 1) (match_dup 2)))
1510 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1511 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1512 (ashiftrt:SI (match_dup 2)
1514 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1516 operands[3] = gen_highpart (SImode, operands[0]);
1517 operands[0] = gen_lowpart (SImode, operands[0]);
1518 operands[4] = gen_highpart (SImode, operands[1]);
1519 operands[1] = gen_lowpart (SImode, operands[1]);
1521 [(set_attr "conds" "clob")
1522 (set_attr "length" "8")]
1525 (define_insn_and_split "*subdi_zesidi_di"
1526 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1527 (minus:DI (zero_extend:DI
1528 (match_operand:SI 2 "s_register_operand" "r,r"))
1529 (match_operand:DI 1 "s_register_operand" "0,r")))
1530 (clobber (reg:CC CC_REGNUM))]
1532 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1534 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1535 "&& reload_completed"
1536 [(parallel [(set (reg:CC CC_REGNUM)
1537 (compare:CC (match_dup 2) (match_dup 1)))
1538 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1539 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1540 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1542 operands[3] = gen_highpart (SImode, operands[0]);
1543 operands[0] = gen_lowpart (SImode, operands[0]);
1544 operands[4] = gen_highpart (SImode, operands[1]);
1545 operands[1] = gen_lowpart (SImode, operands[1]);
1547 [(set_attr "conds" "clob")
1548 (set_attr "length" "8")]
1551 (define_insn_and_split "*subdi_sesidi_di"
1552 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1553 (minus:DI (sign_extend:DI
1554 (match_operand:SI 2 "s_register_operand" "r,r"))
1555 (match_operand:DI 1 "s_register_operand" "0,r")))
1556 (clobber (reg:CC CC_REGNUM))]
1558 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1560 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1561 "&& reload_completed"
1562 [(parallel [(set (reg:CC CC_REGNUM)
1563 (compare:CC (match_dup 2) (match_dup 1)))
1564 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1565 (set (match_dup 3) (minus:SI (minus:SI
1566 (ashiftrt:SI (match_dup 2)
1569 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1571 operands[3] = gen_highpart (SImode, operands[0]);
1572 operands[0] = gen_lowpart (SImode, operands[0]);
1573 operands[4] = gen_highpart (SImode, operands[1]);
1574 operands[1] = gen_lowpart (SImode, operands[1]);
1576 [(set_attr "conds" "clob")
1577 (set_attr "length" "8")]
1580 (define_insn_and_split "*subdi_zesidi_zesidi"
1581 [(set (match_operand:DI 0 "s_register_operand" "=r")
1582 (minus:DI (zero_extend:DI
1583 (match_operand:SI 1 "s_register_operand" "r"))
1585 (match_operand:SI 2 "s_register_operand" "r"))))
1586 (clobber (reg:CC CC_REGNUM))]
1588 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1589 "&& reload_completed"
1590 [(parallel [(set (reg:CC CC_REGNUM)
1591 (compare:CC (match_dup 1) (match_dup 2)))
1592 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1593 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1594 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1596 operands[3] = gen_highpart (SImode, operands[0]);
1597 operands[0] = gen_lowpart (SImode, operands[0]);
1599 [(set_attr "conds" "clob")
1600 (set_attr "length" "8")]
1603 (define_expand "subsi3"
1604 [(set (match_operand:SI 0 "s_register_operand" "")
1605 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1606 (match_operand:SI 2 "s_register_operand" "")))]
1609 if (CONST_INT_P (operands[1]))
1613 arm_split_constant (MINUS, SImode, NULL_RTX,
1614 INTVAL (operands[1]), operands[0],
1615 operands[2], optimize && can_create_pseudo_p ());
1618 else /* TARGET_THUMB1 */
1619 operands[1] = force_reg (SImode, operands[1]);
1624 (define_insn "thumb1_subsi3_insn"
1625 [(set (match_operand:SI 0 "register_operand" "=l")
1626 (minus:SI (match_operand:SI 1 "register_operand" "l")
1627 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1630 [(set_attr "length" "2")
1631 (set_attr "conds" "set")])
1633 ; ??? Check Thumb-2 split length
1634 (define_insn_and_split "*arm_subsi3_insn"
1635 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r ,r,r,rk,r")
1636 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,rI,r,r,k ,?n")
1637 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r ,I,r,r ,r")))]
1649 "&& (CONST_INT_P (operands[1])
1650 && !const_ok_for_arm (INTVAL (operands[1])))"
1651 [(clobber (const_int 0))]
1653 arm_split_constant (MINUS, SImode, curr_insn,
1654 INTVAL (operands[1]), operands[0], operands[2], 0);
1657 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1658 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1659 (set_attr "predicable" "yes")
1660 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1661 (set_attr "type" "*,*,*,*,arlo_imm,arlo_imm,*,*,arlo_imm")]
1665 [(match_scratch:SI 3 "r")
1666 (set (match_operand:SI 0 "arm_general_register_operand" "")
1667 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1668 (match_operand:SI 2 "arm_general_register_operand" "")))]
1670 && !const_ok_for_arm (INTVAL (operands[1]))
1671 && const_ok_for_arm (~INTVAL (operands[1]))"
1672 [(set (match_dup 3) (match_dup 1))
1673 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1677 (define_insn "*subsi3_compare0"
1678 [(set (reg:CC_NOOV CC_REGNUM)
1680 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1681 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1683 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1684 (minus:SI (match_dup 1) (match_dup 2)))]
1690 [(set_attr "conds" "set")
1691 (set_attr "type" "arlo_imm,*,*")]
1694 (define_insn "subsi3_compare"
1695 [(set (reg:CC CC_REGNUM)
1696 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1697 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1698 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1699 (minus:SI (match_dup 1) (match_dup 2)))]
1705 [(set_attr "conds" "set")
1706 (set_attr "type" "arlo_imm,*,*")]
1709 (define_expand "subsf3"
1710 [(set (match_operand:SF 0 "s_register_operand" "")
1711 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1712 (match_operand:SF 2 "s_register_operand" "")))]
1713 "TARGET_32BIT && TARGET_HARD_FLOAT"
1717 (define_expand "subdf3"
1718 [(set (match_operand:DF 0 "s_register_operand" "")
1719 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1720 (match_operand:DF 2 "s_register_operand" "")))]
1721 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1726 ;; Multiplication insns
1728 (define_expand "mulhi3"
1729 [(set (match_operand:HI 0 "s_register_operand" "")
1730 (mult:HI (match_operand:HI 1 "s_register_operand" "")
1731 (match_operand:HI 2 "s_register_operand" "")))]
1732 "TARGET_DSP_MULTIPLY"
1735 rtx result = gen_reg_rtx (SImode);
1736 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1737 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1742 (define_expand "mulsi3"
1743 [(set (match_operand:SI 0 "s_register_operand" "")
1744 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1745 (match_operand:SI 1 "s_register_operand" "")))]
1750 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
;; *arm_mulsi3: 32x32->32 multiply for pre-v6 cores ("!arm_arch6").
;; Pre-v6 MUL cannot have the destination equal to the first source
;; register, so (per the comment above) the earlyclobber "&" on operand 0
;; combined with the "0" tie on operand 1 in the first alternative keeps
;; operand 0 distinct from operand 2.  The "%" on operand 1 marks the
;; multiply as commutative for reload.
;; NOTE(review): original line 1759 (the closing paren of this
;; define_insn) appears to have been dropped by the extraction.
1751 (define_insn "*arm_mulsi3"
1752 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1753 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1754 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1755 "TARGET_32BIT && !arm_arch6"
1756 "mul%?\\t%0, %2, %1"
1757 [(set_attr "type" "mul")
1758 (set_attr "predicable" "yes")]
;; *arm_mulsi3_v6: 32x32->32 multiply for v6 and later ("arm_arch6").
;; No earlyclobber here — v6 lifts the pre-v6 restriction that the MUL
;; destination must differ from the first source.  The first two
;; alternatives are the Thumb-2 low-register forms ("arch" "t2,t2,*"),
;; which can use the 16-bit encoding when predicated
;; ("predicable_short_it" "yes,yes,no").
;; NOTE(review): original lines 1772-1773 (closing of this define_insn)
;; appear to have been dropped by the extraction.
1761 (define_insn "*arm_mulsi3_v6"
1762 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
1763 (mult:SI (match_operand:SI 1 "s_register_operand" "0,l,r")
1764 (match_operand:SI 2 "s_register_operand" "l,0,r")))]
1765 "TARGET_32BIT && arm_arch6"
1766 "mul%?\\t%0, %1, %2"
1767 [(set_attr "type" "mul")
1768 (set_attr "predicable" "yes")
1769 (set_attr "arch" "t2,t2,*")
1770 (set_attr "length" "4")
1771 (set_attr "predicable_short_it" "yes,yes,no")]
1774 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1775 ; 1 and 2 are the same, because reload will make operand 0 match
1776 ; operand 1 without realizing that this conflicts with operand 2. We fix
1777 ; this by adding another alternative to match this case, and then `reload'
1778 ; it ourselves. This alternative must come first.
1779 (define_insn "*thumb_mulsi3"
1780 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1781 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1782 (match_operand:SI 2 "register_operand" "l,l,l")))]
1783 "TARGET_THUMB1 && !arm_arch6"
1785 if (which_alternative < 2)
1786 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1788 return \"mul\\t%0, %2\";
1790 [(set_attr "length" "4,4,2")
1791 (set_attr "type" "muls")]
1794 (define_insn "*thumb_mulsi3_v6"
1795 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1796 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1797 (match_operand:SI 2 "register_operand" "l,0,0")))]
1798 "TARGET_THUMB1 && arm_arch6"
1803 [(set_attr "length" "2")
1804 (set_attr "type" "muls")]
1807 (define_insn "*mulsi3_compare0"
1808 [(set (reg:CC_NOOV CC_REGNUM)
1809 (compare:CC_NOOV (mult:SI
1810 (match_operand:SI 2 "s_register_operand" "r,r")
1811 (match_operand:SI 1 "s_register_operand" "%0,r"))
1813 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1814 (mult:SI (match_dup 2) (match_dup 1)))]
1815 "TARGET_ARM && !arm_arch6"
1816 "mul%.\\t%0, %2, %1"
1817 [(set_attr "conds" "set")
1818 (set_attr "type" "muls")]
1821 (define_insn "*mulsi3_compare0_v6"
1822 [(set (reg:CC_NOOV CC_REGNUM)
1823 (compare:CC_NOOV (mult:SI
1824 (match_operand:SI 2 "s_register_operand" "r")
1825 (match_operand:SI 1 "s_register_operand" "r"))
1827 (set (match_operand:SI 0 "s_register_operand" "=r")
1828 (mult:SI (match_dup 2) (match_dup 1)))]
1829 "TARGET_ARM && arm_arch6 && optimize_size"
1830 "mul%.\\t%0, %2, %1"
1831 [(set_attr "conds" "set")
1832 (set_attr "type" "muls")]
1835 (define_insn "*mulsi_compare0_scratch"
1836 [(set (reg:CC_NOOV CC_REGNUM)
1837 (compare:CC_NOOV (mult:SI
1838 (match_operand:SI 2 "s_register_operand" "r,r")
1839 (match_operand:SI 1 "s_register_operand" "%0,r"))
1841 (clobber (match_scratch:SI 0 "=&r,&r"))]
1842 "TARGET_ARM && !arm_arch6"
1843 "mul%.\\t%0, %2, %1"
1844 [(set_attr "conds" "set")
1845 (set_attr "type" "muls")]
1848 (define_insn "*mulsi_compare0_scratch_v6"
1849 [(set (reg:CC_NOOV CC_REGNUM)
1850 (compare:CC_NOOV (mult:SI
1851 (match_operand:SI 2 "s_register_operand" "r")
1852 (match_operand:SI 1 "s_register_operand" "r"))
1854 (clobber (match_scratch:SI 0 "=r"))]
1855 "TARGET_ARM && arm_arch6 && optimize_size"
1856 "mul%.\\t%0, %2, %1"
1857 [(set_attr "conds" "set")
1858 (set_attr "type" "muls")]
1861 ;; Unnamed templates to match MLA instruction.
1863 (define_insn "*mulsi3addsi"
1864 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1866 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1867 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1868 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1869 "TARGET_32BIT && !arm_arch6"
1870 "mla%?\\t%0, %2, %1, %3"
1871 [(set_attr "type" "mla")
1872 (set_attr "predicable" "yes")]
1875 (define_insn "*mulsi3addsi_v6"
1876 [(set (match_operand:SI 0 "s_register_operand" "=r")
1878 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1879 (match_operand:SI 1 "s_register_operand" "r"))
1880 (match_operand:SI 3 "s_register_operand" "r")))]
1881 "TARGET_32BIT && arm_arch6"
1882 "mla%?\\t%0, %2, %1, %3"
1883 [(set_attr "type" "mla")
1884 (set_attr "predicable" "yes")
1885 (set_attr "predicable_short_it" "no")]
1888 (define_insn "*mulsi3addsi_compare0"
1889 [(set (reg:CC_NOOV CC_REGNUM)
1892 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1893 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1894 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1896 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1897 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1899 "TARGET_ARM && arm_arch6"
1900 "mla%.\\t%0, %2, %1, %3"
1901 [(set_attr "conds" "set")
1902 (set_attr "type" "mlas")]
1905 (define_insn "*mulsi3addsi_compare0_v6"
1906 [(set (reg:CC_NOOV CC_REGNUM)
1909 (match_operand:SI 2 "s_register_operand" "r")
1910 (match_operand:SI 1 "s_register_operand" "r"))
1911 (match_operand:SI 3 "s_register_operand" "r"))
1913 (set (match_operand:SI 0 "s_register_operand" "=r")
1914 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1916 "TARGET_ARM && arm_arch6 && optimize_size"
1917 "mla%.\\t%0, %2, %1, %3"
1918 [(set_attr "conds" "set")
1919 (set_attr "type" "mlas")]
1922 (define_insn "*mulsi3addsi_compare0_scratch"
1923 [(set (reg:CC_NOOV CC_REGNUM)
1926 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1927 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1928 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1930 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1931 "TARGET_ARM && !arm_arch6"
1932 "mla%.\\t%0, %2, %1, %3"
1933 [(set_attr "conds" "set")
1934 (set_attr "type" "mlas")]
1937 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1938 [(set (reg:CC_NOOV CC_REGNUM)
1941 (match_operand:SI 2 "s_register_operand" "r")
1942 (match_operand:SI 1 "s_register_operand" "r"))
1943 (match_operand:SI 3 "s_register_operand" "r"))
1945 (clobber (match_scratch:SI 0 "=r"))]
1946 "TARGET_ARM && arm_arch6 && optimize_size"
1947 "mla%.\\t%0, %2, %1, %3"
1948 [(set_attr "conds" "set")
1949 (set_attr "type" "mlas")]
;; *mulsi3subsi: multiply-and-subtract, op0 = op3 - op2*op1, emitted as
;; the MLS instruction.  Guarded on arm_arch_thumb2 since MLS was
;; introduced alongside Thumb-2.  Attribute "type" is "mla" (same
;; pipeline class as multiply-accumulate).
;; NOTE(review): original line 1954 (the "(minus:SI" wrapper between the
;; set and operand 3) appears to have been dropped by the extraction, as
;; has the closing paren of the define_insn.
1952 (define_insn "*mulsi3subsi"
1953 [(set (match_operand:SI 0 "s_register_operand" "=r")
1955 (match_operand:SI 3 "s_register_operand" "r")
1956 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1957 (match_operand:SI 1 "s_register_operand" "r"))))]
1958 "TARGET_32BIT && arm_arch_thumb2"
1959 "mls%?\\t%0, %2, %1, %3"
1960 [(set_attr "type" "mla")
1961 (set_attr "predicable" "yes")
1962 (set_attr "predicable_short_it" "no")]
1965 (define_expand "maddsidi4"
1966 [(set (match_operand:DI 0 "s_register_operand" "")
1969 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1970 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1971 (match_operand:DI 3 "s_register_operand" "")))]
1972 "TARGET_32BIT && arm_arch3m"
1975 (define_insn "*mulsidi3adddi"
1976 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1979 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1980 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1981 (match_operand:DI 1 "s_register_operand" "0")))]
1982 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1983 "smlal%?\\t%Q0, %R0, %3, %2"
1984 [(set_attr "type" "smlal")
1985 (set_attr "predicable" "yes")]
1988 (define_insn "*mulsidi3adddi_v6"
1989 [(set (match_operand:DI 0 "s_register_operand" "=r")
1992 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1993 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1994 (match_operand:DI 1 "s_register_operand" "0")))]
1995 "TARGET_32BIT && arm_arch6"
1996 "smlal%?\\t%Q0, %R0, %3, %2"
1997 [(set_attr "type" "smlal")
1998 (set_attr "predicable" "yes")
1999 (set_attr "predicable_short_it" "no")]
2002 ;; 32x32->64 widening multiply.
2003 ;; As with mulsi3, the only difference between the v3-5 and v6+
2004 ;; versions of these patterns is the requirement that the output not
2005 ;; overlap the inputs, but that still means we have to have a named
2006 ;; expander and two different starred insns.
2008 (define_expand "mulsidi3"
2009 [(set (match_operand:DI 0 "s_register_operand" "")
2011 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2012 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
2013 "TARGET_32BIT && arm_arch3m"
2017 (define_insn "*mulsidi3_nov6"
2018 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2020 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
2021 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2022 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2023 "smull%?\\t%Q0, %R0, %1, %2"
2024 [(set_attr "type" "smull")
2025 (set_attr "predicable" "yes")]
2028 (define_insn "*mulsidi3_v6"
2029 [(set (match_operand:DI 0 "s_register_operand" "=r")
2031 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2032 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2033 "TARGET_32BIT && arm_arch6"
2034 "smull%?\\t%Q0, %R0, %1, %2"
2035 [(set_attr "type" "smull")
2036 (set_attr "predicable" "yes")
2037 (set_attr "predicable_short_it" "no")]
2040 (define_expand "umulsidi3"
2041 [(set (match_operand:DI 0 "s_register_operand" "")
2043 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2044 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
2045 "TARGET_32BIT && arm_arch3m"
2049 (define_insn "*umulsidi3_nov6"
2050 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2052 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
2053 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2054 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2055 "umull%?\\t%Q0, %R0, %1, %2"
2056 [(set_attr "type" "umull")
2057 (set_attr "predicable" "yes")]
2060 (define_insn "*umulsidi3_v6"
2061 [(set (match_operand:DI 0 "s_register_operand" "=r")
2063 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2064 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
2065 "TARGET_32BIT && arm_arch6"
2066 "umull%?\\t%Q0, %R0, %1, %2"
2067 [(set_attr "type" "umull")
2068 (set_attr "predicable" "yes")
2069 (set_attr "predicable_short_it" "no")]
2072 (define_expand "umaddsidi4"
2073 [(set (match_operand:DI 0 "s_register_operand" "")
2076 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2077 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2078 (match_operand:DI 3 "s_register_operand" "")))]
2079 "TARGET_32BIT && arm_arch3m"
2082 (define_insn "*umulsidi3adddi"
2083 [(set (match_operand:DI 0 "s_register_operand" "=&r")
2086 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
2087 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2088 (match_operand:DI 1 "s_register_operand" "0")))]
2089 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2090 "umlal%?\\t%Q0, %R0, %3, %2"
2091 [(set_attr "type" "umlal")
2092 (set_attr "predicable" "yes")]
2095 (define_insn "*umulsidi3adddi_v6"
2096 [(set (match_operand:DI 0 "s_register_operand" "=r")
2099 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
2100 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
2101 (match_operand:DI 1 "s_register_operand" "0")))]
2102 "TARGET_32BIT && arm_arch6"
2103 "umlal%?\\t%Q0, %R0, %3, %2"
2104 [(set_attr "type" "umlal")
2105 (set_attr "predicable" "yes")
2106 (set_attr "predicable_short_it" "no")]
2109 (define_expand "smulsi3_highpart"
2111 [(set (match_operand:SI 0 "s_register_operand" "")
2115 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2116 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2118 (clobber (match_scratch:SI 3 ""))])]
2119 "TARGET_32BIT && arm_arch3m"
2123 (define_insn "*smulsi3_highpart_nov6"
2124 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2128 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2129 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2131 (clobber (match_scratch:SI 3 "=&r,&r"))]
2132 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2133 "smull%?\\t%3, %0, %2, %1"
2134 [(set_attr "type" "smull")
2135 (set_attr "predicable" "yes")]
2138 (define_insn "*smulsi3_highpart_v6"
2139 [(set (match_operand:SI 0 "s_register_operand" "=r")
2143 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2144 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2146 (clobber (match_scratch:SI 3 "=r"))]
2147 "TARGET_32BIT && arm_arch6"
2148 "smull%?\\t%3, %0, %2, %1"
2149 [(set_attr "type" "smull")
2150 (set_attr "predicable" "yes")
2151 (set_attr "predicable_short_it" "no")]
2154 (define_expand "umulsi3_highpart"
2156 [(set (match_operand:SI 0 "s_register_operand" "")
2160 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
2161 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
2163 (clobber (match_scratch:SI 3 ""))])]
2164 "TARGET_32BIT && arm_arch3m"
2168 (define_insn "*umulsi3_highpart_nov6"
2169 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2173 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
2174 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
2176 (clobber (match_scratch:SI 3 "=&r,&r"))]
2177 "TARGET_32BIT && arm_arch3m && !arm_arch6"
2178 "umull%?\\t%3, %0, %2, %1"
2179 [(set_attr "type" "umull")
2180 (set_attr "predicable" "yes")]
2183 (define_insn "*umulsi3_highpart_v6"
2184 [(set (match_operand:SI 0 "s_register_operand" "=r")
2188 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
2189 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
2191 (clobber (match_scratch:SI 3 "=r"))]
2192 "TARGET_32BIT && arm_arch6"
2193 "umull%?\\t%3, %0, %2, %1"
2194 [(set_attr "type" "umull")
2195 (set_attr "predicable" "yes")
2196 (set_attr "predicable_short_it" "no")]
2199 (define_insn "mulhisi3"
2200 [(set (match_operand:SI 0 "s_register_operand" "=r")
2201 (mult:SI (sign_extend:SI
2202 (match_operand:HI 1 "s_register_operand" "%r"))
2204 (match_operand:HI 2 "s_register_operand" "r"))))]
2205 "TARGET_DSP_MULTIPLY"
2206 "smulbb%?\\t%0, %1, %2"
2207 [(set_attr "type" "smulxy")
2208 (set_attr "predicable" "yes")]
2211 (define_insn "*mulhisi3tb"
2212 [(set (match_operand:SI 0 "s_register_operand" "=r")
2213 (mult:SI (ashiftrt:SI
2214 (match_operand:SI 1 "s_register_operand" "r")
2217 (match_operand:HI 2 "s_register_operand" "r"))))]
2218 "TARGET_DSP_MULTIPLY"
2219 "smultb%?\\t%0, %1, %2"
2220 [(set_attr "type" "smulxy")
2221 (set_attr "predicable" "yes")
2222 (set_attr "predicable_short_it" "no")]
;; SMULBT: bottom halfword of %1 times the top halfword of %2 (the RTL for
;; the top-half extraction of %2 is among the lines lost in extraction).
2225 (define_insn "*mulhisi3bt"
2226 [(set (match_operand:SI 0 "s_register_operand" "=r")
2227 (mult:SI (sign_extend:SI
2228 (match_operand:HI 1 "s_register_operand" "r"))
2230 (match_operand:SI 2 "s_register_operand" "r")
2232 "TARGET_DSP_MULTIPLY"
2233 "smulbt%?\\t%0, %1, %2"
2234 [(set_attr "type" "smulxy")
2235 (set_attr "predicable" "yes")
2236 (set_attr "predicable_short_it" "no")]
;; SMULTT: top halfword of %1 times top halfword of %2; both halves are
;; selected with ashiftrt in the full pattern (partially lost in extraction).
2239 (define_insn "*mulhisi3tt"
2240 [(set (match_operand:SI 0 "s_register_operand" "=r")
2241 (mult:SI (ashiftrt:SI
2242 (match_operand:SI 1 "s_register_operand" "r")
2245 (match_operand:SI 2 "s_register_operand" "r")
2247 "TARGET_DSP_MULTIPLY"
2248 "smultt%?\\t%0, %1, %2"
2249 [(set_attr "type" "smulxy")
2250 (set_attr "predicable" "yes")
2251 (set_attr "predicable_short_it" "no")]
;; Multiply-accumulate: %0 = (HI)%1 * (HI)%2 + %3, emitted as SMLABB
;; (bottom x bottom halfword product plus a 32-bit accumulator).
2254 (define_insn "maddhisi4"
2255 [(set (match_operand:SI 0 "s_register_operand" "=r")
2256 (plus:SI (mult:SI (sign_extend:SI
2257 (match_operand:HI 1 "s_register_operand" "r"))
2259 (match_operand:HI 2 "s_register_operand" "r")))
2260 (match_operand:SI 3 "s_register_operand" "r")))]
2261 "TARGET_DSP_MULTIPLY"
2262 "smlabb%?\\t%0, %1, %2, %3"
2263 [(set_attr "type" "smlaxy")
2264 (set_attr "predicable" "yes")
2265 (set_attr "predicable_short_it" "no")]
2268 ;; Note: there is no maddhisi4ibt because this one is the canonical form.
;; SMLATB multiply-accumulate: top halfword of %1 (via ashiftrt; shift
;; amount line lost in extraction) times bottom halfword of %2, plus %3.
2269 (define_insn "*maddhisi4tb"
2270 [(set (match_operand:SI 0 "s_register_operand" "=r")
2271 (plus:SI (mult:SI (ashiftrt:SI
2272 (match_operand:SI 1 "s_register_operand" "r")
2275 (match_operand:HI 2 "s_register_operand" "r")))
2276 (match_operand:SI 3 "s_register_operand" "r")))]
2277 "TARGET_DSP_MULTIPLY"
2278 "smlatb%?\\t%0, %1, %2, %3"
2279 [(set_attr "type" "smlaxy")
2280 (set_attr "predicable" "yes")
2281 (set_attr "predicable_short_it" "no")]
;; SMLATT multiply-accumulate: top halfwords of %1 and %2 multiplied,
;; plus accumulator %3 (halfword-select RTL partially lost in extraction).
2284 (define_insn "*maddhisi4tt"
2285 [(set (match_operand:SI 0 "s_register_operand" "=r")
2286 (plus:SI (mult:SI (ashiftrt:SI
2287 (match_operand:SI 1 "s_register_operand" "r")
2290 (match_operand:SI 2 "s_register_operand" "r")
2292 (match_operand:SI 3 "s_register_operand" "r")))]
2293 "TARGET_DSP_MULTIPLY"
2294 "smlatt%?\\t%0, %1, %2, %3"
2295 [(set_attr "type" "smlaxy")
2296 (set_attr "predicable" "yes")
2297 (set_attr "predicable_short_it" "no")]
;; 16x16+64 multiply-accumulate-long (SMLALBB): the 64-bit accumulator is
;; operand 3 with constraint "0", i.e. it must be the same register pair as
;; the DI result; %Q0/%R0 print its low/high words.
2300 (define_insn "maddhidi4"
2301 [(set (match_operand:DI 0 "s_register_operand" "=r")
2303 (mult:DI (sign_extend:DI
2304 (match_operand:HI 1 "s_register_operand" "r"))
2306 (match_operand:HI 2 "s_register_operand" "r")))
2307 (match_operand:DI 3 "s_register_operand" "0")))]
2308 "TARGET_DSP_MULTIPLY"
2309 "smlalbb%?\\t%Q0, %R0, %1, %2"
2310 [(set_attr "type" "smlalxy")
2311 (set_attr "predicable" "yes")
2312 (set_attr "predicable_short_it" "no")])
2314 ;; Note: there is no maddhidi4ibt because this one is the canonical form.
;; SMLALTB: top half of %1 times bottom half of %2, accumulated into the
;; 64-bit pair %Q0/%R0 (tied to operand 3 via constraint "0").
;; NOTE(review): halfword-select RTL lines are missing in this extraction.
2315 (define_insn "*maddhidi4tb"
2316 [(set (match_operand:DI 0 "s_register_operand" "=r")
2318 (mult:DI (sign_extend:DI
2320 (match_operand:SI 1 "s_register_operand" "r")
2323 (match_operand:HI 2 "s_register_operand" "r")))
2324 (match_operand:DI 3 "s_register_operand" "0")))]
2325 "TARGET_DSP_MULTIPLY"
2326 "smlaltb%?\\t%Q0, %R0, %1, %2"
2327 [(set_attr "type" "smlalxy")
2328 (set_attr "predicable" "yes")
2329 (set_attr "predicable_short_it" "no")])
;; SMLALTT: top halfwords of %1 and %2 multiplied and accumulated into the
;; 64-bit pair tied to operand 3 ("0" constraint).
;; NOTE(review): halfword-select RTL lines are missing in this extraction.
2331 (define_insn "*maddhidi4tt"
2332 [(set (match_operand:DI 0 "s_register_operand" "=r")
2334 (mult:DI (sign_extend:DI
2336 (match_operand:SI 1 "s_register_operand" "r")
2340 (match_operand:SI 2 "s_register_operand" "r")
2342 (match_operand:DI 3 "s_register_operand" "0")))]
2343 "TARGET_DSP_MULTIPLY"
2344 "smlaltt%?\\t%Q0, %R0, %1, %2"
2345 [(set_attr "type" "smlalxy")
2346 (set_attr "predicable" "yes")
2347 (set_attr "predicable_short_it" "no")])
;; Standard-name expander for single-float multiply; gated on hard float.
;; NOTE(review): the expander's closing lines (2354-2356) are absent here.
2349 (define_expand "mulsf3"
2350 [(set (match_operand:SF 0 "s_register_operand" "")
2351 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2352 (match_operand:SF 2 "s_register_operand" "")))]
2353 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-float multiply expander; excluded on single-precision-only VFP.
;; NOTE(review): trailing lines of this expander are absent here.
2357 (define_expand "muldf3"
2358 [(set (match_operand:DF 0 "s_register_operand" "")
2359 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2360 (match_operand:DF 2 "s_register_operand" "")))]
2361 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-float divide expander; requires VFP hard float.
;; NOTE(review): trailing lines of this expander are absent here.
2367 (define_expand "divsf3"
2368 [(set (match_operand:SF 0 "s_register_operand" "")
2369 (div:SF (match_operand:SF 1 "s_register_operand" "")
2370 (match_operand:SF 2 "s_register_operand" "")))]
2371 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
;; Double-float divide expander; requires double-precision VFP.
;; NOTE(review): trailing lines of this expander are absent here.
2374 (define_expand "divdf3"
2375 [(set (match_operand:DF 0 "s_register_operand" "")
2376 (div:DF (match_operand:DF 1 "s_register_operand" "")
2377 (match_operand:DF 2 "s_register_operand" "")))]
2378 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2381 ;; Boolean and,ior,xor insns
2383 ;; Split up double word logical operations
2385 ;; Split up simple DImode logical operations. Simply perform the logical
2386 ;; operation on the upper and lower halves of the registers.
;; After reload, split a DImode and/ior/xor of two core-register pairs into
;; two independent SImode operations on the low and high halves; NEON and
;; iWMMXt register destinations are excluded (they handle DI natively).
;; NOTE(review): the define_split header line itself is missing from this
;; extraction (numbering jumps to 2388).
2388 [(set (match_operand:DI 0 "s_register_operand" "")
2389 (match_operator:DI 6 "logical_binary_operator"
2390 [(match_operand:DI 1 "s_register_operand" "")
2391 (match_operand:DI 2 "s_register_operand" "")]))]
2392 "TARGET_32BIT && reload_completed
2393 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2394 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2395 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2396 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2399 operands[3] = gen_highpart (SImode, operands[0]);
2400 operands[0] = gen_lowpart (SImode, operands[0]);
2401 operands[4] = gen_highpart (SImode, operands[1]);
2402 operands[1] = gen_lowpart (SImode, operands[1]);
2403 operands[5] = gen_highpart (SImode, operands[2]);
2404 operands[2] = gen_lowpart (SImode, operands[2]);
;; Split a DImode logical op whose second input is a sign-extended SImode
;; value: the low half uses operand 2 directly, the high half uses the
;; sign replica (ashiftrt by 31) of operand 2.
;; NOTE(review): define_split header and some interior lines are missing.
2409 [(set (match_operand:DI 0 "s_register_operand" "")
2410 (match_operator:DI 6 "logical_binary_operator"
2411 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2412 (match_operand:DI 1 "s_register_operand" "")]))]
2413 "TARGET_32BIT && reload_completed"
2414 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2415 (set (match_dup 3) (match_op_dup:SI 6
2416 [(ashiftrt:SI (match_dup 2) (const_int 31))
2420 operands[3] = gen_highpart (SImode, operands[0]);
2421 operands[0] = gen_lowpart (SImode, operands[0]);
2422 operands[4] = gen_highpart (SImode, operands[1]);
2423 operands[1] = gen_lowpart (SImode, operands[1]);
2424 operands[5] = gen_highpart (SImode, operands[2]);
2425 operands[2] = gen_lowpart (SImode, operands[2]);
2429 ;; The zero extend of operand 2 means we can just copy the high part of
2430 ;; operand1 into operand0.
;; Split DI ior with a zero-extended SI operand: low half is an SImode OR,
;; and since the zero extension contributes nothing to the high half, the
;; high word is just copied from operand 1.
;; NOTE(review): define_split header line missing in this extraction.
2432 [(set (match_operand:DI 0 "s_register_operand" "")
2434 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2435 (match_operand:DI 1 "s_register_operand" "")))]
2436 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2437 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2438 (set (match_dup 3) (match_dup 4))]
2441 operands[4] = gen_highpart (SImode, operands[1]);
2442 operands[3] = gen_highpart (SImode, operands[0]);
2443 operands[0] = gen_lowpart (SImode, operands[0]);
2444 operands[1] = gen_lowpart (SImode, operands[1]);
2448 ;; The zero extend of operand 2 means we can just copy the high part of
2449 ;; operand1 into operand0.
;; Same shape as the ior split above but for xor: XOR with zero is the
;; identity, so the high word is again a plain copy of operand 1's high
;; word and only the low half needs an SImode XOR.
;; NOTE(review): define_split header line missing in this extraction.
2451 [(set (match_operand:DI 0 "s_register_operand" "")
2453 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2454 (match_operand:DI 1 "s_register_operand" "")))]
2455 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2456 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2457 (set (match_dup 3) (match_dup 4))]
2460 operands[4] = gen_highpart (SImode, operands[1]);
2461 operands[3] = gen_highpart (SImode, operands[0]);
2462 operands[0] = gen_lowpart (SImode, operands[0]);
2463 operands[1] = gen_lowpart (SImode, operands[1]);
;; Standard-name expander for 64-bit AND; operand 2 accepts whatever the
;; neon_inv_logic_op2 predicate allows (register or suitable immediate).
;; NOTE(review): the expander's condition/body lines are absent here.
2467 (define_expand "anddi3"
2468 [(set (match_operand:DI 0 "s_register_operand" "")
2469 (and:DI (match_operand:DI 1 "s_register_operand" "")
2470 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
;; 64-bit AND. Alternatives 0/1/6/7 use NEON (VAND, or an immediate form
;; via neon_output_logic_immediate); the core-register alternatives are
;; split after reload into two SImode ANDs built with simplify_gen_binary
;; so constant operands fold per-half. Split is suppressed for VFP regnos.
;; NOTE(review): several interior lines (e.g. 2488-2492, 2494-2495) are
;; missing from this extraction — the core-register output code and the
;; split trigger are incomplete here; verify against upstream arm.md.
2475 (define_insn_and_split "*anddi3_insn"
2476 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
2477 (and:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
2478 (match_operand:DI 2 "arm_anddi_operand_neon" "w ,DL,r ,r ,De,De,w ,DL")))]
2479 "TARGET_32BIT && !TARGET_IWMMXT"
2481 switch (which_alternative)
2483 case 0: /* fall through */
2484 case 6: return "vand\t%P0, %P1, %P2";
2485 case 1: /* fall through */
2486 case 7: return neon_output_logic_immediate ("vand", &operands[2],
2487 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2491 case 5: /* fall through */
2493 default: gcc_unreachable ();
2496 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
2497 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
2498 [(set (match_dup 3) (match_dup 4))
2499 (set (match_dup 5) (match_dup 6))]
2502 operands[3] = gen_lowpart (SImode, operands[0]);
2503 operands[5] = gen_highpart (SImode, operands[0]);
2505 operands[4] = simplify_gen_binary (AND, SImode,
2506 gen_lowpart (SImode, operands[1]),
2507 gen_lowpart (SImode, operands[2]));
2508 operands[6] = simplify_gen_binary (AND, SImode,
2509 gen_highpart (SImode, operands[1]),
2510 gen_highpart_mode (SImode, DImode, operands[2]));
2513 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
2514 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,
2515 avoid_neon_for_64bits,avoid_neon_for_64bits")
2516 (set_attr "length" "*,*,8,8,8,8,*,*")
;; AND of a DI value with a zero-extended SI value: splits after reload to
;; an SImode AND for the low half plus clearing the high half to zero
;; (the zero_extend guarantees the high word of the product of the AND is 0).
;; NOTE(review): output template lines between 2524 and 2527 are missing.
2520 (define_insn_and_split "*anddi_zesidi_di"
2521 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2522 (and:DI (zero_extend:DI
2523 (match_operand:SI 2 "s_register_operand" "r,r"))
2524 (match_operand:DI 1 "s_register_operand" "0,r")))]
2527 "TARGET_32BIT && reload_completed"
2528 ; The zero extend of operand 2 clears the high word of the output
2530 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2531 (set (match_dup 3) (const_int 0))]
2534 operands[3] = gen_highpart (SImode, operands[0]);
2535 operands[0] = gen_lowpart (SImode, operands[0]);
2536 operands[1] = gen_lowpart (SImode, operands[1]);
2538 [(set_attr "length" "8")]
;; AND of a DI value with a sign-extended SI value; two-instruction
;; sequence (length 8). NOTE(review): the condition and output template
;; lines (2546-2547) are missing from this extraction.
2541 (define_insn "*anddi_sesdi_di"
2542 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2543 (and:DI (sign_extend:DI
2544 (match_operand:SI 2 "s_register_operand" "r,r"))
2545 (match_operand:DI 1 "s_register_operand" "0,r")))]
2548 [(set_attr "length" "8")]
;; 32-bit AND expander. For constant masks it picks special sequences:
;; AND with 255 on arch6 becomes a QImode zero-extend; other constants go
;; through arm_split_constant. The Thumb1 path forces non-constants into
;; registers, turns small inverted constants into BIC, and recognizes
;; low-bit masks (2^i - 1) as extract or shift-left/shift-right pairs.
;; NOTE(review): many interior lines are missing (numbering gaps), so
;; several branches here are visibly incomplete; check upstream arm.md.
2551 (define_expand "andsi3"
2552 [(set (match_operand:SI 0 "s_register_operand" "")
2553 (and:SI (match_operand:SI 1 "s_register_operand" "")
2554 (match_operand:SI 2 "reg_or_int_operand" "")))]
2559 if (CONST_INT_P (operands[2]))
2561 if (INTVAL (operands[2]) == 255 && arm_arch6)
2563 operands[1] = convert_to_mode (QImode, operands[1], 1);
2564 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2568 arm_split_constant (AND, SImode, NULL_RTX,
2569 INTVAL (operands[2]), operands[0],
2571 optimize && can_create_pseudo_p ());
2576 else /* TARGET_THUMB1 */
2578 if (!CONST_INT_P (operands[2]))
2580 rtx tmp = force_reg (SImode, operands[2]);
2581 if (rtx_equal_p (operands[0], operands[1]))
2585 operands[2] = operands[1];
2593 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2595 operands[2] = force_reg (SImode,
2596 GEN_INT (~INTVAL (operands[2])));
2598 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2603 for (i = 9; i <= 31; i++)
2605 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2607 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2611 else if ((((HOST_WIDE_INT) 1) << i) - 1
2612 == ~INTVAL (operands[2]))
2614 rtx shift = GEN_INT (i);
2615 rtx reg = gen_reg_rtx (SImode);
2617 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2618 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2624 operands[2] = force_reg (SImode, operands[2]);
2630 ; ??? Check split length for Thumb-2
;; 32-bit AND insn: immediate AND, Thumb short form, BIC with inverted
;; immediate (#%B2), register form, or an arbitrary constant (alt 4,
;; length 16) that is split post-reload via arm_split_constant when the
;; constant fits neither AND nor BIC encodings.
;; NOTE(review): condition and part of the template (2635-2642) missing.
2631 (define_insn_and_split "*arm_andsi3_insn"
2632 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2633 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2634 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2639 bic%?\\t%0, %1, #%B2
2643 && CONST_INT_P (operands[2])
2644 && !(const_ok_for_arm (INTVAL (operands[2]))
2645 || const_ok_for_arm (~INTVAL (operands[2])))"
2646 [(clobber (const_int 0))]
2648 arm_split_constant (AND, SImode, curr_insn,
2649 INTVAL (operands[2]), operands[0], operands[1], 0);
2652 [(set_attr "length" "4,4,4,4,16")
2653 (set_attr "predicable" "yes")
2654 (set_attr "predicable_short_it" "no,yes,no,no,no")
2656 "arlo_imm,arlo_imm,*,*,arlo_imm")]
;; Thumb1 two-operand AND (destination tied to operand 1, "%0"); sets the
;; condition flags ("conds" "set"). NOTE(review): the condition and output
;; template lines (2663-2664) are missing from this extraction.
2659 (define_insn "*thumb1_andsi3_insn"
2660 [(set (match_operand:SI 0 "register_operand" "=l")
2661 (and:SI (match_operand:SI 1 "register_operand" "%0")
2662 (match_operand:SI 2 "register_operand" "l")))]
2665 [(set_attr "length" "2")
2666 (set_attr "type" "arlo_imm")
2667 (set_attr "conds" "set")])
;; AND that also sets CC_NOOV from the result; the BIC alternative handles
;; constants encodable only in inverted form (#%B2).
;; NOTE(review): compare wrapper and part of the template are missing.
2669 (define_insn "*andsi3_compare0"
2670 [(set (reg:CC_NOOV CC_REGNUM)
2672 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2673 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2675 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2676 (and:SI (match_dup 1) (match_dup 2)))]
2680 bic%.\\t%0, %1, #%B2
2682 [(set_attr "conds" "set")
2683 (set_attr "type" "arlo_imm,arlo_imm,*")]
;; Flag-setting AND whose result is discarded (scratch operand 2); the
;; register-immediate cases map to TST/BIC-to-scratch.
;; NOTE(review): compare wrapper and part of the template are missing.
2686 (define_insn "*andsi3_compare0_scratch"
2687 [(set (reg:CC_NOOV CC_REGNUM)
2689 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2690 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2692 (clobber (match_scratch:SI 2 "=X,r,X"))]
2696 bic%.\\t%2, %0, #%B1
2698 [(set_attr "conds" "set")
2699 (set_attr "type" "arlo_imm,arlo_imm,*")]
;; Compare a zero_extract bitfield against zero by rewriting it as a TST
;; with the mask ((1<<width)-1) << start; the condition limits width and
;; position so the mask is an encodable ARM immediate.
;; NOTE(review): lines 2708-2709 and 2714/2718-2719 are missing here.
2702 (define_insn "*zeroextractsi_compare0_scratch"
2703 [(set (reg:CC_NOOV CC_REGNUM)
2704 (compare:CC_NOOV (zero_extract:SI
2705 (match_operand:SI 0 "s_register_operand" "r")
2706 (match_operand 1 "const_int_operand" "n")
2707 (match_operand 2 "const_int_operand" "n"))
2710 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2711 && INTVAL (operands[1]) > 0
2712 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2713 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2715 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2716 << INTVAL (operands[2]));
2717 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2720 [(set_attr "conds" "set")
2721 (set_attr "predicable" "yes")
2722 (set_attr "predicable_short_it" "no")
2723 (set_attr "type" "arlo_imm")]
;; (bitfield != 0) as an SI value: split into a flag-setting AND with the
;; field mask followed by a conditional move of 1 over the (zero) result.
;; The duplicated condition is the usual insn_and_split idiom (insn
;; condition and split condition must match). Clobbers CC.
;; NOTE(review): interior lines (2732, 2734, 2739-2740, 2747, 2749, etc.)
;; are missing from this extraction.
2726 (define_insn_and_split "*ne_zeroextractsi"
2727 [(set (match_operand:SI 0 "s_register_operand" "=r")
2728 (ne:SI (zero_extract:SI
2729 (match_operand:SI 1 "s_register_operand" "r")
2730 (match_operand:SI 2 "const_int_operand" "n")
2731 (match_operand:SI 3 "const_int_operand" "n"))
2733 (clobber (reg:CC CC_REGNUM))]
2735 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2736 && INTVAL (operands[2]) > 0
2737 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2738 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2741 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2742 && INTVAL (operands[2]) > 0
2743 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2744 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2745 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2746 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2748 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2750 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2751 (match_dup 0) (const_int 1)))]
2753 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2754 << INTVAL (operands[3]));
2756 [(set_attr "conds" "clob")
2757 (set (attr "length")
2758 (if_then_else (eq_attr "is_thumb" "yes")
;; Variant of *ne_zeroextractsi for a field ending at bit 31: the bitfield
;; test becomes a flag-setting left shift by (32 - width), then a
;; conditional move of 1. Clobbers CC; length 8.
;; NOTE(review): interior lines (2768-2769, 2771-2773, 2776, 2778, etc.)
;; are missing from this extraction.
2763 (define_insn_and_split "*ne_zeroextractsi_shifted"
2764 [(set (match_operand:SI 0 "s_register_operand" "=r")
2765 (ne:SI (zero_extract:SI
2766 (match_operand:SI 1 "s_register_operand" "r")
2767 (match_operand:SI 2 "const_int_operand" "n")
2770 (clobber (reg:CC CC_REGNUM))]
2774 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2775 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2777 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2779 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2780 (match_dup 0) (const_int 1)))]
2782 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2784 [(set_attr "conds" "clob")
2785 (set_attr "length" "8")]
;; if_then_else keyed on (bitfield != 0): split into flag-setting AND with
;; the field mask, then a conditional move of operand 4 (which must not
;; overlap operand 0, per the reg_overlap_mentioned_p condition).
;; NOTE(review): interior lines (2794, 2796, 2798, 2804-2805, 2813, 2815,
;; etc.) are missing from this extraction.
2788 (define_insn_and_split "*ite_ne_zeroextractsi"
2789 [(set (match_operand:SI 0 "s_register_operand" "=r")
2790 (if_then_else:SI (ne (zero_extract:SI
2791 (match_operand:SI 1 "s_register_operand" "r")
2792 (match_operand:SI 2 "const_int_operand" "n")
2793 (match_operand:SI 3 "const_int_operand" "n"))
2795 (match_operand:SI 4 "arm_not_operand" "rIK")
2797 (clobber (reg:CC CC_REGNUM))]
2799 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2800 && INTVAL (operands[2]) > 0
2801 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2802 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2803 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2806 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2807 && INTVAL (operands[2]) > 0
2808 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2809 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2810 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2811 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2812 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2814 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2816 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2817 (match_dup 0) (match_dup 4)))]
2819 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2820 << INTVAL (operands[3]));
2822 [(set_attr "conds" "clob")
2823 (set_attr "length" "8")]
;; Shifted-field variant of the pattern above: the field test becomes a
;; flag-setting left shift by (32 - width); operand 3 must not overlap
;; operand 0. ARM mode only. Clobbers CC; length 8.
;; NOTE(review): interior lines (2831-2832, 2834, 2837, 2841, 2843, etc.)
;; are missing from this extraction.
2826 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2827 [(set (match_operand:SI 0 "s_register_operand" "=r")
2828 (if_then_else:SI (ne (zero_extract:SI
2829 (match_operand:SI 1 "s_register_operand" "r")
2830 (match_operand:SI 2 "const_int_operand" "n")
2833 (match_operand:SI 3 "arm_not_operand" "rIK")
2835 (clobber (reg:CC CC_REGNUM))]
2836 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2838 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2839 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2840 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2842 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2844 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2845 (match_dup 0) (match_dup 3)))]
2847 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2849 [(set_attr "conds" "clob")
2850 (set_attr "length" "8")]
;; Split a zero_extract into shift-left (to push the field to the top)
;; followed by logical shift-right (to bring it down zero-filled), using
;; scratch operand 4.
;; NOTE(review): the define_split header line is missing here.
2854 [(set (match_operand:SI 0 "s_register_operand" "")
2855 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2856 (match_operand:SI 2 "const_int_operand" "")
2857 (match_operand:SI 3 "const_int_operand" "")))
2858 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2860 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2861 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2863 HOST_WIDE_INT temp = INTVAL (operands[2]);
2865 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2866 operands[3] = GEN_INT (32 - temp);
2870 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
;; Split (op (zero_extract ...) reg) into a shift-left into scratch 6 and
;; the operator applied to the logical-shift-right of that scratch, so the
;; extract folds into the shifter operand of the ALU instruction.
;; NOTE(review): header and some interior lines missing in extraction.
2872 [(set (match_operand:SI 0 "s_register_operand" "")
2873 (match_operator:SI 1 "shiftable_operator"
2874 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2875 (match_operand:SI 3 "const_int_operand" "")
2876 (match_operand:SI 4 "const_int_operand" ""))
2877 (match_operand:SI 5 "s_register_operand" "")]))
2878 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2880 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2883 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2886 HOST_WIDE_INT temp = INTVAL (operands[3]);
2888 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2889 operands[4] = GEN_INT (32 - temp);
;; Split a sign_extract into shift-left then arithmetic shift-right, which
;; sign-extends the extracted field in place (no scratch needed).
;; NOTE(review): the define_split header line is missing here.
2894 [(set (match_operand:SI 0 "s_register_operand" "")
2895 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2896 (match_operand:SI 2 "const_int_operand" "")
2897 (match_operand:SI 3 "const_int_operand" "")))]
2899 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2900 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2902 HOST_WIDE_INT temp = INTVAL (operands[2]);
2904 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2905 operands[3] = GEN_INT (32 - temp);
;; Split (op (sign_extract ...) reg): shift-left into scratch 6, then the
;; operator applied to the arithmetic-shift-right of the scratch (the
;; sign-extending counterpart of the zero_extract split above).
;; NOTE(review): header and some interior lines missing in extraction.
2910 [(set (match_operand:SI 0 "s_register_operand" "")
2911 (match_operator:SI 1 "shiftable_operator"
2912 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2913 (match_operand:SI 3 "const_int_operand" "")
2914 (match_operand:SI 4 "const_int_operand" ""))
2915 (match_operand:SI 5 "s_register_operand" "")]))
2916 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2918 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2921 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2924 HOST_WIDE_INT temp = INTVAL (operands[3]);
2926 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2927 operands[4] = GEN_INT (32 - temp);
2931 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2932 ;;; represented by the bitfield, then this will produce incorrect results.
2933 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2934 ;;; which have a real bit-field insert instruction, the truncation happens
2935 ;;; in the bit-field insert instruction itself. Since arm does not have a
2936 ;;; bit-field insert instruction, we would have to emit code here to truncate
2937 ;;; the value before we insert. This loses some of the advantage of having
2938 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander. Strategy, per the visible branches:
;;  - Thumb-2 + unaligned_access + MEM destination with a 16/32-bit,
;;    byte-aligned field: emit an unaligned SI/HI store directly.
;;  - Thumb-2 register destination: insv_zero for a zero constant, a
;;    single ORR when the constant fills the field and is encodable,
;;    otherwise BFI via insv_t2.
;;  - Generic path: materialize through TARGET/SUBTARGET (avoiding subreg
;;    subtargets), with constant-specific AND/ORR masking, two shift/rotate
;;    tricks for fields anchored at bit 0 or bit 31 whose mask is not an
;;    immediate, and a general mask-AND-shift-OR fallback using
;;    andsi_notsi_si (BIC) to clear the destination field.
;; NOTE(review): many source lines are missing from this extraction
;; (braces, else-arms, and several emit calls) — treat this copy as
;; non-authoritative and consult upstream arm.md before editing.
2940 (define_expand "insv"
2941 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2942 (match_operand 1 "general_operand" "")
2943 (match_operand 2 "general_operand" ""))
2944 (match_operand 3 "reg_or_int_operand" ""))]
2945 "TARGET_ARM || arm_arch_thumb2"
2948 int start_bit = INTVAL (operands[2]);
2949 int width = INTVAL (operands[1]);
2950 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2951 rtx target, subtarget;
2953 if (arm_arch_thumb2)
2955 if (unaligned_access && MEM_P (operands[0])
2956 && s_register_operand (operands[3], GET_MODE (operands[3]))
2957 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2961 if (BYTES_BIG_ENDIAN)
2962 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2967 base_addr = adjust_address (operands[0], SImode,
2968 start_bit / BITS_PER_UNIT);
2969 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2973 rtx tmp = gen_reg_rtx (HImode);
2975 base_addr = adjust_address (operands[0], HImode,
2976 start_bit / BITS_PER_UNIT);
2977 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2978 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2982 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2984 bool use_bfi = TRUE;
2986 if (CONST_INT_P (operands[3]))
2988 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2992 emit_insn (gen_insv_zero (operands[0], operands[1],
2997 /* See if the set can be done with a single orr instruction. */
2998 if (val == mask && const_ok_for_arm (val << start_bit))
3004 if (!REG_P (operands[3]))
3005 operands[3] = force_reg (SImode, operands[3]);
3007 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3016 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3019 target = copy_rtx (operands[0]);
3020 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3021 subreg as the final target. */
3022 if (GET_CODE (target) == SUBREG)
3024 subtarget = gen_reg_rtx (SImode);
3025 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3026 < GET_MODE_SIZE (SImode))
3027 target = SUBREG_REG (target);
3032 if (CONST_INT_P (operands[3]))
3034 /* Since we are inserting a known constant, we may be able to
3035 reduce the number of bits that we have to clear so that
3036 the mask becomes simple. */
3037 /* ??? This code does not check to see if the new mask is actually
3038 simpler. It may not be. */
3039 rtx op1 = gen_reg_rtx (SImode);
3040 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3041 start of this pattern. */
3042 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3043 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3045 emit_insn (gen_andsi3 (op1, operands[0],
3046 gen_int_mode (~mask2, SImode)));
3047 emit_insn (gen_iorsi3 (subtarget, op1,
3048 gen_int_mode (op3_value << start_bit, SImode)));
3050 else if (start_bit == 0
3051 && !(const_ok_for_arm (mask)
3052 || const_ok_for_arm (~mask)))
3054 /* A Trick, since we are setting the bottom bits in the word,
3055 we can shift operand[3] up, operand[0] down, OR them together
3056 and rotate the result back again. This takes 3 insns, and
3057 the third might be mergeable into another op. */
3058 /* The shift up copes with the possibility that operand[3] is
3059 wider than the bitfield. */
3060 rtx op0 = gen_reg_rtx (SImode);
3061 rtx op1 = gen_reg_rtx (SImode);
3063 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3064 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3065 emit_insn (gen_iorsi3 (op1, op1, op0));
3066 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3068 else if ((width + start_bit == 32)
3069 && !(const_ok_for_arm (mask)
3070 || const_ok_for_arm (~mask)))
3072 /* Similar trick, but slightly less efficient. */
3074 rtx op0 = gen_reg_rtx (SImode);
3075 rtx op1 = gen_reg_rtx (SImode);
3077 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3078 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3079 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3080 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3084 rtx op0 = gen_int_mode (mask, SImode);
3085 rtx op1 = gen_reg_rtx (SImode);
3086 rtx op2 = gen_reg_rtx (SImode);
3088 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3090 rtx tmp = gen_reg_rtx (SImode);
3092 emit_insn (gen_movsi (tmp, op0));
3096 /* Mask out any bits in operand[3] that are not needed. */
3097 emit_insn (gen_andsi3 (op1, operands[3], op0));
3099 if (CONST_INT_P (op0)
3100 && (const_ok_for_arm (mask << start_bit)
3101 || const_ok_for_arm (~(mask << start_bit))))
3103 op0 = gen_int_mode (~(mask << start_bit), SImode);
3104 emit_insn (gen_andsi3 (op2, operands[0], op0));
3108 if (CONST_INT_P (op0))
3110 rtx tmp = gen_reg_rtx (SImode);
3112 emit_insn (gen_movsi (tmp, op0));
3117 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3119 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3123 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3125 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3128 if (subtarget != target)
3130 /* If TARGET is still a SUBREG, then it must be wider than a word,
3131 so we must be careful only to set the subword we were asked to. */
3132 if (GET_CODE (target) == SUBREG)
3133 emit_move_insn (target, subtarget);
3135 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield in a register (insert of zero); single 4-byte insn,
;; presumably BFC — the output-template lines (3146-3148) are missing from
;; this extraction, so confirm against upstream arm.md.
3142 (define_insn "insv_zero"
3143 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3144 (match_operand:SI 1 "const_int_operand" "M")
3145 (match_operand:SI 2 "const_int_operand" "M"))
3149 [(set_attr "length" "4")
3150 (set_attr "predicable" "yes")
3151 (set_attr "predicable_short_it" "no")]
;; Bitfield insert from a register via BFI: %3 is the source, %2 the lsb
;; position, %1 the width; destination %0 is read-modify-write ("+r").
;; NOTE(review): the insn condition line (3159) is missing here.
3154 (define_insn "insv_t2"
3155 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3156 (match_operand:SI 1 "const_int_operand" "M")
3157 (match_operand:SI 2 "const_int_operand" "M"))
3158 (match_operand:SI 3 "s_register_operand" "r"))]
3160 "bfi%?\t%0, %3, %2, %1"
3161 [(set_attr "length" "4")
3162 (set_attr "predicable" "yes")
3163 (set_attr "predicable_short_it" "no")]
3166 ; constants for op 2 will never be given to these patterns.
;; DImode BIC (and-not): split after reload into two SImode and-not
;; operations on the low and high halves; NEON/iWMMXt destination registers
;; are excluded from the split.
;; NOTE(review): output template and split-trigger lines (3171-3172, etc.)
;; are missing from this extraction.
3167 (define_insn_and_split "*anddi_notdi_di"
3168 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3169 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
3170 (match_operand:DI 2 "s_register_operand" "r,0")))]
3173 "TARGET_32BIT && reload_completed
3174 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
3175 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
3176 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
3177 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
3180 operands[3] = gen_highpart (SImode, operands[0]);
3181 operands[0] = gen_lowpart (SImode, operands[0]);
3182 operands[4] = gen_highpart (SImode, operands[1]);
3183 operands[1] = gen_lowpart (SImode, operands[1]);
3184 operands[5] = gen_highpart (SImode, operands[2]);
3185 operands[2] = gen_lowpart (SImode, operands[2]);
;; DI and-not of a zero-extended SI value: low half is BIC; because
;; (not (zero_extend x)) has all-ones in the high word, the high word of
;; the result is just a copy of operand 1's high word (hence the split).
;; Alternative 0 (inputs tied, "0") is a single 4-byte BIC; alt 1 is 8.
;; NOTE(review): template/condition lines (3196-3197, 3199, 3202-3203)
;; are missing from this extraction.
3191 (define_insn_and_split "*anddi_notzesidi_di"
3192 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3193 (and:DI (not:DI (zero_extend:DI
3194 (match_operand:SI 2 "s_register_operand" "r,r")))
3195 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3198 bic%?\\t%Q0, %Q1, %2
3200 ; (not (zero_extend ...)) allows us to just copy the high word from
3201 ; operand1 to operand0.
3204 && operands[0] != operands[1]"
3205 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3206 (set (match_dup 3) (match_dup 4))]
3209 operands[3] = gen_highpart (SImode, operands[0]);
3210 operands[0] = gen_lowpart (SImode, operands[0]);
3211 operands[4] = gen_highpart (SImode, operands[1]);
3212 operands[1] = gen_lowpart (SImode, operands[1]);
3214 [(set_attr "length" "4,8")
3215 (set_attr "predicable" "yes")
3216 (set_attr "predicable_short_it" "no")]
;; DI and-not of a sign-extended SI value: low half is BIC with operand 2,
;; high half is BIC with the sign replica (ashiftrt 31) of operand 2.
;; NOTE(review): template/condition lines (3224-3225, 3230-3232) are
;; missing from this extraction.
3219 (define_insn_and_split "*anddi_notsesidi_di"
3220 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3221 (and:DI (not:DI (sign_extend:DI
3222 (match_operand:SI 2 "s_register_operand" "r,r")))
3223 (match_operand:DI 1 "s_register_operand" "0,r")))]
3226 "TARGET_32BIT && reload_completed"
3227 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
3228 (set (match_dup 3) (and:SI (not:SI
3229 (ashiftrt:SI (match_dup 2) (const_int 31)))
3233 operands[3] = gen_highpart (SImode, operands[0]);
3234 operands[0] = gen_lowpart (SImode, operands[0]);
3235 operands[4] = gen_highpart (SImode, operands[1]);
3236 operands[1] = gen_lowpart (SImode, operands[1]);
3238 [(set_attr "length" "8")
3239 (set_attr "predicable" "yes")
3240 (set_attr "predicable_short_it" "no")]
;; SImode and-not (%0 = %1 & ~%2), emitted as BIC; note operand 2 is the
;; inverted input, matching the BIC operand order.
;; NOTE(review): the insn condition line (3247) is missing here.
3243 (define_insn "andsi_notsi_si"
3244 [(set (match_operand:SI 0 "s_register_operand" "=r")
3245 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3246 (match_operand:SI 1 "s_register_operand" "r")))]
3248 "bic%?\\t%0, %1, %2"
3249 [(set_attr "predicable" "yes")
3250 (set_attr "predicable_short_it" "no")]
;; Thumb1 BIC: destination tied to operand 2 ("0"); 2-byte encoding and
;; sets the flags. NOTE(review): the condition and output template lines
;; (3257-3258) are missing from this extraction.
3253 (define_insn "thumb1_bicsi3"
3254 [(set (match_operand:SI 0 "register_operand" "=l")
3255 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
3256 (match_operand:SI 2 "register_operand" "0")))]
3259 [(set_attr "length" "2")
3260 (set_attr "conds" "set")])
;; BIC with a shifted second operand (%S4 prints the shift); the type
;; attribute distinguishes immediate-shift from register-shift forms.
;; NOTE(review): the insn condition line (3268) is missing here.
3262 (define_insn "andsi_not_shiftsi_si"
3263 [(set (match_operand:SI 0 "s_register_operand" "=r")
3264 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3265 [(match_operand:SI 2 "s_register_operand" "r")
3266 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3267 (match_operand:SI 1 "s_register_operand" "r")))]
3269 "bic%?\\t%0, %1, %2%S4"
3270 [(set_attr "predicable" "yes")
3271 (set_attr "shift" "2")
3272 (set_attr "type" (if_then_else (match_operand 3 "const_int_operand" "")
3273 (const_string "arlo_shift")
3274 (const_string "arlo_shift_reg")))]
;; Flag-setting BIC (BICS): computes %1 & ~%2 into %0 and sets CC_NOOV.
;; NOTE(review): the compare wrapper and condition lines (3279, 3282,
;; 3285) are missing from this extraction.
3277 (define_insn "*andsi_notsi_si_compare0"
3278 [(set (reg:CC_NOOV CC_REGNUM)
3280 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3281 (match_operand:SI 1 "s_register_operand" "r"))
3283 (set (match_operand:SI 0 "s_register_operand" "=r")
3284 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3286 "bic%.\\t%0, %1, %2"
3287 [(set_attr "conds" "set")]
;; Flag-setting BIC whose result is discarded into a scratch register;
;; only the condition codes are kept.
;; NOTE(review): compare wrapper/condition lines are missing here.
3290 (define_insn "*andsi_notsi_si_compare0_scratch"
3291 [(set (reg:CC_NOOV CC_REGNUM)
3293 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3294 (match_operand:SI 1 "s_register_operand" "r"))
3296 (clobber (match_scratch:SI 0 "=r"))]
3298 "bic%.\\t%0, %1, %2"
3299 [(set_attr "conds" "set")]
;; Standard-name expander for 64-bit OR; operand 2 accepts whatever the
;; neon_logic_op2 predicate allows (register or suitable immediate).
;; NOTE(review): the expander's condition/body lines are absent here.
3302 (define_expand "iordi3"
3303 [(set (match_operand:DI 0 "s_register_operand" "")
3304 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3305 (match_operand:DI 2 "neon_logic_op2" "")))]
3310 (define_insn_and_split "*iordi3_insn"
3311 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3312 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3313 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3314 "TARGET_32BIT && !TARGET_IWMMXT"
3316 switch (which_alternative)
3318 case 0: /* fall through */
3319 case 6: return "vorr\t%P0, %P1, %P2";
3320 case 1: /* fall through */
3321 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3322 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3328 default: gcc_unreachable ();
3331 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3332 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3333 [(set (match_dup 3) (match_dup 4))
3334 (set (match_dup 5) (match_dup 6))]
3337 operands[3] = gen_lowpart (SImode, operands[0]);
3338 operands[5] = gen_highpart (SImode, operands[0]);
3340 operands[4] = simplify_gen_binary (IOR, SImode,
3341 gen_lowpart (SImode, operands[1]),
3342 gen_lowpart (SImode, operands[2]));
3343 operands[6] = simplify_gen_binary (IOR, SImode,
3344 gen_highpart (SImode, operands[1]),
3345 gen_highpart_mode (SImode, DImode, operands[2]));
3348 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3349 (set_attr "length" "*,*,8,8,8,8,*,*")
3350 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
3353 (define_insn "*iordi_zesidi_di"
3354 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3355 (ior:DI (zero_extend:DI
3356 (match_operand:SI 2 "s_register_operand" "r,r"))
3357 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3360 orr%?\\t%Q0, %Q1, %2
3362 [(set_attr "length" "4,8")
3363 (set_attr "predicable" "yes")
3364 (set_attr "predicable_short_it" "no")]
3367 (define_insn "*iordi_sesidi_di"
3368 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3369 (ior:DI (sign_extend:DI
3370 (match_operand:SI 2 "s_register_operand" "r,r"))
3371 (match_operand:DI 1 "s_register_operand" "0,r")))]
3374 [(set_attr "length" "8")
3375 (set_attr "predicable" "yes")]
3378 (define_expand "iorsi3"
3379 [(set (match_operand:SI 0 "s_register_operand" "")
3380 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3381 (match_operand:SI 2 "reg_or_int_operand" "")))]
3384 if (CONST_INT_P (operands[2]))
3388 arm_split_constant (IOR, SImode, NULL_RTX,
3389 INTVAL (operands[2]), operands[0], operands[1],
3390 optimize && can_create_pseudo_p ());
3393 else /* TARGET_THUMB1 */
3395 rtx tmp = force_reg (SImode, operands[2]);
3396 if (rtx_equal_p (operands[0], operands[1]))
3400 operands[2] = operands[1];
3408 (define_insn_and_split "*iorsi3_insn"
3409 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3410 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3411 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3416 orn%?\\t%0, %1, #%B2
3420 && CONST_INT_P (operands[2])
3421 && !(const_ok_for_arm (INTVAL (operands[2]))
3422 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3423 [(clobber (const_int 0))]
3425 arm_split_constant (IOR, SImode, curr_insn,
3426 INTVAL (operands[2]), operands[0], operands[1], 0);
3429 [(set_attr "length" "4,4,4,4,16")
3430 (set_attr "arch" "32,t2,t2,32,32")
3431 (set_attr "predicable" "yes")
3432 (set_attr "predicable_short_it" "no,yes,no,no,no")
3433 (set_attr "type" "arlo_imm,*,arlo_imm,*,*")]
3436 (define_insn "*thumb1_iorsi3_insn"
3437 [(set (match_operand:SI 0 "register_operand" "=l")
3438 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3439 (match_operand:SI 2 "register_operand" "l")))]
3442 [(set_attr "length" "2")
3443 (set_attr "conds" "set")])
3446 [(match_scratch:SI 3 "r")
3447 (set (match_operand:SI 0 "arm_general_register_operand" "")
3448 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3449 (match_operand:SI 2 "const_int_operand" "")))]
3451 && !const_ok_for_arm (INTVAL (operands[2]))
3452 && const_ok_for_arm (~INTVAL (operands[2]))"
3453 [(set (match_dup 3) (match_dup 2))
3454 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3458 (define_insn "*iorsi3_compare0"
3459 [(set (reg:CC_NOOV CC_REGNUM)
3460 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3461 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3463 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3464 (ior:SI (match_dup 1) (match_dup 2)))]
3466 "orr%.\\t%0, %1, %2"
3467 [(set_attr "conds" "set")
3468 (set_attr "type" "arlo_imm,*")]
3471 (define_insn "*iorsi3_compare0_scratch"
3472 [(set (reg:CC_NOOV CC_REGNUM)
3473 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3474 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3476 (clobber (match_scratch:SI 0 "=r,r"))]
3478 "orr%.\\t%0, %1, %2"
3479 [(set_attr "conds" "set")
3480 (set_attr "type" "arlo_imm,*")]
3483 (define_expand "xordi3"
3484 [(set (match_operand:DI 0 "s_register_operand" "")
3485 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3486 (match_operand:DI 2 "arm_xordi_operand" "")))]
3491 (define_insn_and_split "*xordi3_insn"
3492 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,&r,&r,?w")
3493 (xor:DI (match_operand:DI 1 "s_register_operand" "w ,%0,r ,0 ,r ,w")
3494 (match_operand:DI 2 "arm_xordi_operand" "w ,r ,r ,Dg,Dg,w")))]
3495 "TARGET_32BIT && !TARGET_IWMMXT"
3497 switch (which_alternative)
3502 case 4: /* fall through */
3504 case 0: /* fall through */
3505 case 5: return "veor\t%P0, %P1, %P2";
3506 default: gcc_unreachable ();
3509 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3510 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3511 [(set (match_dup 3) (match_dup 4))
3512 (set (match_dup 5) (match_dup 6))]
3515 operands[3] = gen_lowpart (SImode, operands[0]);
3516 operands[5] = gen_highpart (SImode, operands[0]);
3518 operands[4] = simplify_gen_binary (XOR, SImode,
3519 gen_lowpart (SImode, operands[1]),
3520 gen_lowpart (SImode, operands[2]));
3521 operands[6] = simplify_gen_binary (XOR, SImode,
3522 gen_highpart (SImode, operands[1]),
3523 gen_highpart_mode (SImode, DImode, operands[2]));
3526 [(set_attr "length" "*,8,8,8,8,*")
3527 (set_attr "neon_type" "neon_int_1,*,*,*,*,neon_int_1")
3528 (set_attr "arch" "neon_for_64bits,*,*,*,*,avoid_neon_for_64bits")]
3531 (define_insn "*xordi_zesidi_di"
3532 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3533 (xor:DI (zero_extend:DI
3534 (match_operand:SI 2 "s_register_operand" "r,r"))
3535 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3538 eor%?\\t%Q0, %Q1, %2
3540 [(set_attr "length" "4,8")
3541 (set_attr "predicable" "yes")
3542 (set_attr "predicable_short_it" "no")]
3545 (define_insn "*xordi_sesidi_di"
3546 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3547 (xor:DI (sign_extend:DI
3548 (match_operand:SI 2 "s_register_operand" "r,r"))
3549 (match_operand:DI 1 "s_register_operand" "0,r")))]
3552 [(set_attr "length" "8")
3553 (set_attr "predicable" "yes")]
3556 (define_expand "xorsi3"
3557 [(set (match_operand:SI 0 "s_register_operand" "")
3558 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3559 (match_operand:SI 2 "reg_or_int_operand" "")))]
3561 "if (CONST_INT_P (operands[2]))
3565 arm_split_constant (XOR, SImode, NULL_RTX,
3566 INTVAL (operands[2]), operands[0], operands[1],
3567 optimize && can_create_pseudo_p ());
3570 else /* TARGET_THUMB1 */
3572 rtx tmp = force_reg (SImode, operands[2]);
3573 if (rtx_equal_p (operands[0], operands[1]))
3577 operands[2] = operands[1];
3584 (define_insn_and_split "*arm_xorsi3"
3585 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3586 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3587 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3595 && CONST_INT_P (operands[2])
3596 && !const_ok_for_arm (INTVAL (operands[2]))"
3597 [(clobber (const_int 0))]
3599 arm_split_constant (XOR, SImode, curr_insn,
3600 INTVAL (operands[2]), operands[0], operands[1], 0);
3603 [(set_attr "length" "4,4,4,16")
3604 (set_attr "predicable" "yes")
3605 (set_attr "predicable_short_it" "no,yes,no,no")
3606 (set_attr "type" "arlo_imm,*,*,*")]
3609 (define_insn "*thumb1_xorsi3_insn"
3610 [(set (match_operand:SI 0 "register_operand" "=l")
3611 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3612 (match_operand:SI 2 "register_operand" "l")))]
3615 [(set_attr "length" "2")
3616 (set_attr "conds" "set")
3617 (set_attr "type" "arlo_imm")]
3620 (define_insn "*xorsi3_compare0"
3621 [(set (reg:CC_NOOV CC_REGNUM)
3622 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3623 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3625 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3626 (xor:SI (match_dup 1) (match_dup 2)))]
3628 "eor%.\\t%0, %1, %2"
3629 [(set_attr "conds" "set")
3630 (set_attr "type" "arlo_imm,*")]
3633 (define_insn "*xorsi3_compare0_scratch"
3634 [(set (reg:CC_NOOV CC_REGNUM)
3635 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3636 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3640 [(set_attr "conds" "set")
3641 (set_attr "type" "arlo_imm,*")]
3644 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3645 ; (NOT D) we can sometimes merge the final NOT into one of the following
3649 [(set (match_operand:SI 0 "s_register_operand" "")
3650 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3651 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3652 (match_operand:SI 3 "arm_rhs_operand" "")))
3653 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3655 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3656 (not:SI (match_dup 3))))
3657 (set (match_dup 0) (not:SI (match_dup 4)))]
3661 (define_insn_and_split "*andsi_iorsi3_notsi"
3662 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3663 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3664 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3665 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3667 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3668 "&& reload_completed"
3669 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3670 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3672 [(set_attr "length" "8")
3673 (set_attr "ce_count" "2")
3674 (set_attr "predicable" "yes")
3675 (set_attr "predicable_short_it" "no")]
3678 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3679 ; insns are available?
3681 [(set (match_operand:SI 0 "s_register_operand" "")
3682 (match_operator:SI 1 "logical_binary_operator"
3683 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3684 (match_operand:SI 3 "const_int_operand" "")
3685 (match_operand:SI 4 "const_int_operand" ""))
3686 (match_operator:SI 9 "logical_binary_operator"
3687 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3688 (match_operand:SI 6 "const_int_operand" ""))
3689 (match_operand:SI 7 "s_register_operand" "")])]))
3690 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3692 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3693 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3696 [(ashift:SI (match_dup 2) (match_dup 4))
3700 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3703 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3707 [(set (match_operand:SI 0 "s_register_operand" "")
3708 (match_operator:SI 1 "logical_binary_operator"
3709 [(match_operator:SI 9 "logical_binary_operator"
3710 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3711 (match_operand:SI 6 "const_int_operand" ""))
3712 (match_operand:SI 7 "s_register_operand" "")])
3713 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3714 (match_operand:SI 3 "const_int_operand" "")
3715 (match_operand:SI 4 "const_int_operand" ""))]))
3716 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3718 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3719 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3722 [(ashift:SI (match_dup 2) (match_dup 4))
3726 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3729 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3733 [(set (match_operand:SI 0 "s_register_operand" "")
3734 (match_operator:SI 1 "logical_binary_operator"
3735 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3736 (match_operand:SI 3 "const_int_operand" "")
3737 (match_operand:SI 4 "const_int_operand" ""))
3738 (match_operator:SI 9 "logical_binary_operator"
3739 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3740 (match_operand:SI 6 "const_int_operand" ""))
3741 (match_operand:SI 7 "s_register_operand" "")])]))
3742 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3744 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3745 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3748 [(ashift:SI (match_dup 2) (match_dup 4))
3752 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3755 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3759 [(set (match_operand:SI 0 "s_register_operand" "")
3760 (match_operator:SI 1 "logical_binary_operator"
3761 [(match_operator:SI 9 "logical_binary_operator"
3762 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3763 (match_operand:SI 6 "const_int_operand" ""))
3764 (match_operand:SI 7 "s_register_operand" "")])
3765 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3766 (match_operand:SI 3 "const_int_operand" "")
3767 (match_operand:SI 4 "const_int_operand" ""))]))
3768 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3770 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3771 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3774 [(ashift:SI (match_dup 2) (match_dup 4))
3778 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3781 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3785 ;; Minimum and maximum insns
3787 (define_expand "smaxsi3"
3789 (set (match_operand:SI 0 "s_register_operand" "")
3790 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3791 (match_operand:SI 2 "arm_rhs_operand" "")))
3792 (clobber (reg:CC CC_REGNUM))])]
3795 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3797 /* No need for a clobber of the condition code register here. */
3798 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3799 gen_rtx_SMAX (SImode, operands[1],
3805 (define_insn "*smax_0"
3806 [(set (match_operand:SI 0 "s_register_operand" "=r")
3807 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3810 "bic%?\\t%0, %1, %1, asr #31"
3811 [(set_attr "predicable" "yes")
3812 (set_attr "predicable_short_it" "no")]
3815 (define_insn "*smax_m1"
3816 [(set (match_operand:SI 0 "s_register_operand" "=r")
3817 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3820 "orr%?\\t%0, %1, %1, asr #31"
3821 [(set_attr "predicable" "yes")
3822 (set_attr "predicable_short_it" "no")]
3825 (define_insn_and_split "*arm_smax_insn"
3826 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3827 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3828 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3829 (clobber (reg:CC CC_REGNUM))]
3832 ; cmp\\t%1, %2\;movlt\\t%0, %2
3833 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3835 [(set (reg:CC CC_REGNUM)
3836 (compare:CC (match_dup 1) (match_dup 2)))
3838 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3842 [(set_attr "conds" "clob")
3843 (set_attr "length" "8,12")]
3846 (define_expand "sminsi3"
3848 (set (match_operand:SI 0 "s_register_operand" "")
3849 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3850 (match_operand:SI 2 "arm_rhs_operand" "")))
3851 (clobber (reg:CC CC_REGNUM))])]
3854 if (operands[2] == const0_rtx)
3856 /* No need for a clobber of the condition code register here. */
3857 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3858 gen_rtx_SMIN (SImode, operands[1],
3864 (define_insn "*smin_0"
3865 [(set (match_operand:SI 0 "s_register_operand" "=r")
3866 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3869 "and%?\\t%0, %1, %1, asr #31"
3870 [(set_attr "predicable" "yes")
3871 (set_attr "predicable_short_it" "no")]
3874 (define_insn_and_split "*arm_smin_insn"
3875 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3876 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3877 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3878 (clobber (reg:CC CC_REGNUM))]
3881 ; cmp\\t%1, %2\;movge\\t%0, %2
3882 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3884 [(set (reg:CC CC_REGNUM)
3885 (compare:CC (match_dup 1) (match_dup 2)))
3887 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3891 [(set_attr "conds" "clob")
3892 (set_attr "length" "8,12")]
3895 (define_expand "umaxsi3"
3897 (set (match_operand:SI 0 "s_register_operand" "")
3898 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3899 (match_operand:SI 2 "arm_rhs_operand" "")))
3900 (clobber (reg:CC CC_REGNUM))])]
3905 (define_insn_and_split "*arm_umaxsi3"
3906 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3907 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3908 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3909 (clobber (reg:CC CC_REGNUM))]
3912 ; cmp\\t%1, %2\;movcc\\t%0, %2
3913 ; cmp\\t%1, %2\;movcs\\t%0, %1
3914 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3916 [(set (reg:CC CC_REGNUM)
3917 (compare:CC (match_dup 1) (match_dup 2)))
3919 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3923 [(set_attr "conds" "clob")
3924 (set_attr "length" "8,8,12")]
3927 (define_expand "uminsi3"
3929 (set (match_operand:SI 0 "s_register_operand" "")
3930 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3931 (match_operand:SI 2 "arm_rhs_operand" "")))
3932 (clobber (reg:CC CC_REGNUM))])]
3937 (define_insn_and_split "*arm_uminsi3"
3938 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3939 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3940 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3941 (clobber (reg:CC CC_REGNUM))]
3944 ; cmp\\t%1, %2\;movcs\\t%0, %2
3945 ; cmp\\t%1, %2\;movcc\\t%0, %1
3946 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3948 [(set (reg:CC CC_REGNUM)
3949 (compare:CC (match_dup 1) (match_dup 2)))
3951 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3955 [(set_attr "conds" "clob")
3956 (set_attr "length" "8,8,12")]
3959 (define_insn "*store_minmaxsi"
3960 [(set (match_operand:SI 0 "memory_operand" "=m")
3961 (match_operator:SI 3 "minmax_operator"
3962 [(match_operand:SI 1 "s_register_operand" "r")
3963 (match_operand:SI 2 "s_register_operand" "r")]))
3964 (clobber (reg:CC CC_REGNUM))]
3965 "TARGET_32BIT && optimize_insn_for_size_p()"
3967 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3968 operands[1], operands[2]);
3969 output_asm_insn (\"cmp\\t%1, %2\", operands);
3971 output_asm_insn (\"ite\t%d3\", operands);
3972 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3973 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3976 [(set_attr "conds" "clob")
3977 (set (attr "length")
3978 (if_then_else (eq_attr "is_thumb" "yes")
3981 (set_attr "type" "store1")]
3984 ; Reject the frame pointer in operand[1], since reloading this after
3985 ; it has been eliminated can cause carnage.
3986 (define_insn "*minmax_arithsi"
3987 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3988 (match_operator:SI 4 "shiftable_operator"
3989 [(match_operator:SI 5 "minmax_operator"
3990 [(match_operand:SI 2 "s_register_operand" "r,r")
3991 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3992 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3993 (clobber (reg:CC CC_REGNUM))]
3994 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3997 enum rtx_code code = GET_CODE (operands[4]);
4000 if (which_alternative != 0 || operands[3] != const0_rtx
4001 || (code != PLUS && code != IOR && code != XOR))
4006 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4007 operands[2], operands[3]);
4008 output_asm_insn (\"cmp\\t%2, %3\", operands);
4012 output_asm_insn (\"ite\\t%d5\", operands);
4014 output_asm_insn (\"it\\t%d5\", operands);
4016 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4018 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4021 [(set_attr "conds" "clob")
4022 (set (attr "length")
4023 (if_then_else (eq_attr "is_thumb" "yes")
4028 ; Reject the frame pointer in operand[1], since reloading this after
4029 ; it has been eliminated can cause carnage.
4030 (define_insn_and_split "*minmax_arithsi_non_canon"
4031 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4033 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4034 (match_operator:SI 4 "minmax_operator"
4035 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4036 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4037 (clobber (reg:CC CC_REGNUM))]
4038 "TARGET_32BIT && !arm_eliminable_register (operands[1])
4039 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4041 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4042 [(set (reg:CC CC_REGNUM)
4043 (compare:CC (match_dup 2) (match_dup 3)))
4045 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4047 (minus:SI (match_dup 1)
4049 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4051 (minus:SI (match_dup 1)
4054 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4055 operands[2], operands[3]);
4056 enum rtx_code rc = minmax_code (operands[4]);
4057 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4058 operands[2], operands[3]);
4060 if (mode == CCFPmode || mode == CCFPEmode)
4061 rc = reverse_condition_maybe_unordered (rc);
4063 rc = reverse_condition (rc);
4064 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4066 [(set_attr "conds" "clob")
4067 (set (attr "length")
4068 (if_then_else (eq_attr "is_thumb" "yes")
4073 (define_code_iterator SAT [smin smax])
4074 (define_code_iterator SATrev [smin smax])
4075 (define_code_attr SATlo [(smin "1") (smax "2")])
4076 (define_code_attr SAThi [(smin "2") (smax "1")])
4078 (define_insn "*satsi_<SAT:code>"
4079 [(set (match_operand:SI 0 "s_register_operand" "=r")
4080 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
4081 (match_operand:SI 1 "const_int_operand" "i"))
4082 (match_operand:SI 2 "const_int_operand" "i")))]
4083 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4084 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4088 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4089 &mask, &signed_sat))
4092 operands[1] = GEN_INT (mask);
4094 return "ssat%?\t%0, %1, %3";
4096 return "usat%?\t%0, %1, %3";
4098 [(set_attr "predicable" "yes")
4099 (set_attr "predicable_short_it" "no")]
4102 (define_insn "*satsi_<SAT:code>_shift"
4103 [(set (match_operand:SI 0 "s_register_operand" "=r")
4104 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
4105 [(match_operand:SI 4 "s_register_operand" "r")
4106 (match_operand:SI 5 "const_int_operand" "i")])
4107 (match_operand:SI 1 "const_int_operand" "i"))
4108 (match_operand:SI 2 "const_int_operand" "i")))]
4109 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
4110 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4114 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4115 &mask, &signed_sat))
4118 operands[1] = GEN_INT (mask);
4120 return "ssat%?\t%0, %1, %4%S3";
4122 return "usat%?\t%0, %1, %4%S3";
4124 [(set_attr "predicable" "yes")
4125 (set_attr "predicable_short_it" "no")
4126 (set_attr "shift" "3")
4127 (set_attr "type" "arlo_shift")])
4129 ;; Shift and rotation insns
4131 (define_expand "ashldi3"
4132 [(set (match_operand:DI 0 "s_register_operand" "")
4133 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
4134 (match_operand:SI 2 "general_operand" "")))]
4139 /* Delay the decision whether to use NEON or core-regs until
4140 register allocation. */
4141 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
4146 /* Only the NEON case can handle in-memory shift counts. */
4147 if (!reg_or_int_operand (operands[2], SImode))
4148 operands[2] = force_reg (SImode, operands[2]);
4151 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4152 ; /* No special preparation statements; expand pattern as above. */
4155 rtx scratch1, scratch2;
4157 if (CONST_INT_P (operands[2])
4158 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4160 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
4164 /* Ideally we should use iwmmxt here if we could know that operands[1]
4165 ends up already living in an iwmmxt register. Otherwise it's
4166 cheaper to have the alternate code being generated than moving
4167 values to iwmmxt regs and back. */
4169 /* If we're optimizing for size, we prefer the libgcc calls. */
4170 if (optimize_function_for_size_p (cfun))
4173 /* Expand operation using core-registers.
4174 'FAIL' would achieve the same thing, but this is a bit smarter. */
4175 scratch1 = gen_reg_rtx (SImode);
4176 scratch2 = gen_reg_rtx (SImode);
4177 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4178 operands[2], scratch1, scratch2);
4184 (define_insn_and_split "arm_ashldi3_1bit"
4185 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4186 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
4188 (clobber (reg:CC CC_REGNUM))]
4190 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
4191 "&& reload_completed"
4192 [(parallel [(set (reg:CC CC_REGNUM)
4193 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
4195 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
4196 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
4197 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4199 operands[2] = gen_highpart (SImode, operands[0]);
4200 operands[0] = gen_lowpart (SImode, operands[0]);
4201 operands[3] = gen_highpart (SImode, operands[1]);
4202 operands[1] = gen_lowpart (SImode, operands[1]);
4204 [(set_attr "conds" "clob")
4205 (set_attr "length" "8")]
4208 (define_expand "ashlsi3"
4209 [(set (match_operand:SI 0 "s_register_operand" "")
4210 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
4211 (match_operand:SI 2 "arm_rhs_operand" "")))]
4214 if (CONST_INT_P (operands[2])
4215 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4217 emit_insn (gen_movsi (operands[0], const0_rtx));
4223 (define_insn "*thumb1_ashlsi3"
4224 [(set (match_operand:SI 0 "register_operand" "=l,l")
4225 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
4226 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4229 [(set_attr "length" "2")
4230 (set_attr "type" "shift,shift_reg")
4231 (set_attr "conds" "set")])
4233 (define_expand "ashrdi3"
4234 [(set (match_operand:DI 0 "s_register_operand" "")
4235 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4236 (match_operand:SI 2 "reg_or_int_operand" "")))]
4241 /* Delay the decision whether to use NEON or core-regs until
4242 register allocation. */
4243 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
4247 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4248 ; /* No special preparation statements; expand pattern as above. */
4251 rtx scratch1, scratch2;
4253 if (CONST_INT_P (operands[2])
4254 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4256 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
4260 /* Ideally we should use iwmmxt here if we could know that operands[1]
4261 ends up already living in an iwmmxt register. Otherwise it's
4262 cheaper to have the alternate code being generated than moving
4263 values to iwmmxt regs and back. */
4265 /* If we're optimizing for size, we prefer the libgcc calls. */
4266 if (optimize_function_for_size_p (cfun))
4269 /* Expand operation using core-registers.
4270 'FAIL' would achieve the same thing, but this is a bit smarter. */
4271 scratch1 = gen_reg_rtx (SImode);
4272 scratch2 = gen_reg_rtx (SImode);
4273 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4274 operands[2], scratch1, scratch2);
4280 (define_insn_and_split "arm_ashrdi3_1bit"
4281 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4282 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4284 (clobber (reg:CC CC_REGNUM))]
4286 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
4287 "&& reload_completed"
4288 [(parallel [(set (reg:CC CC_REGNUM)
4289 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
4291 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
4292 (set (match_dup 0) (unspec:SI [(match_dup 1)
4293 (reg:CC_C CC_REGNUM)]
4296 operands[2] = gen_highpart (SImode, operands[0]);
4297 operands[0] = gen_lowpart (SImode, operands[0]);
4298 operands[3] = gen_highpart (SImode, operands[1]);
4299 operands[1] = gen_lowpart (SImode, operands[1]);
4301 [(set_attr "conds" "clob")
4302 (set_attr "length" "8")]
4306 [(set (match_operand:SI 0 "s_register_operand" "=r")
4307 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
4308 (reg:CC_C CC_REGNUM)]
4312 [(set_attr "conds" "use")
4313 (set_attr "type" "mov_shift")]
4316 (define_expand "ashrsi3"
4317 [(set (match_operand:SI 0 "s_register_operand" "")
4318 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4319 (match_operand:SI 2 "arm_rhs_operand" "")))]
4322 if (CONST_INT_P (operands[2])
4323 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4324 operands[2] = GEN_INT (31);
4328 (define_insn "*thumb1_ashrsi3"
4329 [(set (match_operand:SI 0 "register_operand" "=l,l")
4330 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4331 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4334 [(set_attr "length" "2")
4335 (set_attr "type" "shift,shift_reg")
4336 (set_attr "conds" "set")])
4338 (define_expand "lshrdi3"
4339 [(set (match_operand:DI 0 "s_register_operand" "")
4340 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4341 (match_operand:SI 2 "reg_or_int_operand" "")))]
4346 /* Delay the decision whether to use NEON or core-regs until
4347 register allocation. */
4348 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4352 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4353 ; /* No special preparation statements; expand pattern as above. */
4356 rtx scratch1, scratch2;
4358 if (CONST_INT_P (operands[2])
4359 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4361 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4365 /* Ideally we should use iwmmxt here if we could know that operands[1]
4366 ends up already living in an iwmmxt register. Otherwise it's
4367 cheaper to have the alternate code being generated than moving
4368 values to iwmmxt regs and back. */
4370 /* If we're optimizing for size, we prefer the libgcc calls. */
4371 if (optimize_function_for_size_p (cfun))
4374 /* Expand operation using core-registers.
4375 'FAIL' would achieve the same thing, but this is a bit smarter. */
4376 scratch1 = gen_reg_rtx (SImode);
4377 scratch2 = gen_reg_rtx (SImode);
4378 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4379 operands[2], scratch1, scratch2);
4385 (define_insn_and_split "arm_lshrdi3_1bit"
4386 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4387 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4389 (clobber (reg:CC CC_REGNUM))]
4391 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4392 "&& reload_completed"
4393 [(parallel [(set (reg:CC CC_REGNUM)
4394 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4396 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4397 (set (match_dup 0) (unspec:SI [(match_dup 1)
4398 (reg:CC_C CC_REGNUM)]
4401 operands[2] = gen_highpart (SImode, operands[0]);
4402 operands[0] = gen_lowpart (SImode, operands[0]);
4403 operands[3] = gen_highpart (SImode, operands[1]);
4404 operands[1] = gen_lowpart (SImode, operands[1]);
4406 [(set_attr "conds" "clob")
4407 (set_attr "length" "8")]
;; SImode logical shift right expander.  A constant shift amount greater
;; than 31 is undefined in the ISA encoding, so it is folded to a plain
;; move of zero at expand time.
4410 (define_expand "lshrsi3"
4411 [(set (match_operand:SI 0 "s_register_operand" "")
4412 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4413 (match_operand:SI 2 "arm_rhs_operand" "")))]
4416 if (CONST_INT_P (operands[2])
4417 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4419 emit_insn (gen_movsi (operands[0], const0_rtx));

;; Thumb-1 SI logical shift right: immediate (N) or register-specified
;; (low-reg) amount; the register form ties operand 1 to the output.
4425 (define_insn "*thumb1_lshrsi3"
4426 [(set (match_operand:SI 0 "register_operand" "=l,l")
4427 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4428 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
;; Thumb-1 shifts set the flags implicitly (conds "set"); 16-bit encoding.
4431 [(set_attr "length" "2")
4432 (set_attr "type" "shift,shift_reg")
4433 (set_attr "conds" "set")])
;; Rotate left: the ARM ISA only has rotate-right, so a left rotate by K
;; is rewritten as a right rotate by (32 - K) % 32; a variable amount is
;; converted with an explicit 32 - n subtraction into a fresh register.
4435 (define_expand "rotlsi3"
4436 [(set (match_operand:SI 0 "s_register_operand" "")
4437 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4438 (match_operand:SI 2 "reg_or_int_operand" "")))]
4441 if (CONST_INT_P (operands[2]))
4442 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4445 rtx reg = gen_reg_rtx (SImode);
4446 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));

;; Rotate right expander.  Constant amounts are reduced modulo 32; on
;; Thumb-1 (else branch) constants must be forced into a register since
;; there is no immediate-rotate encoding there.
4452 (define_expand "rotrsi3"
4453 [(set (match_operand:SI 0 "s_register_operand" "")
4454 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4455 (match_operand:SI 2 "arm_rhs_operand" "")))]
4460 if (CONST_INT_P (operands[2])
4461 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4462 operands[2] = GEN_INT (INTVAL (operands[2]) % 32)
4464 else /* TARGET_THUMB1 */
4466 if (CONST_INT_P (operands [2]))
4467 operands [2] = force_reg (SImode, operands[2]);

;; Thumb-1 register-amount rotate right; destination tied to operand 1.
4472 (define_insn "*thumb1_rotrsi3"
4473 [(set (match_operand:SI 0 "register_operand" "=l")
4474 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4475 (match_operand:SI 2 "register_operand" "l")))]
4478 [(set_attr "type" "shift_reg")
4479 (set_attr "length" "2")]
;; Generic SI shift through a shift_operator match; assembly text is
;; produced by arm_output_shift().  Alternative 0 is the 16-bit Thumb-2
;; form (arch "t2", low regs, tied input).
4482 (define_insn "*arm_shiftsi3"
4483 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
4484 (match_operator:SI 3 "shift_operator"
4485 [(match_operand:SI 1 "s_register_operand" "0,r,r")
4486 (match_operand:SI 2 "reg_or_int_operand" "l,M,r")]))]
4488 "* return arm_output_shift(operands, 0);"
4489 [(set_attr "predicable" "yes")
4490 (set_attr "arch" "t2,*,*")
4491 (set_attr "predicable_short_it" "yes,no,no")
4492 (set_attr "length" "4")
4493 (set_attr "shift" "1")
4494 (set_attr "type" "arlo_shift_reg,arlo_shift,arlo_shift_reg")]

;; Shift that also sets the full condition codes (flag-setting variant,
;; arm_output_shift called with 1); result is still written to operand 0.
4497 (define_insn "*shiftsi3_compare"
4498 [(set (reg:CC CC_REGNUM)
4499 (compare:CC (match_operator:SI 3 "shift_operator"
4500 [(match_operand:SI 1 "s_register_operand" "r,r")
4501 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4503 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4504 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4506 "* return arm_output_shift(operands, 1);"
4507 [(set_attr "conds" "set")
4508 (set_attr "shift" "1")
4509 (set_attr "type" "arlo_shift,arlo_shift_reg")]

;; As above but in CC_NOOV mode (comparison result valid only when the
;; overflow flag is not examined).
4512 (define_insn "*shiftsi3_compare0"
4513 [(set (reg:CC_NOOV CC_REGNUM)
4514 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4515 [(match_operand:SI 1 "s_register_operand" "r,r")
4516 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4518 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4519 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4521 "* return arm_output_shift(operands, 1);"
4522 [(set_attr "conds" "set")
4523 (set_attr "shift" "1")
4524 (set_attr "type" "arlo_shift,arlo_shift_reg")]

;; Flag-setting shift where only the flags are wanted: the shifted value
;; itself goes to a scratch register.
4527 (define_insn "*shiftsi3_compare0_scratch"
4528 [(set (reg:CC_NOOV CC_REGNUM)
4529 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4530 [(match_operand:SI 1 "s_register_operand" "r,r")
4531 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4533 (clobber (match_scratch:SI 0 "=r,r"))]
4535 "* return arm_output_shift(operands, 1);"
4536 [(set_attr "conds" "set")
4537 (set_attr "shift" "1")
4538 (set_attr "type" "shift,shift_reg")]
;; MVN combined with a shifted operand (NOT of a shift): uses the ARM
;; barrel shifter for free.  Register-shift form (rM) is ARM-state only
;; (arch "a"); the immediate form is any 32-bit mode ("32").
4541 (define_insn "*not_shiftsi"
4542 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4543 (not:SI (match_operator:SI 3 "shift_operator"
4544 [(match_operand:SI 1 "s_register_operand" "r,r")
4545 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4548 [(set_attr "predicable" "yes")
4549 (set_attr "predicable_short_it" "no")
4550 (set_attr "shift" "1")
4551 (set_attr "arch" "32,a")
4552 (set_attr "type" "mvn_shift,mvn_shift_reg")])

;; Flag-setting variant of the above (MVNS of a shifted operand), also
;; writing the result to operand 0.
4554 (define_insn "*not_shiftsi_compare0"
4555 [(set (reg:CC_NOOV CC_REGNUM)
4557 (not:SI (match_operator:SI 3 "shift_operator"
4558 [(match_operand:SI 1 "s_register_operand" "r,r")
4559 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4561 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4562 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4565 [(set_attr "conds" "set")
4566 (set_attr "shift" "1")
4567 (set_attr "arch" "32,a")
4568 (set_attr "type" "mvn_shift,mvn_shift_reg")])

;; Flags-only variant: the NOT-of-shift result is discarded into a
;; scratch register; only the condition codes are live afterwards.
4570 (define_insn "*not_shiftsi_compare0_scratch"
4571 [(set (reg:CC_NOOV CC_REGNUM)
4573 (not:SI (match_operator:SI 3 "shift_operator"
4574 [(match_operand:SI 1 "s_register_operand" "r,r")
4575 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4577 (clobber (match_scratch:SI 0 "=r,r"))]
4580 [(set_attr "conds" "set")
4581 (set_attr "shift" "1")
4582 (set_attr "arch" "32,a")
4583 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4585 ;; We don't really have extzv, but defining this using shifts helps
4586 ;; to reduce register pressure later on.
;; extzv: zero-extract of a bitfield (operand 2 = width, operand 3 =
;; start bit).  On Thumb-2 it can use UBFX or, for 16/32-bit aligned
;; fields in memory with unaligned_access, a plain unaligned load.
;; Thumb-1 falls back to a shift-left/shift-right pair.
4588 (define_expand "extzv"
4589 [(set (match_operand 0 "s_register_operand" "")
4590 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4591 (match_operand 2 "const_int_operand" "")
4592 (match_operand 3 "const_int_operand" "")))]
4593 "TARGET_THUMB1 || arm_arch_thumb2"
;; lshift/rshift implement extraction as (x << lshift) >> rshift.
4596 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4597 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4599 if (arm_arch_thumb2)
4601 HOST_WIDE_INT width = INTVAL (operands[2]);
4602 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
;; Byte-aligned 16/32-bit field in memory: emit an unaligned load
;; instead of extract instructions.
4604 if (unaligned_access && MEM_P (operands[1])
4605 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
;; Big-endian layouts count bit positions from the other end.
4609 if (BYTES_BIG_ENDIAN)
4610 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4615 base_addr = adjust_address (operands[1], SImode,
4616 bitpos / BITS_PER_UNIT);
4617 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4621 rtx dest = operands[0];
4622 rtx tmp = gen_reg_rtx (SImode);
4624 /* We may get a paradoxical subreg here. Strip it off. */
4625 if (GET_CODE (dest) == SUBREG
4626 && GET_MODE (dest) == SImode
4627 && GET_MODE (SUBREG_REG (dest)) == HImode)
4628 dest = SUBREG_REG (dest);
4630 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
;; 16-bit field: unaligned halfword load zero-extended into dest.
4633 base_addr = adjust_address (operands[1], HImode,
4634 bitpos / BITS_PER_UNIT);
4635 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4636 emit_move_insn (gen_lowpart (SImode, dest), tmp);
;; Register source on Thumb-2: use the UBFX pattern directly.
4640 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4642 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
;; Thumb-1 path: register operands only.
4650 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4653 operands[3] = GEN_INT (rshift);
;; Field already at the top of the word: a single LSR suffices.
4657 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4661 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4662 operands[3], gen_reg_rtx (SImode)))
4667 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Thumb-1 extzv helper: extract via shift-left (operand 2) into scratch
;; operand 4, then logical-shift-right (operand 3) into the destination.
4669 (define_expand "extzv_t1"
4670 [(set (match_operand:SI 4 "s_register_operand" "")
4671 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4672 (match_operand:SI 2 "const_int_operand" "")))
4673 (set (match_operand:SI 0 "s_register_operand" "")
4674 (lshiftrt:SI (match_dup 4)
4675 (match_operand:SI 3 "const_int_operand" "")))]
;; extv: sign-extract of a bitfield (operand 2 = width, operand 3 =
;; start bit).  Byte-aligned 16/32-bit memory fields use unaligned
;; (sign-extending) loads; register SImode sources go through the
;; extv_regsi helper (SBFX).
4679 (define_expand "extv"
4680 [(set (match_operand 0 "s_register_operand" "")
4681 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4682 (match_operand 2 "const_int_operand" "")
4683 (match_operand 3 "const_int_operand" "")))]
4686 HOST_WIDE_INT width = INTVAL (operands[2]);
4687 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4689 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4690 && (bitpos % BITS_PER_UNIT) == 0)
;; Big-endian layouts count bit positions from the other end.
4694 if (BYTES_BIG_ENDIAN)
4695 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4699 base_addr = adjust_address (operands[1], SImode,
4700 bitpos / BITS_PER_UNIT);
4701 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4705 rtx dest = operands[0];
4706 rtx tmp = gen_reg_rtx (SImode);
4708 /* We may get a paradoxical subreg here. Strip it off. */
4709 if (GET_CODE (dest) == SUBREG
4710 && GET_MODE (dest) == SImode
4711 && GET_MODE (SUBREG_REG (dest)) == HImode)
4712 dest = SUBREG_REG (dest);
4714 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
;; 16-bit field: unaligned sign-extending halfword load.
4717 base_addr = adjust_address (operands[1], HImode,
4718 bitpos / BITS_PER_UNIT);
4719 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4720 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4725 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4727 else if (GET_MODE (operands[0]) == SImode
4728 && GET_MODE (operands[1]) == SImode)
4730 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4738 ; Helper to expand register forms of extv with the proper modes.
;; Register-to-register sign_extract with explicit SImode on both sides,
;; so the generic extv expander can hand off with the proper modes.
4740 (define_expand "extv_regsi"
4741 [(set (match_operand:SI 0 "s_register_operand" "")
4742 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4743 (match_operand 2 "const_int_operand" "")
4744 (match_operand 3 "const_int_operand" "")))]
4749 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; Unaligned SImode load, wrapped in an unspec so the optimizers do not
;; fold it with aligned accesses.  Alternative 0 is the 16-bit Thumb-2
;; encoding (Uw constraint, low reg).
4751 (define_insn "unaligned_loadsi"
4752 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4753 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4754 UNSPEC_UNALIGNED_LOAD))]
4755 "unaligned_access && TARGET_32BIT"
4756 "ldr%?\t%0, %1\t@ unaligned"
4757 [(set_attr "arch" "t2,any")
4758 (set_attr "length" "2,4")
4759 (set_attr "predicable" "yes")
4760 (set_attr "predicable_short_it" "yes,no")
4761 (set_attr "type" "load1")])

;; Unaligned halfword load, sign-extended to SImode (LDRSH).
4763 (define_insn "unaligned_loadhis"
4764 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4766 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4767 UNSPEC_UNALIGNED_LOAD)))]
4768 "unaligned_access && TARGET_32BIT"
4769 "ldr%(sh%)\t%0, %1\t@ unaligned"
4770 [(set_attr "arch" "t2,any")
4771 (set_attr "length" "2,4")
4772 (set_attr "predicable" "yes")
4773 (set_attr "predicable_short_it" "yes,no")
4774 (set_attr "type" "load_byte")])

;; Unaligned halfword load, zero-extended to SImode (LDRH).
4776 (define_insn "unaligned_loadhiu"
4777 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4779 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4780 UNSPEC_UNALIGNED_LOAD)))]
4781 "unaligned_access && TARGET_32BIT"
4782 "ldr%(h%)\t%0, %1\t@ unaligned"
4783 [(set_attr "arch" "t2,any")
4784 (set_attr "length" "2,4")
4785 (set_attr "predicable" "yes")
4786 (set_attr "predicable_short_it" "yes,no")
4787 (set_attr "type" "load_byte")])

;; Unaligned SImode store (STR), mirror of unaligned_loadsi.
4789 (define_insn "unaligned_storesi"
4790 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4791 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4792 UNSPEC_UNALIGNED_STORE))]
4793 "unaligned_access && TARGET_32BIT"
4794 "str%?\t%1, %0\t@ unaligned"
4795 [(set_attr "arch" "t2,any")
4796 (set_attr "length" "2,4")
4797 (set_attr "predicable" "yes")
4798 (set_attr "predicable_short_it" "yes,no")
4799 (set_attr "type" "store1")])

;; Unaligned halfword store (STRH).
4801 (define_insn "unaligned_storehi"
4802 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4803 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4804 UNSPEC_UNALIGNED_STORE))]
4805 "unaligned_access && TARGET_32BIT"
4806 "str%(h%)\t%1, %0\t@ unaligned"
4807 [(set_attr "arch" "t2,any")
4808 (set_attr "length" "2,4")
4809 (set_attr "predicable" "yes")
4810 (set_attr "predicable_short_it" "yes,no")
4811 (set_attr "type" "store1")])
4813 ;; Unaligned double-word load and store.
4814 ;; Split after reload into two unaligned single-word accesses.
4815 ;; It prevents lower_subreg from splitting some other aligned
4816 ;; double-word accesses too early. Used for internal memcpy.
;; Unaligned DImode load: kept as one insn until after reload (to stop
;; lower_subreg splitting aligned DI accesses too early), then split
;; into two unaligned SImode loads.
4818 (define_insn_and_split "unaligned_loaddi"
4819 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4820 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4821 UNSPEC_UNALIGNED_LOAD))]
4822 "unaligned_access && TARGET_32BIT"
4824 "&& reload_completed"
4825 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4826 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
;; Split the DI operands into SI low (0/1) and high (2/3) halves.
4828 operands[2] = gen_highpart (SImode, operands[0]);
4829 operands[0] = gen_lowpart (SImode, operands[0]);
4830 operands[3] = gen_highpart (SImode, operands[1]);
4831 operands[1] = gen_lowpart (SImode, operands[1]);
4833 /* If the first destination register overlaps with the base address,
4834 swap the order in which the loads are emitted. */
4835 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4837 rtx tmp = operands[1];
4838 operands[1] = operands[3];
4841 operands[0] = operands[2];
4845 [(set_attr "arch" "t2,any")
4846 (set_attr "length" "4,8")
4847 (set_attr "predicable" "yes")
4848 (set_attr "type" "load2")])

;; Unaligned DImode store, split after reload into two SImode stores
;; (no overlap hazard on the store side, so no reordering needed).
4850 (define_insn_and_split "unaligned_storedi"
4851 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4852 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4853 UNSPEC_UNALIGNED_STORE))]
4854 "unaligned_access && TARGET_32BIT"
4856 "&& reload_completed"
4857 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4858 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4860 operands[2] = gen_highpart (SImode, operands[0]);
4861 operands[0] = gen_lowpart (SImode, operands[0]);
4862 operands[3] = gen_highpart (SImode, operands[1]);
4863 operands[1] = gen_lowpart (SImode, operands[1]);
4865 [(set_attr "arch" "t2,any")
4866 (set_attr "length" "4,8")
4867 (set_attr "predicable" "yes")
4868 (set_attr "type" "store2")])
;; Register signed bitfield extract: SBFX dst, src, lsb (%3), width (%2).
4871 (define_insn "*extv_reg"
4872 [(set (match_operand:SI 0 "s_register_operand" "=r")
4873 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4874 (match_operand:SI 2 "const_int_operand" "M")
4875 (match_operand:SI 3 "const_int_operand" "M")))]
4877 "sbfx%?\t%0, %1, %3, %2"
4878 [(set_attr "length" "4")
4879 (set_attr "predicable" "yes")
4880 (set_attr "predicable_short_it" "no")]

;; Register unsigned bitfield extract: UBFX dst, src, lsb (%3), width (%2).
4883 (define_insn "extzv_t2"
4884 [(set (match_operand:SI 0 "s_register_operand" "=r")
4885 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4886 (match_operand:SI 2 "const_int_operand" "M")
4887 (match_operand:SI 3 "const_int_operand" "M")))]
4889 "ubfx%?\t%0, %1, %3, %2"
4890 [(set_attr "length" "4")
4891 (set_attr "predicable" "yes")
4892 (set_attr "predicable_short_it" "no")]

;; Hardware signed divide (SDIV).
4897 (define_insn "divsi3"
4898 [(set (match_operand:SI 0 "s_register_operand" "=r")
4899 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4900 (match_operand:SI 2 "s_register_operand" "r")))]
4902 "sdiv%?\t%0, %1, %2"
4903 [(set_attr "predicable" "yes")
4904 (set_attr "predicable_short_it" "no")
4905 (set_attr "type" "sdiv")]

;; Hardware unsigned divide (UDIV).
4908 (define_insn "udivsi3"
4909 [(set (match_operand:SI 0 "s_register_operand" "=r")
4910 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4911 (match_operand:SI 2 "s_register_operand" "r")))]
4913 "udiv%?\t%0, %1, %2"
4914 [(set_attr "predicable" "yes")
4915 (set_attr "predicable_short_it" "no")
4916 (set_attr "type" "udiv")]
4920 ;; Unary arithmetic insns
;; DImode negate expander; hands off to the NEON pattern when available.
4922 (define_expand "negdi2"
4924 [(set (match_operand:DI 0 "s_register_operand" "")
4925 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4926 (clobber (reg:CC CC_REGNUM))])]
4931 emit_insn (gen_negdi2_neon (operands[0], operands[1]));

;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
;; The first alternative allows the common case of a *full* overlap.
;; Core-register DI negate: split after reload into a flag-setting RSBS
;; of the low word and an RSC (reverse subtract with carry) of the high
;; word.
4939 (define_insn_and_split "*arm_negdi2"
4940 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4941 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4942 (clobber (reg:CC CC_REGNUM))]
4944 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4945 "&& reload_completed"
4946 [(parallel [(set (reg:CC CC_REGNUM)
4947 (compare:CC (const_int 0) (match_dup 1)))
4948 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4949 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4950 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
;; Rewrite DI operands as SI low (0/1) / high (2/3) halves.
4952 operands[2] = gen_highpart (SImode, operands[0]);
4953 operands[0] = gen_lowpart (SImode, operands[0]);
4954 operands[3] = gen_highpart (SImode, operands[1]);
4955 operands[1] = gen_lowpart (SImode, operands[1]);
4957 [(set_attr "conds" "clob")
4958 (set_attr "length" "8")]

;; Thumb-1 DI negate: mov #0 / neg / sbc sequence, single-insn so the
;; scheduler cannot break the carry chain (see file-header note).
4961 (define_insn "*thumb1_negdi2"
4962 [(set (match_operand:DI 0 "register_operand" "=&l")
4963 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4964 (clobber (reg:CC CC_REGNUM))]
4966 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4967 [(set_attr "length" "6")]

;; SImode negate expander.
4970 (define_expand "negsi2"
4971 [(set (match_operand:SI 0 "s_register_operand" "")
4972 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]

;; SImode negate insn: RSB dst, src, #0.
4977 (define_insn "*arm_negsi2"
4978 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4979 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4981 "rsb%?\\t%0, %1, #0"
4982 [(set_attr "predicable" "yes")
4983 (set_attr "predicable_short_it" "yes,no")
4984 (set_attr "arch" "t2,*")
4985 (set_attr "length" "4")]

;; Thumb-1 SImode negate (16-bit encoding).
4988 (define_insn "*thumb1_negsi2"
4989 [(set (match_operand:SI 0 "register_operand" "=l")
4990 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4993 [(set_attr "length" "2")]

;; Floating-point negate expanders; gated on VFP hardware float.
4996 (define_expand "negsf2"
4997 [(set (match_operand:SF 0 "s_register_operand" "")
4998 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4999 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"

5003 (define_expand "negdf2"
5004 [(set (match_operand:DF 0 "s_register_operand" "")
5005 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
5006 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5009 ;; Negate an extended 32-bit value.
;; Negate a sign-extended 32-bit value to DImode: low word = -src,
;; high word = low >> 31 (arithmetic), built by hand in the split body.
5010 (define_insn_and_split "*negdi_extendsidi"
5011 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
5012 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
5013 (clobber (reg:CC CC_REGNUM))]
5015 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
5016 "&& reload_completed"
5019 operands[2] = gen_highpart (SImode, operands[0]);
5020 operands[0] = gen_lowpart (SImode, operands[0]);
5021 rtx tmp = gen_rtx_SET (VOIDmode,
5023 gen_rtx_MINUS (SImode,
5032 /* Set the flags, to emit the short encoding in Thumb2. */
5033 rtx flags = gen_rtx_SET (VOIDmode,
5034 gen_rtx_REG (CCmode, CC_REGNUM),
5035 gen_rtx_COMPARE (CCmode,
5038 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5043 emit_insn (gen_rtx_SET (VOIDmode,
5045 gen_rtx_ASHIFTRT (SImode,
5050 [(set_attr "length" "8,8,4,4")
5051 (set_attr "arch" "a,a,t2,t2")]

;; Negate a zero-extended 32-bit value: RSBS low, then SBC r,r,r on the
;; high word, which yields 0 or -1 purely from the borrow -- the actual
;; register contents cancel out.
5054 (define_insn_and_split "*negdi_zero_extendsidi"
5055 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
5056 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
5057 (clobber (reg:CC CC_REGNUM))]
5059 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
5060 ;; Don't care what register is input to sbc,
5061 ;; since we just just need to propagate the carry.
5062 "&& reload_completed"
5063 [(parallel [(set (reg:CC CC_REGNUM)
5064 (compare:CC (const_int 0) (match_dup 1)))
5065 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
5066 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
5067 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
5069 operands[2] = gen_highpart (SImode, operands[0]);
5070 operands[0] = gen_lowpart (SImode, operands[0]);
5072 [(set_attr "conds" "clob")
5073 (set_attr "length" "8")] ;; length in thumb is 4
5076 ;; abssi2 doesn't really clobber the condition codes if a different register
5077 ;; is being set. To keep things simple, assume during rtl manipulations that
5078 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: the clobber is a CC register or a scratch depending
;; on which code sequence will be used (see the comment above in the
;; original file about conds truthfulness).
5081 (define_expand "abssi2"
5083 [(set (match_operand:SI 0 "s_register_operand" "")
5084 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
5085 (clobber (match_dup 2))])]
5089 operands[2] = gen_rtx_SCRATCH (SImode);
5091 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);

;; ARM abs: two strategies chosen at split time by register overlap --
;; (a) dst == src: CMP #0 + conditional RSBLT (clobbers flags);
;; (b) distinct regs: EOR with asr#31 then SUB asr#31 (flag-free,
;;     predicable).
5094 (define_insn_and_split "*arm_abssi2"
5095 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5096 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5097 (clobber (reg:CC CC_REGNUM))]
5100 "&& reload_completed"
5103 /* if (which_alternative == 0) */
5104 if (REGNO(operands[0]) == REGNO(operands[1]))
5106 /* Emit the pattern:
5107 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5108 [(set (reg:CC CC_REGNUM)
5109 (compare:CC (match_dup 0) (const_int 0)))
5110 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5111 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5113 emit_insn (gen_rtx_SET (VOIDmode,
5114 gen_rtx_REG (CCmode, CC_REGNUM),
5115 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5116 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5117 (gen_rtx_LT (SImode,
5118 gen_rtx_REG (CCmode, CC_REGNUM),
5120 (gen_rtx_SET (VOIDmode,
5122 (gen_rtx_MINUS (SImode,
5129 /* Emit the pattern:
5130 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5132 (xor:SI (match_dup 1)
5133 (ashiftrt:SI (match_dup 1) (const_int 31))))
5135 (minus:SI (match_dup 0)
5136 (ashiftrt:SI (match_dup 1) (const_int 31))))]
5138 emit_insn (gen_rtx_SET (VOIDmode,
5140 gen_rtx_XOR (SImode,
5141 gen_rtx_ASHIFTRT (SImode,
5145 emit_insn (gen_rtx_SET (VOIDmode,
5147 gen_rtx_MINUS (SImode,
5149 gen_rtx_ASHIFTRT (SImode,
5155 [(set_attr "conds" "clob,*")
5156 (set_attr "shift" "1")
5157 (set_attr "predicable" "no, yes")
5158 (set_attr "length" "8")]

;; Thumb-1 abs via the branch-free identity:
;;   t = x >> 31 (arith); dst = (x + t) ^ t
5161 (define_insn_and_split "*thumb1_abssi2"
5162 [(set (match_operand:SI 0 "s_register_operand" "=l")
5163 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
5164 (clobber (match_scratch:SI 2 "=&l"))]
5167 "TARGET_THUMB1 && reload_completed"
5168 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5169 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
5170 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5172 [(set_attr "length" "6")]

;; ARM negated abs (-|x|): mirrors *arm_abssi2 with the comparison /
;; subtraction directions reversed (CMP + RSBGT, or EOR + RSB).
5175 (define_insn_and_split "*arm_neg_abssi2"
5176 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5177 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5178 (clobber (reg:CC CC_REGNUM))]
5181 "&& reload_completed"
5184 /* if (which_alternative == 0) */
5185 if (REGNO (operands[0]) == REGNO (operands[1]))
5187 /* Emit the pattern:
5188 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5190 emit_insn (gen_rtx_SET (VOIDmode,
5191 gen_rtx_REG (CCmode, CC_REGNUM),
5192 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5193 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5195 gen_rtx_REG (CCmode, CC_REGNUM),
5197 gen_rtx_SET (VOIDmode,
5199 (gen_rtx_MINUS (SImode,
5205 /* Emit the pattern:
5206 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5208 emit_insn (gen_rtx_SET (VOIDmode,
5210 gen_rtx_XOR (SImode,
5211 gen_rtx_ASHIFTRT (SImode,
5215 emit_insn (gen_rtx_SET (VOIDmode,
5217 gen_rtx_MINUS (SImode,
5218 gen_rtx_ASHIFTRT (SImode,
5225 [(set_attr "conds" "clob,*")
5226 (set_attr "shift" "1")
5227 (set_attr "predicable" "no, yes")
5228 (set_attr "length" "8")]

;; Thumb-1 negated abs: t = x >> 31; dst = (t - x) ^ t.
5231 (define_insn_and_split "*thumb1_neg_abssi2"
5232 [(set (match_operand:SI 0 "s_register_operand" "=l")
5233 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
5234 (clobber (match_scratch:SI 2 "=&l"))]
5237 "TARGET_THUMB1 && reload_completed"
5238 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
5239 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
5240 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
5242 [(set_attr "length" "6")]
;; Floating-point abs/sqrt expanders, gated on the matching hardware
;; float capability (DF additionally needs double-precision VFP).
5245 (define_expand "abssf2"
5246 [(set (match_operand:SF 0 "s_register_operand" "")
5247 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
5248 "TARGET_32BIT && TARGET_HARD_FLOAT"

5251 (define_expand "absdf2"
5252 [(set (match_operand:DF 0 "s_register_operand" "")
5253 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
5254 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

5257 (define_expand "sqrtsf2"
5258 [(set (match_operand:SF 0 "s_register_operand" "")
5259 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
5260 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"

5263 (define_expand "sqrtdf2"
5264 [(set (match_operand:DF 0 "s_register_operand" "")
5265 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
5266 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"

;; DImode bitwise NOT: NEON alternatives (w) handle it in one vector op;
;; core-register alternatives split after reload into two SImode MVNs.
5269 (define_insn_and_split "one_cmpldi2"
5270 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
5271 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
;; Only split when the value ended up in general (core) registers.
5278 "TARGET_32BIT && reload_completed
5279 && arm_general_register_operand (operands[0], DImode)"
5280 [(set (match_dup 0) (not:SI (match_dup 1)))
5281 (set (match_dup 2) (not:SI (match_dup 3)))]
5284 operands[2] = gen_highpart (SImode, operands[0]);
5285 operands[0] = gen_lowpart (SImode, operands[0]);
5286 operands[3] = gen_highpart (SImode, operands[1]);
5287 operands[1] = gen_lowpart (SImode, operands[1]);
5289 [(set_attr "length" "*,8,8,*")
5290 (set_attr "predicable" "no,yes,yes,no")
5291 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
5292 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
;; SImode bitwise NOT expander.
5295 (define_expand "one_cmplsi2"
5296 [(set (match_operand:SI 0 "s_register_operand" "")
5297 (not:SI (match_operand:SI 1 "s_register_operand" "")))]

;; 32-bit MVN; alternative 0 is the narrow Thumb-2 low-reg encoding.
5302 (define_insn "*arm_one_cmplsi2"
5303 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
5304 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5307 [(set_attr "predicable" "yes")
5308 (set_attr "predicable_short_it" "yes,no")
5309 (set_attr "arch" "t2,*")
5310 (set_attr "length" "4")
5311 (set_attr "type" "mvn_reg")]

;; Thumb-1 MVN (16-bit encoding).
5314 (define_insn "*thumb1_one_cmplsi2"
5315 [(set (match_operand:SI 0 "register_operand" "=l")
5316 (not:SI (match_operand:SI 1 "register_operand" "l")))]
5319 [(set_attr "length" "2")
5320 (set_attr "type" "mvn_reg")]

;; Flag-setting MVN (MVNS) that also keeps the inverted result.
5323 (define_insn "*notsi_compare0"
5324 [(set (reg:CC_NOOV CC_REGNUM)
5325 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5327 (set (match_operand:SI 0 "s_register_operand" "=r")
5328 (not:SI (match_dup 1)))]
5331 [(set_attr "conds" "set")
5332 (set_attr "type" "mvn_reg")]

;; Flags-only MVNS: the inverted value goes to a scratch register.
5335 (define_insn "*notsi_compare0_scratch"
5336 [(set (reg:CC_NOOV CC_REGNUM)
5337 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5339 (clobber (match_scratch:SI 0 "=r"))]
5342 [(set_attr "conds" "set")
5343 (set_attr "type" "mvn_reg")]
5346 ;; Fixed <--> Floating conversion insns
;; int -> HFmode conversions have no direct instruction: go via SFmode
;; (expand_float to SF, then narrow SF -> HF).
5348 (define_expand "floatsihf2"
5349 [(set (match_operand:HF 0 "general_operand" "")
5350 (float:HF (match_operand:SI 1 "general_operand" "")))]
5354 rtx op1 = gen_reg_rtx (SFmode);
5355 expand_float (op1, operands[1], 0);
5356 op1 = convert_to_mode (HFmode, op1, 0);
5357 emit_move_insn (operands[0], op1);

;; DImode variant of the same SF-mediated int -> HF conversion.
5362 (define_expand "floatdihf2"
5363 [(set (match_operand:HF 0 "general_operand" "")
5364 (float:HF (match_operand:DI 1 "general_operand" "")))]
5368 rtx op1 = gen_reg_rtx (SFmode);
5369 expand_float (op1, operands[1], 0);
5370 op1 = convert_to_mode (HFmode, op1, 0);
5371 emit_move_insn (operands[0], op1);

;; Direct SI -> SF/DF conversions, gated on hardware float.
5376 (define_expand "floatsisf2"
5377 [(set (match_operand:SF 0 "s_register_operand" "")
5378 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5379 "TARGET_32BIT && TARGET_HARD_FLOAT"

5383 (define_expand "floatsidf2"
5384 [(set (match_operand:DF 0 "s_register_operand" "")
5385 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5386 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

;; HF -> int truncations: widen HF to SF first, then expand_fix.
5390 (define_expand "fix_trunchfsi2"
5391 [(set (match_operand:SI 0 "general_operand" "")
5392 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5396 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5397 expand_fix (operands[0], op1, 0);

5402 (define_expand "fix_trunchfdi2"
5403 [(set (match_operand:DI 0 "general_operand" "")
5404 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5408 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5409 expand_fix (operands[0], op1, 0);

;; Direct SF/DF -> SI truncations, gated on hardware float.
5414 (define_expand "fix_truncsfsi2"
5415 [(set (match_operand:SI 0 "s_register_operand" "")
5416 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5417 "TARGET_32BIT && TARGET_HARD_FLOAT"

5421 (define_expand "fix_truncdfsi2"
5422 [(set (match_operand:SI 0 "s_register_operand" "")
5423 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5424 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

;; DF -> SF narrowing, double-precision VFP only.
5430 (define_expand "truncdfsf2"
5431 [(set (match_operand:SF 0 "s_register_operand" "")
5433 (match_operand:DF 1 "s_register_operand" "")))]
5434 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

5438 /* DFmode -> HFmode conversions have to go through SFmode. */
5439 (define_expand "truncdfhf2"
5440 [(set (match_operand:HF 0 "general_operand" "")
5442 (match_operand:DF 1 "general_operand" "")))]
5447 op1 = convert_to_mode (SFmode, operands[1], 0);
5448 op1 = convert_to_mode (HFmode, op1, 0);
5449 emit_move_insn (operands[0], op1);
5454 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI zero extension, mode-iterator pattern; NEON and core
;; register alternatives selected via the arch attribute.
5456 (define_insn "zero_extend<mode>di2"
5457 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5458 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5459 "<qhs_zextenddi_cstr>")))]
5460 "TARGET_32BIT <qhs_zextenddi_cond>"
5462 [(set_attr "length" "8,4,8,8")
5463 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5464 (set_attr "ce_count" "2")
5465 (set_attr "predicable" "yes")]

;; QI/HI/SI -> DI sign extension, same structure as above.
5468 (define_insn "extend<mode>di2"
5469 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5470 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5471 "<qhs_extenddi_cstr>")))]
5472 "TARGET_32BIT <qhs_sextenddi_cond>"
5474 [(set_attr "length" "8,4,8,8,8")
5475 (set_attr "ce_count" "2")
5476 (set_attr "shift" "1")
5477 (set_attr "predicable" "yes")
5478 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]

;; Splits for all extensions to DImode
;; Zero-extend-to-DI split: extend/move into the low SI half, then set
;; the high half to zero (the pattern's final set with const0_rtx).
5483 [(set (match_operand:DI 0 "s_register_operand" "")
5484 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5485 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5486 [(set (match_dup 0) (match_dup 1))]
5488 rtx lo_part = gen_lowpart (SImode, operands[0]);
5489 enum machine_mode src_mode = GET_MODE (operands[1]);
;; Clobber first so the DI register is not seen as partially live.
5491 if (REG_P (operands[0])
5492 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5493 emit_clobber (operands[0]);
5494 if (!REG_P (lo_part) || src_mode != SImode
5495 || !rtx_equal_p (lo_part, operands[1]))
5497 if (src_mode == SImode)
5498 emit_move_insn (lo_part, operands[1]);
5500 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5501 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5502 operands[1] = lo_part;
5504 operands[0] = gen_highpart (SImode, operands[0]);
5505 operands[1] = const0_rtx;

;; Sign-extend-to-DI split: fill the low half, then derive the high
;; half as low >> 31 (arithmetic shift).
5509 [(set (match_operand:DI 0 "s_register_operand" "")
5510 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5511 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5512 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5514 rtx lo_part = gen_lowpart (SImode, operands[0]);
5515 enum machine_mode src_mode = GET_MODE (operands[1]);
5517 if (REG_P (operands[0])
5518 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5519 emit_clobber (operands[0]);
5521 if (!REG_P (lo_part) || src_mode != SImode
5522 || !rtx_equal_p (lo_part, operands[1]))
5524 if (src_mode == SImode)
5525 emit_move_insn (lo_part, operands[1]);
5527 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5528 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5529 operands[1] = lo_part;
5531 operands[0] = gen_highpart (SImode, operands[0]);
;; zero_extendhisi2: HImode -> SImode zero extension.
;; Pre-ARMv4 ARM has no halfword load, so a memory source goes through
;; gen_movhi_bytes; pre-ARMv6 register sources are synthesized as a
;; logical shift left then right by 16.  (Listing truncated: some lines
;; of this expand are missing between the visible numbered lines.)
5534 (define_expand "zero_extendhisi2"
5535 [(set (match_operand:SI 0 "s_register_operand" "")
5536 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5539 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5541 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5544 if (!arm_arch6 && !MEM_P (operands[1]))
5546 rtx t = gen_lowpart (SImode, operands[1]);
5547 rtx tmp = gen_reg_rtx (SImode);
5548 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5549 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (opener line missing from this listing): on pre-v6, non-Thumb2
;; targets a register HI zero-extend becomes shift-left 16 then
;; logical-shift-right 16 of the SImode lowpart.
5555 [(set (match_operand:SI 0 "s_register_operand" "")
5556 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5557 "!TARGET_THUMB2 && !arm_arch6"
5558 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5559 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5561 operands[2] = gen_lowpart (SImode, operands[1]);
;; Thumb1 HI zero-extend: uxth for a register on v6; for memory, the
;; output code rewrites an SP-relative PLUS address (a reload artifact,
;; per the inline comment) before emitting ldrh.
5564 (define_insn "*thumb1_zero_extendhisi2"
5565 [(set (match_operand:SI 0 "register_operand" "=l,l")
5566 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5571 if (which_alternative == 0 && arm_arch6)
5572 return "uxth\t%0, %1";
5573 if (which_alternative == 0)
5576 mem = XEXP (operands[1], 0);
5578 if (GET_CODE (mem) == CONST)
5579 mem = XEXP (mem, 0);
5581 if (GET_CODE (mem) == PLUS)
5583 rtx a = XEXP (mem, 0);
5585 /* This can happen due to bugs in reload. */
5586 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5589 ops[0] = operands[0];
5592 output_asm_insn ("mov\t%0, %1", ops);
5594 XEXP (mem, 0) = operands[0];
5598 return "ldrh\t%0, %1";
5600 [(set_attr_alternative "length"
5601 [(if_then_else (eq_attr "is_arch6" "yes")
5602 (const_int 2) (const_int 4))
5604 (set_attr "type" "extend,load_byte")]
;; ARM-mode HI zero-extend insns.  The assembler templates themselves are
;; missing from this listing (numbering gaps); only conditions and
;; attributes are visible.  Pre-v6 variant covers v4..v5 (arm_arch4 &&
;; !arm_arch6); the _v6 variant presumably uses uxth/ldrh — TODO confirm
;; against the full file.
5607 (define_insn "*arm_zero_extendhisi2"
5608 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5609 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5610 "TARGET_ARM && arm_arch4 && !arm_arch6"
5614 [(set_attr "type" "arlo_shift,load_byte")
5615 (set_attr "predicable" "yes")]
5618 (define_insn "*arm_zero_extendhisi2_v6"
5619 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5620 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5621 "TARGET_ARM && arm_arch6"
5625 [(set_attr "predicable" "yes")
5626 (set_attr "type" "extend,load_byte")]
;; Combined zero-extend-halfword-and-add, emitted as a single uxtah.
;; The insn condition line is missing from this listing.
5629 (define_insn "*arm_zero_extendhisi2addsi"
5630 [(set (match_operand:SI 0 "s_register_operand" "=r")
5631 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5632 (match_operand:SI 2 "s_register_operand" "r")))]
5634 "uxtah%?\\t%0, %2, %1"
5635 [(set_attr "type" "arlo_shift")
5636 (set_attr "predicable" "yes")
5637 (set_attr "predicable_short_it" "no")]
;; zero_extendqisi2: QImode -> SImode zero extension.
;; Pre-v6 ARM-mode register sources are masked via andsi3 (the constant
;; argument line is missing from this listing — presumably 255); other
;; pre-v6 register sources are synthesized as shift-left/right by 24.
5640 (define_expand "zero_extendqisi2"
5641 [(set (match_operand:SI 0 "s_register_operand" "")
5642 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5645 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5647 emit_insn (gen_andsi3 (operands[0],
5648 gen_lowpart (SImode, operands[1]),
5652 if (!arm_arch6 && !MEM_P (operands[1]))
5654 rtx t = gen_lowpart (SImode, operands[1]);
5655 rtx tmp = gen_reg_rtx (SImode);
5656 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5657 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (opener and condition lines missing): register QI zero-extend
;; as shift-left 24 / logical-shift-right 24, or an AND with 255 in the
;; C branch visible at line 5672.
5663 [(set (match_operand:SI 0 "s_register_operand" "")
5664 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5666 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5667 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5669 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5672 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Thumb1 QI zero-extend insns; templates missing from this listing.
;; Pre-v6 register alternative costs 4 bytes (two shifts), load 2.
5677 (define_insn "*thumb1_zero_extendqisi2"
5678 [(set (match_operand:SI 0 "register_operand" "=l,l")
5679 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5680 "TARGET_THUMB1 && !arm_arch6"
5684 [(set_attr "length" "4,2")
5685 (set_attr "type" "arlo_shift,load_byte")
5686 (set_attr "pool_range" "*,32")]
5689 (define_insn "*thumb1_zero_extendqisi2_v6"
5690 [(set (match_operand:SI 0 "register_operand" "=l,l")
5691 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5692 "TARGET_THUMB1 && arm_arch6"
5696 [(set_attr "length" "2")
5697 (set_attr "type" "extend,load_byte")]
;; ARM-mode QI zero-extend insns.  Both variants share an ldrb template
;; for the memory alternative (visible at lines 5706/5718); the register
;; alternative's template line is missing from this listing.
5700 (define_insn "*arm_zero_extendqisi2"
5701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5702 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5703 "TARGET_ARM && !arm_arch6"
5706 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5707 [(set_attr "length" "8,4")
5708 (set_attr "type" "arlo_shift,load_byte")
5709 (set_attr "predicable" "yes")]
5712 (define_insn "*arm_zero_extendqisi2_v6"
5713 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5714 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5715 "TARGET_ARM && arm_arch6"
5718 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5719 [(set_attr "type" "extend,load_byte")
5720 (set_attr "predicable" "yes")]
;; Combined zero-extend-byte-and-add, emitted as a single uxtab.
;; The insn condition line is missing from this listing.
5723 (define_insn "*arm_zero_extendqisi2addsi"
5724 [(set (match_operand:SI 0 "s_register_operand" "=r")
5725 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5726 (match_operand:SI 2 "s_register_operand" "r")))]
5728 "uxtab%?\\t%0, %2, %1"
5729 [(set_attr "predicable" "yes")
5730 (set_attr "predicable_short_it" "no")
5731 (set_attr "type" "arlo_shift")]
;; Splits (openers missing from this listing) that turn a zero-extend of
;; the low byte of an SImode value — subreg byte 0 on little-endian,
;; byte 3 on big-endian — into a move through the clobbered scratch
;; register followed by an AND with 255.
5735 [(set (match_operand:SI 0 "s_register_operand" "")
5736 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5737 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5738 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5739 [(set (match_dup 2) (match_dup 1))
5740 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5745 [(set (match_operand:SI 0 "s_register_operand" "")
5746 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5747 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5748 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5749 [(set (match_dup 2) (match_dup 1))
5750 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split merging (ior/xor (and (ashift X n) mask) (zero_extend Y)) into
;; the ior/xor of the shift followed by a zero-extend of the result's
;; lowpart — valid only when the AND mask equals the narrow mode's mask
;; shifted left by n, per the condition at lines 5765-5768.  Several
;; lines of this pattern are missing from the listing.
5756 [(set (match_operand:SI 0 "s_register_operand" "")
5757 (ior_xor:SI (and:SI (ashift:SI
5758 (match_operand:SI 1 "s_register_operand" "")
5759 (match_operand:SI 2 "const_int_operand" ""))
5760 (match_operand:SI 3 "const_int_operand" ""))
5762 (match_operator 5 "subreg_lowpart_operator"
5763 [(match_operand:SI 4 "s_register_operand" "")]))))]
5765 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5766 == (GET_MODE_MASK (GET_MODE (operands[5]))
5767 & (GET_MODE_MASK (GET_MODE (operands[5]))
5768 << (INTVAL (operands[2])))))"
5769 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5771 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5772 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against (presumably) zero, setting only the
;; Z flag (CC_Z mode).  The second compare operand and the template are
;; missing from this listing.
5775 (define_insn "*compareqi_eq0"
5776 [(set (reg:CC_Z CC_REGNUM)
5777 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5781 [(set_attr "conds" "set")
5782 (set_attr "predicable" "yes")
5783 (set_attr "predicable_short_it" "no")]
;; extendhisi2: HImode -> SImode sign extension.  Thumb1 goes through
;; thumb1_extendhisi2; pre-v4 ARM memory sources go through
;; extendhisi2_mem; pre-v6 register sources use shift-left 16 then
;; arithmetic-shift-right 16.
5786 (define_expand "extendhisi2"
5787 [(set (match_operand:SI 0 "s_register_operand" "")
5788 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5793 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5796 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5798 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5802 if (!arm_arch6 && !MEM_P (operands[1]))
5804 rtx t = gen_lowpart (SImode, operands[1]);
5805 rtx tmp = gen_reg_rtx (SImode);
5806 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5807 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split (opener missing from this listing) of a register HI sign-extend
;; with a scratch clobber into shift-left 16 / arithmetic-shift-right 16
;; of the SImode subreg.
5814 [(set (match_operand:SI 0 "register_operand" "")
5815 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5816 (clobber (match_scratch:SI 2 ""))])]
5818 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5819 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5821 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5824 ;; We used to have an early-clobber on the scratch register here.
5825 ;; However, there's a bug somewhere in reload which means that this
5826 ;; can be partially ignored during spill allocation if the memory
5827 ;; address also needs reloading; this causes us to die later on when
5828 ;; we try to verify the operands.  Fortunately, we don't really need
5829 ;; the early-clobber: we can always use operand 0 if operand 2
5830 ;; overlaps the address.
;; Thumb1 HI sign-extend: sxth for a register (the !arm_arch6 branch's
;; body is missing from this listing); for memory, the output code
;; rewrites the address so ldrsh's reg+reg addressing requirement is met,
;; using the scratch (or operand 0) when the scratch overlaps the
;; address — see the comment block above.
5831 (define_insn "thumb1_extendhisi2"
5832 [(set (match_operand:SI 0 "register_operand" "=l,l")
5833 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5834 (clobber (match_scratch:SI 2 "=X,l"))]
5841 if (which_alternative == 0 && !arm_arch6)
5843 if (which_alternative == 0)
5844 return \"sxth\\t%0, %1\";
5846 mem = XEXP (operands[1], 0);
5848 /* This code used to try to use 'V', and fix the address only if it was
5849 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5850 range of QImode offsets, and offsettable_address_p does a QImode
5853 if (GET_CODE (mem) == CONST)
5854 mem = XEXP (mem, 0);
5856 if (GET_CODE (mem) == LABEL_REF)
5857 return \"ldr\\t%0, %1\";
5859 if (GET_CODE (mem) == PLUS)
5861 rtx a = XEXP (mem, 0);
5862 rtx b = XEXP (mem, 1);
5864 if (GET_CODE (a) == LABEL_REF
5866 return \"ldr\\t%0, %1\";
5869 return \"ldrsh\\t%0, %1\";
5877 ops[2] = const0_rtx;
5880 gcc_assert (REG_P (ops[1]));
5882 ops[0] = operands[0];
5883 if (reg_mentioned_p (operands[2], ops[1]))
5886 ops[3] = operands[2];
5887 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5890 [(set_attr_alternative "length"
5891 [(if_then_else (eq_attr "is_arch6" "yes")
5892 (const_int 2) (const_int 4))
5894 (set_attr "type" "extend,load_byte")
5895 (set_attr "pool_range" "*,1018")]
5898 ;; This pattern will only be used when ldsh is not available
5899 (define_expand "extendhisi2_mem"
;; Synthesizes a sign-extended halfword load from two QImode loads when
;; no ldrsh instruction exists (pre-ARMv4): the two bytes are
;; zero-extended separately, the high byte is shifted left 24 then
;; arithmetic-shifted right 16, and or-ed with the low byte.  Operands
;; 4/5 are swapped for big-endian so the correct byte supplies the sign.
;; Several interior lines are missing from this listing.
5900 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5902 (zero_extend:SI (match_dup 7)))
5903 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5904 (set (match_operand:SI 0 "" "")
5905 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5910 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5912 mem1 = change_address (operands[1], QImode, addr);
5913 mem2 = change_address (operands[1], QImode,
5914 plus_constant (Pmode, addr, 1));
5915 operands[0] = gen_lowpart (SImode, operands[0]);
5917 operands[2] = gen_reg_rtx (SImode);
5918 operands[3] = gen_reg_rtx (SImode);
5919 operands[6] = gen_reg_rtx (SImode);
5922 if (BYTES_BIG_ENDIAN)
5924 operands[4] = operands[2];
5925 operands[5] = operands[3];
5929 operands[4] = operands[3];
5930 operands[5] = operands[2];
;; Split (opener and condition missing): register HI sign-extend into
;; shift-left 16 / arithmetic-shift-right 16 of the SImode subreg.
5936 [(set (match_operand:SI 0 "register_operand" "")
5937 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5939 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5940 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5942 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARM-mode HI sign-extend insns (templates missing from this listing).
;; Pre-v6 register alternative is 8 bytes (two shifts); memory uses a
;; halfword load with the indicated pool ranges.
5945 (define_insn "*arm_extendhisi2"
5946 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5947 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5948 "TARGET_ARM && arm_arch4 && !arm_arch6"
5952 [(set_attr "length" "8,4")
5953 (set_attr "type" "arlo_shift,load_byte")
5954 (set_attr "predicable" "yes")
5955 (set_attr "pool_range" "*,256")
5956 (set_attr "neg_pool_range" "*,244")]
5959 ;; ??? Check Thumb-2 pool range
5960 (define_insn "*arm_extendhisi2_v6"
5961 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5962 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5963 "TARGET_32BIT && arm_arch6"
5967 [(set_attr "type" "extend,load_byte")
5968 (set_attr "predicable" "yes")
5969 (set_attr "predicable_short_it" "no")
5970 (set_attr "pool_range" "*,256")
5971 (set_attr "neg_pool_range" "*,244")]
;; Combined sign-extend-halfword-and-add, emitted as a single sxtah.
;; Condition and attribute lines are missing from this listing.
5974 (define_insn "*arm_extendhisi2addsi"
5975 [(set (match_operand:SI 0 "s_register_operand" "=r")
5976 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5977 (match_operand:SI 2 "s_register_operand" "r")))]
5979 "sxtah%?\\t%0, %2, %1"
;; extendqihi2: QImode -> HImode sign extension.  With ARMv4 and a memory
;; source a direct sign-extending load is emitted; otherwise the source
;; is forced into a register and the extension done via the shift pair
;; in the expansion pattern (shift amounts missing from this listing).
5982 (define_expand "extendqihi2"
5984 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5986 (set (match_operand:HI 0 "s_register_operand" "")
5987 (ashiftrt:SI (match_dup 2)
5992 if (arm_arch4 && MEM_P (operands[1]))
5994 emit_insn (gen_rtx_SET (VOIDmode,
5996 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5999 if (!s_register_operand (operands[1], QImode))
6000 operands[1] = copy_to_mode_reg (QImode, operands[1]);
6001 operands[0] = gen_lowpart (SImode, operands[0]);
6002 operands[1] = gen_lowpart (SImode, operands[1]);
6003 operands[2] = gen_reg_rtx (SImode);
;; Sign-extending byte load to HImode; the Uq constraint restricts the
;; memory operand to addresses valid for ldrsb.
6007 (define_insn "*arm_extendqihi_insn"
6008 [(set (match_operand:HI 0 "s_register_operand" "=r")
6009 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
6010 "TARGET_ARM && arm_arch4"
6011 "ldr%(sb%)\\t%0, %1"
6012 [(set_attr "type" "load_byte")
6013 (set_attr "predicable" "yes")
6014 (set_attr "pool_range" "256")
6015 (set_attr "neg_pool_range" "244")]
;; extendqisi2: QImode -> SImode sign extension.  Pre-v4 memory sources
;; are copied to a register first (no ldrsb); pre-v6 register sources
;; use shift-left 24 then arithmetic-shift-right 24.
6018 (define_expand "extendqisi2"
6019 [(set (match_operand:SI 0 "s_register_operand" "")
6020 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
6023 if (!arm_arch4 && MEM_P (operands[1]))
6024 operands[1] = copy_to_mode_reg (QImode, operands[1]);
6026 if (!arm_arch6 && !MEM_P (operands[1]))
6028 rtx t = gen_lowpart (SImode, operands[1]);
6029 rtx tmp = gen_reg_rtx (SImode);
6030 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
6031 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split (opener and condition missing): register QI sign-extend into
;; the same shift-left 24 / arithmetic-shift-right 24 pair.
6037 [(set (match_operand:SI 0 "register_operand" "")
6038 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
6040 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
6041 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
6043 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARM-mode QI sign-extend insns (templates missing from this listing):
;; pre-v6 and v6 variants, distinguished by the usual arch conditions.
6046 (define_insn "*arm_extendqisi"
6047 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6048 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6049 "TARGET_ARM && arm_arch4 && !arm_arch6"
6053 [(set_attr "length" "8,4")
6054 (set_attr "type" "arlo_shift,load_byte")
6055 (set_attr "predicable" "yes")
6056 (set_attr "pool_range" "*,256")
6057 (set_attr "neg_pool_range" "*,244")]
6060 (define_insn "*arm_extendqisi_v6"
6061 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
6063 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
6064 "TARGET_ARM && arm_arch6"
6068 [(set_attr "type" "extend,load_byte")
6069 (set_attr "predicable" "yes")
6070 (set_attr "pool_range" "*,256")
6071 (set_attr "neg_pool_range" "*,244")]
;; Combined sign-extend-byte-and-add, emitted as a single sxtab.
;; Condition line missing from this listing.
6074 (define_insn "*arm_extendqisi2addsi"
6075 [(set (match_operand:SI 0 "s_register_operand" "=r")
6076 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
6077 (match_operand:SI 2 "s_register_operand" "r")))]
6079 "sxtab%?\\t%0, %2, %1"
6080 [(set_attr "type" "arlo_shift")
6081 (set_attr "predicable" "yes")
6082 (set_attr "predicable_short_it" "no")]
;; Thumb1 post-reload split (opener missing) for a sign-extending QI
;; load: rewrites the address into the reg+reg form that ldrsb needs,
;; loading the missing addend into operand 0 first.  The
;; reg_overlap_mentioned_p branch falls back to a plain byte move plus
;; register sign-extend when operand 0 is part of the address.
6086 [(set (match_operand:SI 0 "register_operand" "")
6087 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
6088 "TARGET_THUMB1 && reload_completed"
6089 [(set (match_dup 0) (match_dup 2))
6090 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
6092 rtx addr = XEXP (operands[1], 0);
6094 if (GET_CODE (addr) == CONST)
6095 addr = XEXP (addr, 0);
6097 if (GET_CODE (addr) == PLUS
6098 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6099 /* No split necessary.  */
6102 if (GET_CODE (addr) == PLUS
6103 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
6106 if (reg_overlap_mentioned_p (operands[0], addr))
6108 rtx t = gen_lowpart (QImode, operands[0]);
6109 emit_move_insn (t, operands[1]);
6110 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
6116 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
6117 operands[2] = const0_rtx;
6119 else if (GET_CODE (addr) != PLUS)
6121 else if (REG_P (XEXP (addr, 0)))
6123 operands[2] = XEXP (addr, 1);
6124 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
6128 operands[2] = XEXP (addr, 0);
6129 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
6132 operands[3] = change_address (operands[1], QImode, addr);
;; Peephole2 (opener missing) that recombines an add-constant, a zero
;; move and a sign-extending load into a constant load plus a reg+reg
;; ldrsb, provided the address registers die or equal the destination.
6136 [(set (match_operand:SI 0 "register_operand" "")
6137 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
6138 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
6139 (set (match_operand:SI 3 "register_operand" "")
6140 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
6142 && GET_CODE (XEXP (operands[4], 0)) == PLUS
6143 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
6144 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
6145 && (peep2_reg_dead_p (3, operands[0])
6146 || rtx_equal_p (operands[0], operands[3]))
6147 && (peep2_reg_dead_p (3, operands[2])
6148 || rtx_equal_p (operands[2], operands[3]))"
6149 [(set (match_dup 2) (match_dup 1))
6150 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
6152 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
6153 operands[4] = change_address (operands[4], QImode, addr);
;; Thumb1 QI sign-extend insn: sxtb for a register on v6, ldrsb for a
;; reg+reg address; remaining alternatives' code is missing from this
;; listing.
6156 (define_insn "thumb1_extendqisi2"
6157 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
6158 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
6163 if (which_alternative == 0 && arm_arch6)
6164 return "sxtb\\t%0, %1";
6165 if (which_alternative == 0)
6168 addr = XEXP (operands[1], 0);
6169 if (GET_CODE (addr) == PLUS
6170 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
6171 return "ldrsb\\t%0, %1";
6175 [(set_attr_alternative "length"
6176 [(if_then_else (eq_attr "is_arch6" "yes")
6177 (const_int 2) (const_int 4))
6179 (if_then_else (eq_attr "is_arch6" "yes")
6180 (const_int 4) (const_int 6))])
6181 (set_attr "type" "extend,load_byte,load_byte")]
;; Floating-point widening: SF -> DF (hardware FP, not single-only VFP),
;; and HF -> DF which must go through SFmode, per the comment below —
;; implemented as two convert_to_mode calls plus a movdf.
6184 (define_expand "extendsfdf2"
6185 [(set (match_operand:DF 0 "s_register_operand" "")
6186 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
6187 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6191 /* HFmode -> DFmode conversions have to go through SFmode.  */
6192 (define_expand "extendhfdf2"
6193 [(set (match_operand:DF 0 "general_operand" "")
6194 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
6199 op1 = convert_to_mode (SFmode, operands[1], 0);
6200 op1 = convert_to_mode (DFmode, op1, 0);
6201 emit_insn (gen_movdf (operands[0], op1));
6206 ;; Move insns (including loads and stores)
6208 ;; XXX Just some ideas about movti.
6209 ;; I don't think these are a good idea on the arm, there just aren't enough
6211 ;;(define_expand "loadti"
6212 ;; [(set (match_operand:TI 0 "s_register_operand" "")
6213 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
6216 ;;(define_expand "storeti"
6217 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
6218 ;; (match_operand:TI 1 "s_register_operand" ""))]
6221 ;;(define_expand "movti"
6222 ;; [(set (match_operand:TI 0 "general_operand" "")
6223 ;; (match_operand:TI 1 "general_operand" ""))]
6229 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
6230 ;; operands[1] = copy_to_reg (operands[1]);
6231 ;; if (MEM_P (operands[0]))
6232 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6233 ;; else if (MEM_P (operands[1]))
6234 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6238 ;; emit_insn (insn);
6242 ;; Recognize garbage generated above.
6245 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6246 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6250 ;; register mem = (which_alternative < 3);
6251 ;; register const char *template;
6253 ;; operands[mem] = XEXP (operands[mem], 0);
6254 ;; switch (which_alternative)
6256 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6257 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
6258 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
6259 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
6260 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
6261 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
6263 ;; output_asm_insn (template, operands);
;; movdi: when pseudos are still available and the destination is not a
;; register, force the source into a DImode register (no mem-to-mem or
;; const-to-mem moves).  Further branches of this expand are missing
;; from the listing.
6267 (define_expand "movdi"
6268 [(set (match_operand:DI 0 "general_operand" "")
6269 (match_operand:DI 1 "general_operand" ""))]
6272 if (can_create_pseudo_p ())
6274 if (!REG_P (operands[0]))
6275 operands[1] = force_reg (DImode, operands[1]);
;; ARM DI move insn for the non-VFP case; delegates the reg/mem
;; alternatives to output_move_double.  Constraint classes Da/Db/Dc
;; select constants of differing synthesis cost (lengths 8/12/16).
6280 (define_insn "*arm_movdi"
6281 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
6282 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
6284 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6286 && ( register_operand (operands[0], DImode)
6287 || register_operand (operands[1], DImode))"
6289 switch (which_alternative)
6296 return output_move_double (operands, true, NULL);
6299 [(set_attr "length" "8,12,16,8,8")
6300 (set_attr "type" "*,*,*,load2,store2")
6301 (set_attr "arm_pool_range" "*,*,*,1020,*")
6302 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6303 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6304 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split (opener missing) a 64-bit constant load into two
;; arm_split_constant calls (low word, high word) when the inline
;; synthesis cost is at most 3 insns (size-optimizing or load-scheduled)
;; or 4 otherwise.
6308 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6309 (match_operand:ANY64 1 "const_double_operand" ""))]
6312 && (arm_const_double_inline_cost (operands[1])
6313 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
6316 arm_split_constant (SET, SImode, curr_insn,
6317 INTVAL (gen_lowpart (SImode, operands[1])),
6318 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6319 arm_split_constant (SET, SImode, curr_insn,
6320 INTVAL (gen_highpart_mode (SImode,
6321 GET_MODE (operands[0]),
6323 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6328 ; If optimizing for size, or if we have load delay slots, then
6329 ; we want to split the constant into two separate operations.
6330 ; In both cases this may split a trivial part into a single data op
6331 ; leaving a single complex constant to load.  We can also get longer
6332 ; offsets in a LDR which means we get better chances of sharing the pool
6333 ; entries.  Finally, we can normally do a better job of scheduling
6334 ; LDR instructions than we can with LDM.
6335 ; This pattern will only match if the one above did not.
;; Fallback split for a 64-bit constant representable by parts: emit two
;; word-sized sets (low, then high).
6337 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6338 (match_operand:ANY64 1 "const_double_operand" ""))]
6339 "TARGET_ARM && reload_completed
6340 && arm_const_double_by_parts (operands[1])"
6341 [(set (match_dup 0) (match_dup 1))
6342 (set (match_dup 2) (match_dup 3))]
6344 operands[2] = gen_highpart (SImode, operands[0]);
6345 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6347 operands[0] = gen_lowpart (SImode, operands[0]);
6348 operands[1] = gen_lowpart (SImode, operands[1]);
;; Post-reload split of a 64-bit register-to-register move into two
;; 32-bit moves; if the first destination word would clobber the second
;; source word (partial overlap), the halves are swapped so the
;; overlapping word is moved first.
6353 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6354 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6355 "TARGET_EITHER && reload_completed"
6356 [(set (match_dup 0) (match_dup 1))
6357 (set (match_dup 2) (match_dup 3))]
6359 operands[2] = gen_highpart (SImode, operands[0]);
6360 operands[3] = gen_highpart (SImode, operands[1]);
6361 operands[0] = gen_lowpart (SImode, operands[0]);
6362 operands[1] = gen_lowpart (SImode, operands[1]);
6364 /* Handle a partial overlap.  */
6365 if (rtx_equal_p (operands[0], operands[3]))
6367 rtx tmp0 = operands[0];
6368 rtx tmp1 = operands[1];
6370 operands[0] = operands[2];
6371 operands[1] = operands[3];
6378 ;; We can't actually do base+index doubleword loads if the index and
6379 ;; destination overlap.  Split here so that we at least have chance to
;; Split (opener missing) a DI load from base+index when the destination
;; overlaps both address registers: compute the address into the
;; destination's first word, then load through it.
6382 [(set (match_operand:DI 0 "s_register_operand" "")
6383 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6384 (match_operand:SI 2 "s_register_operand" ""))))]
6386 && reg_overlap_mentioned_p (operands[0], operands[1])
6387 && reg_overlap_mentioned_p (operands[0], operands[2])"
6389 (plus:SI (match_dup 1)
6392 (mem:DI (match_dup 4)))]
6394 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6398 ;;; ??? This should have alternatives for constants.
6399 ;;; ??? This was originally identical to the movdf_insn pattern.
6400 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6401 ;;; thumb_reorg with a memory reference.
;; Thumb1 DI move: multi-alternative output choosing register-pair adds/
;; moves (ordered to cope with overlap, per the REGNO comparisons),
;; immediate synthesis (including the negate-then-asr sequence for the
;; J constraint), ldmia/stmia, and a two-str store.  Some case labels
;; and braces are missing from this listing.
6402 (define_insn "*thumb1_movdi_insn"
6403 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6404 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6406 && ( register_operand (operands[0], DImode)
6407 || register_operand (operands[1], DImode))"
6410 switch (which_alternative)
6414 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6415 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6416 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6418 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6420 operands[1] = GEN_INT (- INTVAL (operands[1]));
6421 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6423 return \"ldmia\\t%1, {%0, %H0}\";
6425 return \"stmia\\t%0, {%1, %H1}\";
6427 return thumb_load_double_from_address (operands);
6429 operands[2] = gen_rtx_MEM (SImode,
6430 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6431 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6434 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6435 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6436 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6439 [(set_attr "length" "4,4,6,2,2,6,4,4")
6440 (set_attr "type" "*,mov_reg,*,load2,store2,load2,store2,mov_reg")
6441 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
;; movsi: the main SImode move expander.  Visible responsibilities:
;; force mem destinations' sources into registers; split awkward
;; constants via arm_split_constant; handle section-anchored symbols
;; when ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P; legitimize TLS and PIC
;; references.  Several guard/brace lines are missing from the listing.
6444 (define_expand "movsi"
6445 [(set (match_operand:SI 0 "general_operand" "")
6446 (match_operand:SI 1 "general_operand" ""))]
6450 rtx base, offset, tmp;
6454 /* Everything except mem = const or mem = mem can be done easily.  */
6455 if (MEM_P (operands[0]))
6456 operands[1] = force_reg (SImode, operands[1]);
6457 if (arm_general_register_operand (operands[0], SImode)
6458 && CONST_INT_P (operands[1])
6459 && !(const_ok_for_arm (INTVAL (operands[1]))
6460 || const_ok_for_arm (~INTVAL (operands[1]))))
6462 arm_split_constant (SET, SImode, NULL_RTX,
6463 INTVAL (operands[1]), operands[0], NULL_RTX,
6464 optimize && can_create_pseudo_p ());
6468 else /* TARGET_THUMB1...  */
6470 if (can_create_pseudo_p ())
6472 if (!REG_P (operands[0]))
6473 operands[1] = force_reg (SImode, operands[1]);
6477 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6479 split_const (operands[1], &base, &offset);
6480 if (GET_CODE (base) == SYMBOL_REF
6481 && !offset_within_block_p (base, INTVAL (offset)))
6483 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6484 emit_move_insn (tmp, base);
6485 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6490 /* Recognize the case where operand[1] is a reference to thread-local
6491 data and load its address to a register.  */
6492 if (arm_tls_referenced_p (operands[1]))
6494 rtx tmp = operands[1];
6497 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6499 addend = XEXP (XEXP (tmp, 0), 1);
6500 tmp = XEXP (XEXP (tmp, 0), 0);
6503 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6504 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6506 tmp = legitimize_tls_address (tmp,
6507 !can_create_pseudo_p () ? operands[0] : 0);
6510 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6511 tmp = force_operand (tmp, operands[0]);
6516 && (CONSTANT_P (operands[1])
6517 || symbol_mentioned_p (operands[1])
6518 || label_mentioned_p (operands[1])))
6519 operands[1] = legitimize_pic_address (operands[1], SImode,
6520 (!can_create_pseudo_p ()
6527 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6528 ;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
6529 ;; so this does not matter.
;; movt writes the upper 16 bits of a register (#:upper16: relocation)
;; while operand 1 ("0" constraint) ties the low half to the destination.
6530 (define_insn "*arm_movt"
6531 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6532 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6533 (match_operand:SI 2 "general_operand" "i")))]
6535 "movt%?\t%0, #:upper16:%c2"
6536 [(set_attr "predicable" "yes")
6537 (set_attr "predicable_short_it" "no")
6538 (set_attr "length" "4")]
;; ARM-mode SI move (non-VFP, non-iWMMXt); the assembler alternatives
;; themselves are missing from this listing, but the type attribute
;; shows mov/mvn/mov-imm/load/store variants.
6541 (define_insn "*arm_movsi_insn"
6542 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6543 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6544 "TARGET_ARM && ! TARGET_IWMMXT
6545 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6546 && ( register_operand (operands[0], SImode)
6547 || register_operand (operands[1], SImode))"
6555 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load1,store1")
6556 (set_attr "predicable" "yes")
6557 (set_attr "pool_range" "*,*,*,*,4096,*")
6558 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split (opener missing) a constant that is valid for neither MOV nor
;; MVN into an arm_split_constant sequence; the pattern replaces itself
;; with a dummy clobber and emits the real insns from C.
6562 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6563 (match_operand:SI 1 "const_int_operand" ""))]
6565 && (!(const_ok_for_arm (INTVAL (operands[1]))
6566 || const_ok_for_arm (~INTVAL (operands[1]))))"
6567 [(clobber (const_int 0))]
6569 arm_split_constant (SET, SImode, NULL_RTX,
6570 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6575 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6576 ;; movt/movw pair directly at expand.  Otherwise corresponding high_sum
6577 ;; and lo_sum would be merged back into memory load at cprop.  However,
6578 ;; if the default is to prefer movt/movw rather than a load from the constant
6579 ;; pool, the performance is better.
6581 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6582 (match_operand:SI 1 "general_operand" ""))]
6584 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6585 && !flag_pic && !target_word_relocations
6586 && !arm_tls_referenced_p (operands[1])"
6587 [(clobber (const_int 0))]
6589 arm_emit_movpair (operands[0], operands[1]);
;; Thumb1 SI move insn; the assembler alternatives are missing from this
;; listing — attributes show 9 alternatives covering mov/ldr/str forms.
6593 (define_insn "*thumb1_movsi_insn"
6594 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6595 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6597 && ( register_operand (operands[0], SImode)
6598 || register_operand (operands[1], SImode))"
6609 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6610 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6611 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6612 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
;; Thumb1 constant-synthesis splits (openers missing from this listing).
;; J constraint: load the negated value then NEG it back.
6615 [(set (match_operand:SI 0 "register_operand" "")
6616 (match_operand:SI 1 "const_int_operand" ""))]
6617 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6618 [(set (match_dup 2) (match_dup 1))
6619 (set (match_dup 0) (neg:SI (match_dup 2)))]
6622 operands[1] = GEN_INT (- INTVAL (operands[1]));
6623 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
;; K constraint: find the byte value and shift count (loop bounds 0..24
;; via the < 25 test) such that value == byte << i, then emit
;; mov + lsl; the split is rejected when the shift would be zero.
6628 [(set (match_operand:SI 0 "register_operand" "")
6629 (match_operand:SI 1 "const_int_operand" ""))]
6630 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6631 [(set (match_dup 2) (match_dup 1))
6632 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6635 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6636 unsigned HOST_WIDE_INT mask = 0xff;
6639 for (i = 0; i < 25; i++)
6640 if ((val & (mask << i)) == val)
6643 /* Don't split if the shift is zero.  */
6647 operands[1] = GEN_INT (val >> i);
6648 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6649 operands[3] = GEN_INT (i);
6653 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
6655 [(set (match_operand:SI 0 "register_operand" "")
6656 (match_operand:SI 1 "const_int_operand" ""))]
6657 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6658 [(set (match_dup 2) (match_dup 1))
6659 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6662 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6663 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6664 operands[3] = GEN_INT (255);
;; calculate_pic_address: wraps the full PIC address computation
;; (pic-register + constant-pool offset, wrapped in an unspec) in one
;; pattern so PRE/HOIST can CSE it; a later split lowers it into an
;; UNSPEC_PIC_SYM pool load plus the actual memory load.
;; NOTE(review): the unspec name, insn condition, and closing lines of
;; both the expand and the split are missing from this extraction --
;; restore from upstream arm.md.
6668 ;; When generating pic, we need to load the symbol offset into a register.
6669 ;; So that the optimizer does not confuse this with a normal symbol load
6670 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6671 ;; since that is the only type of relocation we can use.
6673 ;; Wrap calculation of the whole PIC address in a single pattern for the
6674 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6675 ;; a PIC address involves two loads from memory, so we want to CSE it
6676 ;; as often as possible.
6677 ;; This pattern will be split into one of the pic_load_addr_* patterns
6678 ;; and a move after GCSE optimizations.
6680 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6681 (define_expand "calculate_pic_address"
6682 [(set (match_operand:SI 0 "register_operand" "")
6683 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6684 (unspec:SI [(match_operand:SI 2 "" "")]
6689 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6691 [(set (match_operand:SI 0 "register_operand" "")
6692 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6693 (unspec:SI [(match_operand:SI 2 "" "")]
6696 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6697 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6698 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
;; pic_load_addr_unified: one hoistable insn that, after reload, splits
;; into an UNSPEC_PIC_SYM constant-pool load followed by an
;; UNSPEC_PIC_BASE pc-relative add.  The pc correction passed as
;; operand 3 is 4 for Thumb and 8 for ARM (pipeline offset).
;; NOTE(review): the insn condition and output template lines are
;; missing from this extraction -- restore from upstream arm.md.
6701 ;; operand1 is the memory address to go into
6702 ;; pic_load_addr_32bit.
6703 ;; operand2 is the PIC label to be emitted
6704 ;; from pic_add_dot_plus_eight.
6705 ;; We do this to allow hoisting of the entire insn.
6706 (define_insn_and_split "pic_load_addr_unified"
6707 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6708 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6709 (match_operand:SI 2 "" "")]
6710 UNSPEC_PIC_UNIFIED))]
6713 "&& reload_completed"
6714 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6715 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6716 (match_dup 2)] UNSPEC_PIC_BASE))]
6717 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6718 [(set_attr "type" "load1,load1,load1")
6719 (set_attr "pool_range" "4096,4094,1022")
6720 (set_attr "neg_pool_range" "4084,0,0")
6721 (set_attr "arch" "a,t2,t1")
6722 (set_attr "length" "8,6,4")]
;; pic_load_addr_32bit / pic_load_addr_thumb1: load a PIC symbol offset
;; from the constant pool (UNSPEC_PIC_SYM).  The "mX" constraint keeps
;; reload from touching the insn so the minipool pass can place the
;; literal.  NOTE(review): the output templates and pool_range values
;; after the if_then_else are missing from this extraction.
6725 ;; The rather odd constraints on the following are to force reload to leave
6726 ;; the insn alone, and to force the minipool generation pass to then move
6727 ;; the GOT symbol to memory.
6729 (define_insn "pic_load_addr_32bit"
6730 [(set (match_operand:SI 0 "s_register_operand" "=r")
6731 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6732 "TARGET_32BIT && flag_pic"
6734 [(set_attr "type" "load1")
6735 (set (attr "pool_range")
6736 (if_then_else (eq_attr "is_thumb" "no")
6739 (set (attr "neg_pool_range")
6740 (if_then_else (eq_attr "is_thumb" "no")
6745 (define_insn "pic_load_addr_thumb1"
6746 [(set (match_operand:SI 0 "s_register_operand" "=l")
6747 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6748 "TARGET_THUMB1 && flag_pic"
6750 [(set_attr "type" "load1")
6751 (set (attr "pool_range") (const_int 1018))]
;; pic_add_dot_plus_four / pic_add_dot_plus_eight /
;; tls_load_dot_plus_eight: emit the local LPICn label at the insn and
;; add (or load relative to) the pc to finish a PIC/TLS address.  The
;; four/eight distinction matches the Thumb/ARM pc read-ahead.
;; NOTE(review): the unspec operands, insn conditions, and the "*..."
;; output-template openers are missing from this extraction.
6754 (define_insn "pic_add_dot_plus_four"
6755 [(set (match_operand:SI 0 "register_operand" "=r")
6756 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6758 (match_operand 2 "" "")]
6762 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6763 INTVAL (operands[2]));
6764 return \"add\\t%0, %|pc\";
6766 [(set_attr "length" "2")]
6769 (define_insn "pic_add_dot_plus_eight"
6770 [(set (match_operand:SI 0 "register_operand" "=r")
6771 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6773 (match_operand 2 "" "")]
6777 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6778 INTVAL (operands[2]));
6779 return \"add%?\\t%0, %|pc, %1\";
6781 [(set_attr "predicable" "yes")]
6784 (define_insn "tls_load_dot_plus_eight"
6785 [(set (match_operand:SI 0 "register_operand" "=r")
6786 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6788 (match_operand 2 "" "")]
6792 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6793 INTVAL (operands[2]));
6794 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6796 [(set_attr "predicable" "yes")]
;; Peephole2: collapse pic_add_dot_plus_eight followed by a dependent
;; load (when the intermediate register dies) into a single
;; tls_load_dot_plus_eight.  pic_offset_arm is the VxWorks-RTP PIC
;; offset load; builtin_setjmp_receiver reloads the PIC register
;; (r3 used as scratch) after a longjmp.  NOTE(review): unspec names,
;; replacement pattern tail, and closings are missing from this
;; extraction.
6799 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6800 ;; followed by a load. These sequences can be crunched down to
6801 ;; tls_load_dot_plus_eight by a peephole.
6804 [(set (match_operand:SI 0 "register_operand" "")
6805 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6807 (match_operand 1 "" "")]
6809 (set (match_operand:SI 2 "arm_general_register_operand" "")
6810 (mem:SI (match_dup 0)))]
6811 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6813 (mem:SI (unspec:SI [(match_dup 3)
6820 (define_insn "pic_offset_arm"
6821 [(set (match_operand:SI 0 "register_operand" "=r")
6822 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6823 (unspec:SI [(match_operand:SI 2 "" "X")]
6824 UNSPEC_PIC_OFFSET))))]
6825 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6826 "ldr%?\\t%0, [%1,%2]"
6827 [(set_attr "type" "load1")]
6830 (define_expand "builtin_setjmp_receiver"
6831 [(label_ref (match_operand 0 "" ""))]
6835 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6837 if (arm_pic_register != INVALID_REGNUM)
6838 arm_load_pic_register (1UL << 3);
;; *movsi_compare0: combine a register move with a compare against
;; zero, setting the condition codes from the moved value (conds=set).
;; NOTE(review): the comparison's zero operand, insn condition, and
;; output templates are missing from this extraction.
6842 ;; If copying one reg to another we can set the condition codes according to
6843 ;; its value. Such a move is common after a return from subroutine and the
6844 ;; result is being tested against zero.
6846 (define_insn "*movsi_compare0"
6847 [(set (reg:CC CC_REGNUM)
6848 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6850 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6856 [(set_attr "conds" "set")
6857 (set_attr "type" "arlo_imm,arlo_imm")]
;; storehi / storehi_bigend: store an HImode value as two QImode byte
;; stores (low byte, then high byte extracted with ashiftrt 8), forcing
;; reg+large_const addresses into a register first to keep the rtl
;; recognizable.  The bigend variant swaps which byte lands at which
;; address.  NOTE(review): condition lines and the second disjunct of
;; the address test are missing from this extraction.
6860 ;; Subroutine to store a half word from a register into memory.
6861 ;; Operand 0 is the source register (HImode)
6862 ;; Operand 1 is the destination address in a register (SImode)
6864 ;; In both this routine and the next, we must be careful not to spill
6865 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6866 ;; can generate unrecognizable rtl.
6868 (define_expand "storehi"
6869 [;; store the low byte
6870 (set (match_operand 1 "" "") (match_dup 3))
6871 ;; extract the high byte
6873 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6874 ;; store the high byte
6875 (set (match_dup 4) (match_dup 5))]
6879 rtx op1 = operands[1];
6880 rtx addr = XEXP (op1, 0);
6881 enum rtx_code code = GET_CODE (addr);
6883 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6885 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6887 operands[4] = adjust_address (op1, QImode, 1);
6888 operands[1] = adjust_address (operands[1], QImode, 0);
6889 operands[3] = gen_lowpart (QImode, operands[0]);
6890 operands[0] = gen_lowpart (SImode, operands[0]);
6891 operands[2] = gen_reg_rtx (SImode);
6892 operands[5] = gen_lowpart (QImode, operands[2]);
6896 (define_expand "storehi_bigend"
6897 [(set (match_dup 4) (match_dup 3))
6899 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6900 (set (match_operand 1 "" "") (match_dup 5))]
6904 rtx op1 = operands[1];
6905 rtx addr = XEXP (op1, 0);
6906 enum rtx_code code = GET_CODE (addr);
6908 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6910 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6912 operands[4] = adjust_address (op1, QImode, 1);
6913 operands[1] = adjust_address (operands[1], QImode, 0);
6914 operands[3] = gen_lowpart (QImode, operands[0]);
6915 operands[0] = gen_lowpart (SImode, operands[0]);
6916 operands[2] = gen_reg_rtx (SImode);
6917 operands[5] = gen_lowpart (QImode, operands[2]);
;; storeinthi: store an HImode integer constant as two byte stores,
;; reusing one register when both bytes are equal; byte order follows
;; BYTES_BIG_ENDIAN.  storehi_single_op: single STRH when arm_arch4
;; is available.  NOTE(review): else-branch braces and condition lines
;; are missing from this extraction.
6921 ;; Subroutine to store a half word integer constant into memory.
6922 (define_expand "storeinthi"
6923 [(set (match_operand 0 "" "")
6924 (match_operand 1 "" ""))
6925 (set (match_dup 3) (match_dup 2))]
6929 HOST_WIDE_INT value = INTVAL (operands[1]);
6930 rtx addr = XEXP (operands[0], 0);
6931 rtx op0 = operands[0];
6932 enum rtx_code code = GET_CODE (addr);
6934 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6936 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6938 operands[1] = gen_reg_rtx (SImode);
6939 if (BYTES_BIG_ENDIAN)
6941 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6942 if ((value & 255) == ((value >> 8) & 255))
6943 operands[2] = operands[1];
6946 operands[2] = gen_reg_rtx (SImode);
6947 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6952 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6953 if ((value & 255) == ((value >> 8) & 255))
6954 operands[2] = operands[1];
6957 operands[2] = gen_reg_rtx (SImode);
6958 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6962 operands[3] = adjust_address (op0, QImode, 1);
6963 operands[0] = adjust_address (operands[0], QImode, 0);
6964 operands[2] = gen_lowpart (QImode, operands[2]);
6965 operands[1] = gen_lowpart (QImode, operands[1]);
6969 (define_expand "storehi_single_op"
6970 [(set (match_operand:HI 0 "memory_operand" "")
6971 (match_operand:HI 1 "general_operand" ""))]
6972 "TARGET_32BIT && arm_arch4"
6974 if (!s_register_operand (operands[1], HImode))
6975 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; movhi: the HImode move expander.  Dispatches on target
;; (ARM / Thumb-2 / Thumb-1) and operand kinds: memory stores go via
;; storehi_single_op / storeinthi / storehi(_bigend); constants are
;; sign- or zero-extended into an SImode register; pre-arch4 memory
;; loads are widened to aligned SImode loads and shifted, or routed
;; through movhi_bytes.  NOTE(review): many brace/condition lines of
;; the embedded C are missing from this extraction -- do not edit the
;; C body without restoring the upstream text first.
6979 (define_expand "movhi"
6980 [(set (match_operand:HI 0 "general_operand" "")
6981 (match_operand:HI 1 "general_operand" ""))]
6986 if (can_create_pseudo_p ())
6988 if (MEM_P (operands[0]))
6992 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6995 if (CONST_INT_P (operands[1]))
6996 emit_insn (gen_storeinthi (operands[0], operands[1]));
6999 if (MEM_P (operands[1]))
7000 operands[1] = force_reg (HImode, operands[1]);
7001 if (BYTES_BIG_ENDIAN)
7002 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
7004 emit_insn (gen_storehi (operands[1], operands[0]));
7008 /* Sign extend a constant, and keep it in an SImode reg. */
7009 else if (CONST_INT_P (operands[1]))
7011 rtx reg = gen_reg_rtx (SImode);
7012 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7014 /* If the constant is already valid, leave it alone. */
7015 if (!const_ok_for_arm (val))
7017 /* If setting all the top bits will make the constant
7018 loadable in a single instruction, then set them.
7019 Otherwise, sign extend the number. */
7021 if (const_ok_for_arm (~(val | ~0xffff)))
7023 else if (val & 0x8000)
7027 emit_insn (gen_movsi (reg, GEN_INT (val)));
7028 operands[1] = gen_lowpart (HImode, reg);
7030 else if (arm_arch4 && optimize && can_create_pseudo_p ()
7031 && MEM_P (operands[1]))
7033 rtx reg = gen_reg_rtx (SImode);
7035 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7036 operands[1] = gen_lowpart (HImode, reg);
7038 else if (!arm_arch4)
7040 if (MEM_P (operands[1]))
7043 rtx offset = const0_rtx;
7044 rtx reg = gen_reg_rtx (SImode);
7046 if ((REG_P (base = XEXP (operands[1], 0))
7047 || (GET_CODE (base) == PLUS
7048 && (CONST_INT_P (offset = XEXP (base, 1)))
7049 && ((INTVAL(offset) & 1) != 1)
7050 && REG_P (base = XEXP (base, 0))))
7051 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
7055 new_rtx = widen_memory_access (operands[1], SImode,
7056 ((INTVAL (offset) & ~3)
7057 - INTVAL (offset)));
7058 emit_insn (gen_movsi (reg, new_rtx));
7059 if (((INTVAL (offset) & 2) != 0)
7060 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
7062 rtx reg2 = gen_reg_rtx (SImode);
7064 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
7069 emit_insn (gen_movhi_bytes (reg, operands[1]));
7071 operands[1] = gen_lowpart (HImode, reg);
7075 /* Handle loading a large integer during reload. */
7076 else if (CONST_INT_P (operands[1])
7077 && !const_ok_for_arm (INTVAL (operands[1]))
7078 && !const_ok_for_arm (~INTVAL (operands[1])))
7080 /* Writing a constant to memory needs a scratch, which should
7081 be handled with SECONDARY_RELOADs. */
7082 gcc_assert (REG_P (operands[0]));
7084 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7085 emit_insn (gen_movsi (operands[0], operands[1]));
7089 else if (TARGET_THUMB2)
7091 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
7092 if (can_create_pseudo_p ())
7094 if (!REG_P (operands[0]))
7095 operands[1] = force_reg (HImode, operands[1]);
7096 /* Zero extend a constant, and keep it in an SImode reg. */
7097 else if (CONST_INT_P (operands[1]))
7099 rtx reg = gen_reg_rtx (SImode);
7100 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
7102 emit_insn (gen_movsi (reg, GEN_INT (val)));
7103 operands[1] = gen_lowpart (HImode, reg);
7107 else /* TARGET_THUMB1 */
7109 if (can_create_pseudo_p ())
7111 if (CONST_INT_P (operands[1]))
7113 rtx reg = gen_reg_rtx (SImode);
7115 emit_insn (gen_movsi (reg, operands[1]));
7116 operands[1] = gen_lowpart (HImode, reg);
7119 /* ??? We shouldn't really get invalid addresses here, but this can
7120 happen if we are passed a SP (never OK for HImode/QImode) or
7121 virtual register (also rejected as illegitimate for HImode/QImode)
7122 relative address. */
7123 /* ??? This should perhaps be fixed elsewhere, for instance, in
7124 fixup_stack_1, by checking for other kinds of invalid addresses,
7125 e.g. a bare reference to a virtual register. This may confuse the
7126 alpha though, which must handle this case differently. */
7127 if (MEM_P (operands[0])
7128 && !memory_address_p (GET_MODE (operands[0]),
7129 XEXP (operands[0], 0)))
7131 = replace_equiv_address (operands[0],
7132 copy_to_reg (XEXP (operands[0], 0)));
7134 if (MEM_P (operands[1])
7135 && !memory_address_p (GET_MODE (operands[1]),
7136 XEXP (operands[1], 0)))
7138 = replace_equiv_address (operands[1],
7139 copy_to_reg (XEXP (operands[1], 0)));
7141 if (MEM_P (operands[1]) && optimize > 0)
7143 rtx reg = gen_reg_rtx (SImode);
7145 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7146 operands[1] = gen_lowpart (HImode, reg);
7149 if (MEM_P (operands[0]))
7150 operands[1] = force_reg (HImode, operands[1]);
7152 else if (CONST_INT_P (operands[1])
7153 && !satisfies_constraint_I (operands[1]))
7155 /* Handle loading a large integer during reload. */
7157 /* Writing a constant to memory needs a scratch, which should
7158 be handled with SECONDARY_RELOADs. */
7159 gcc_assert (REG_P (operands[0]));
7161 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7162 emit_insn (gen_movsi (operands[0], operands[1]));
;; *thumb1_movhi_insn: Thumb1 HImode move alternatives (reg/reg via
;; ADD #0, LDRH/STRH for memory, MOV for hi/lo regs and immediates);
;; the load case special-cases an SP-based index by copying SP into
;; the destination first.  movhi_bytes loads an HImode value as two
;; zero-extended byte loads and ORs them, honouring BYTES_BIG_ENDIAN;
;; movhi_bigend rotates an SImode load into place.  NOTE(review):
;; condition, case-1 dispatch, and several brace lines are missing
;; from this extraction.
7169 (define_insn "*thumb1_movhi_insn"
7170 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7171 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
7173 && ( register_operand (operands[0], HImode)
7174 || register_operand (operands[1], HImode))"
7176 switch (which_alternative)
7178 case 0: return \"add %0, %1, #0\";
7179 case 2: return \"strh %1, %0\";
7180 case 3: return \"mov %0, %1\";
7181 case 4: return \"mov %0, %1\";
7182 case 5: return \"mov %0, %1\";
7183 default: gcc_unreachable ();
7185 /* The stack pointer can end up being taken as an index register.
7186 Catch this case here and deal with it. */
7187 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
7188 && REG_P (XEXP (XEXP (operands[1], 0), 0))
7189 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
7192 ops[0] = operands[0];
7193 ops[1] = XEXP (XEXP (operands[1], 0), 0);
7195 output_asm_insn (\"mov %0, %1\", ops);
7197 XEXP (XEXP (operands[1], 0), 0) = operands[0];
7200 return \"ldrh %0, %1\";
7202 [(set_attr "length" "2,4,2,2,2,2")
7203 (set_attr "type" "*,load1,store1,*,*,*")
7204 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7207 (define_expand "movhi_bytes"
7208 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7210 (zero_extend:SI (match_dup 6)))
7211 (set (match_operand:SI 0 "" "")
7212 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7217 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7219 mem1 = change_address (operands[1], QImode, addr);
7220 mem2 = change_address (operands[1], QImode,
7221 plus_constant (Pmode, addr, 1));
7222 operands[0] = gen_lowpart (SImode, operands[0]);
7224 operands[2] = gen_reg_rtx (SImode);
7225 operands[3] = gen_reg_rtx (SImode);
7228 if (BYTES_BIG_ENDIAN)
7230 operands[4] = operands[2];
7231 operands[5] = operands[3];
7235 operands[4] = operands[3];
7236 operands[5] = operands[2];
7241 (define_expand "movhi_bigend"
7243 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
7246 (ashiftrt:SI (match_dup 2) (const_int 16)))
7247 (set (match_operand:HI 0 "s_register_operand" "")
7251 operands[2] = gen_reg_rtx (SImode);
7252 operands[3] = gen_reg_rtx (SImode);
7253 operands[4] = gen_lowpart (HImode, operands[3]);
;; *movhi_insn_arch4: arch4+ HImode move via MOV/MVN/STRH/LDRH with a
;; 256-byte positive pool range for the literal-load alternative.
;; *movhi_bytes: register/immediate HImode moves only (no memory).
;; thumb_movhi_clobber: reload helper that forwards to movhi when the
;; address is strict and the source is a lo reg; other cases are
;; marked XXX unfinished upstream.  NOTE(review): condition lines and
;; some closings are missing from this extraction.
7257 ;; Pattern to recognize insn generated default case above
7258 (define_insn "*movhi_insn_arch4"
7259 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
7260 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
7263 && (register_operand (operands[0], HImode)
7264 || register_operand (operands[1], HImode))"
7266 mov%?\\t%0, %1\\t%@ movhi
7267 mvn%?\\t%0, #%B1\\t%@ movhi
7268 str%(h%)\\t%1, %0\\t%@ movhi
7269 ldr%(h%)\\t%0, %1\\t%@ movhi"
7270 [(set_attr "predicable" "yes")
7271 (set_attr "pool_range" "*,*,*,256")
7272 (set_attr "neg_pool_range" "*,*,*,244")
7273 (set_attr_alternative "type"
7274 [(if_then_else (match_operand 1 "const_int_operand" "")
7275 (const_string "mov_imm" )
7276 (const_string "mov_reg"))
7277 (const_string "mvn_imm")
7278 (const_string "store1")
7279 (const_string "load1")])]
7282 (define_insn "*movhi_bytes"
7283 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7284 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
7287 mov%?\\t%0, %1\\t%@ movhi
7288 mov%?\\t%0, %1\\t%@ movhi
7289 mvn%?\\t%0, #%B1\\t%@ movhi"
7290 [(set_attr "predicable" "yes")
7291 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7294 (define_expand "thumb_movhi_clobber"
7295 [(set (match_operand:HI 0 "memory_operand" "")
7296 (match_operand:HI 1 "register_operand" ""))
7297 (clobber (match_operand:DI 2 "register_operand" ""))]
7300 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
7301 && REGNO (operands[1]) <= LAST_LO_REGNUM)
7303 emit_insn (gen_movhi (operands[0], operands[1]));
7306 /* XXX Fixme, need to handle other cases here as well. */
;; reload_outhi / reload_inhi: secondary-reload expanders for HImode
;; memory with a DImode scratch (the extra word covers non-offsettable
;; addresses).  The ARM path calls arm_reload_out_hi/arm_reload_in_hi;
;; the Thumb path uses thumb_reload_out_hi for both directions, which
;; matches upstream arm.md (not a typo here).  NOTE(review): the
;; TARGET_ARM/else dispatch lines are missing from this extraction.
7311 ;; We use a DImode scratch because we may occasionally need an additional
7312 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
7313 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
7314 (define_expand "reload_outhi"
7315 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7316 (match_operand:HI 1 "s_register_operand" "r")
7317 (match_operand:DI 2 "s_register_operand" "=&l")])]
7320 arm_reload_out_hi (operands);
7322 thumb_reload_out_hi (operands);
7327 (define_expand "reload_inhi"
7328 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7329 (match_operand:HI 1 "arm_reload_memory_operand" "o")
7330 (match_operand:DI 2 "s_register_operand" "=&r")])]
7334 arm_reload_in_hi (operands);
7336 thumb_reload_out_hi (operands);
;; movqi: QImode move expander.  Constants go through an SImode
;; register (masked to 8 bits for Thumb so MOVS immediates apply);
;; invalid addresses are legitimized by copying to a register;
;; optimized memory loads are zero-extended; mem destinations force
;; the source into a register.  The reload tail handles large
;; constants via an SImode subreg.  NOTE(review): several brace and
;; condition lines of the embedded C are missing from this extraction.
7340 (define_expand "movqi"
7341 [(set (match_operand:QI 0 "general_operand" "")
7342 (match_operand:QI 1 "general_operand" ""))]
7345 /* Everything except mem = const or mem = mem can be done easily */
7347 if (can_create_pseudo_p ())
7349 if (CONST_INT_P (operands[1]))
7351 rtx reg = gen_reg_rtx (SImode);
7353 /* For thumb we want an unsigned immediate, then we are more likely
7354 to be able to use a movs insn. */
7356 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7358 emit_insn (gen_movsi (reg, operands[1]));
7359 operands[1] = gen_lowpart (QImode, reg);
7364 /* ??? We shouldn't really get invalid addresses here, but this can
7365 happen if we are passed a SP (never OK for HImode/QImode) or
7366 virtual register (also rejected as illegitimate for HImode/QImode)
7367 relative address. */
7368 /* ??? This should perhaps be fixed elsewhere, for instance, in
7369 fixup_stack_1, by checking for other kinds of invalid addresses,
7370 e.g. a bare reference to a virtual register. This may confuse the
7371 alpha though, which must handle this case differently. */
7372 if (MEM_P (operands[0])
7373 && !memory_address_p (GET_MODE (operands[0]),
7374 XEXP (operands[0], 0)))
7376 = replace_equiv_address (operands[0],
7377 copy_to_reg (XEXP (operands[0], 0)));
7378 if (MEM_P (operands[1])
7379 && !memory_address_p (GET_MODE (operands[1]),
7380 XEXP (operands[1], 0)))
7382 = replace_equiv_address (operands[1],
7383 copy_to_reg (XEXP (operands[1], 0)));
7386 if (MEM_P (operands[1]) && optimize > 0)
7388 rtx reg = gen_reg_rtx (SImode);
7390 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7391 operands[1] = gen_lowpart (QImode, reg);
7394 if (MEM_P (operands[0]))
7395 operands[1] = force_reg (QImode, operands[1]);
7397 else if (TARGET_THUMB
7398 && CONST_INT_P (operands[1])
7399 && !satisfies_constraint_I (operands[1]))
7401 /* Handle loading a large integer during reload. */
7403 /* Writing a constant to memory needs a scratch, which should
7404 be handled with SECONDARY_RELOADs. */
7405 gcc_assert (REG_P (operands[0]));
7407 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7408 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn / *thumb1_movqi_insn: QImode move alternatives.
;; The ARM/Thumb2 pattern mixes MOV/MVN and byte loads/stores with
;; per-alternative arch gating (t2 vs any) and short-it predicability;
;; the Thumb1 pattern mirrors *thumb1_movhi_insn.  NOTE(review): the
;; output-template lines for both insns are missing from this
;; extraction -- restore from upstream arm.md.
7414 (define_insn "*arm_movqi_insn"
7415 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7416 (match_operand:QI 1 "general_operand" "r,r,I,Py,K,Uu,l,m,r"))]
7418 && ( register_operand (operands[0], QImode)
7419 || register_operand (operands[1], QImode))"
7430 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load1,store1,load1,store1")
7431 (set_attr "predicable" "yes")
7432 (set_attr "predicable_short_it" "yes,yes,yes,no,no,no,no,no,no")
7433 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7434 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7437 (define_insn "*thumb1_movqi_insn"
7438 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7439 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7441 && ( register_operand (operands[0], QImode)
7442 || register_operand (operands[1], QImode))"
7450 [(set_attr "length" "2")
7451 (set_attr "type" "arlo_imm,load1,store1,mov_reg,mov_imm,mov_imm")
7452 (set_attr "pool_range" "*,32,*,*,*,*")
7453 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
;; movhf and its insns: HFmode (__fp16) moves without FP16 hardware.
;; *arm32_movhf emits LDRH/STRH/MOV, and materializes constants by
;; converting the real to its 16-bit image: MOVW when Thumb2-capable,
;; otherwise MOV of the high byte plus ORR of the low byte.
;; *thumb1_movhf distinguishes constant-pool loads (full-word LDR for
;; label-relative addresses) from plain LDRH.  NOTE(review): some
;; condition, brace, and case-opening lines are missing from this
;; extraction.
7456 (define_expand "movhf"
7457 [(set (match_operand:HF 0 "general_operand" "")
7458 (match_operand:HF 1 "general_operand" ""))]
7463 if (MEM_P (operands[0]))
7464 operands[1] = force_reg (HFmode, operands[1]);
7466 else /* TARGET_THUMB1 */
7468 if (can_create_pseudo_p ())
7470 if (!REG_P (operands[0]))
7471 operands[1] = force_reg (HFmode, operands[1]);
7477 (define_insn "*arm32_movhf"
7478 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7479 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7480 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16) && !arm_restrict_it
7481 && ( s_register_operand (operands[0], HFmode)
7482 || s_register_operand (operands[1], HFmode))"
7484 switch (which_alternative)
7486 case 0: /* ARM register from memory */
7487 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7488 case 1: /* memory from ARM register */
7489 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7490 case 2: /* ARM register from ARM register */
7491 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7492 case 3: /* ARM register from constant */
7498 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7499 bits = real_to_target (NULL, &r, HFmode);
7500 ops[0] = operands[0];
7501 ops[1] = GEN_INT (bits);
7502 ops[2] = GEN_INT (bits & 0xff00);
7503 ops[3] = GEN_INT (bits & 0x00ff);
7505 if (arm_arch_thumb2)
7506 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7508 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7515 [(set_attr "conds" "unconditional")
7516 (set_attr "type" "load1,store1,mov_reg,mov_reg")
7517 (set_attr "length" "4,4,4,8")
7518 (set_attr "predicable" "yes")]
7521 (define_insn "*thumb1_movhf"
7522 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7523 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7525 && ( s_register_operand (operands[0], HFmode)
7526 || s_register_operand (operands[1], HFmode))"
7528 switch (which_alternative)
7533 gcc_assert (MEM_P (operands[1]));
7534 addr = XEXP (operands[1], 0);
7535 if (GET_CODE (addr) == LABEL_REF
7536 || (GET_CODE (addr) == CONST
7537 && GET_CODE (XEXP (addr, 0)) == PLUS
7538 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7539 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7541 /* Constant pool entry. */
7542 return \"ldr\\t%0, %1\";
7544 return \"ldrh\\t%0, %1\";
7546 case 2: return \"strh\\t%1, %0\";
7547 default: return \"mov\\t%0, %1\";
7550 [(set_attr "length" "2")
7551 (set_attr "type" "mov_reg,load1,store1,mov_reg,mov_reg")
7552 (set_attr "pool_range" "*,1018,*,*,*")
7553 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
;; movsf and soft-float SF insns.  The expander forces memory-store
;; sources into registers; a split turns an SF constant move into a
;; core-register SImode move (gen_lowpart of both sides);
;; *arm_movsf_soft_insn handles soft-float MOV/LDR/STR with per-ISA
;; pool ranges; *thumb1_movsf_insn is the Thumb1 variant.
;; NOTE(review): the "(define_split" opener, FAIL branch, and output
;; templates are missing from this extraction.
7555 (define_expand "movsf"
7556 [(set (match_operand:SF 0 "general_operand" "")
7557 (match_operand:SF 1 "general_operand" ""))]
7562 if (MEM_P (operands[0]))
7563 operands[1] = force_reg (SFmode, operands[1]);
7565 else /* TARGET_THUMB1 */
7567 if (can_create_pseudo_p ())
7569 if (!REG_P (operands[0]))
7570 operands[1] = force_reg (SFmode, operands[1]);
7576 ;; Transform a floating-point move of a constant into a core register into
7577 ;; an SImode operation.
7579 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7580 (match_operand:SF 1 "immediate_operand" ""))]
7583 && CONST_DOUBLE_P (operands[1])"
7584 [(set (match_dup 2) (match_dup 3))]
7586 operands[2] = gen_lowpart (SImode, operands[0]);
7587 operands[3] = gen_lowpart (SImode, operands[1]);
7588 if (operands[2] == 0 || operands[3] == 0)
7593 (define_insn "*arm_movsf_soft_insn"
7594 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7595 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7597 && TARGET_SOFT_FLOAT
7598 && (!MEM_P (operands[0])
7599 || register_operand (operands[1], SFmode))"
7602 ldr%?\\t%0, %1\\t%@ float
7603 str%?\\t%1, %0\\t%@ float"
7604 [(set_attr "predicable" "yes")
7605 (set_attr "predicable_short_it" "no")
7606 (set_attr "type" "mov_reg,load1,store1")
7607 (set_attr "arm_pool_range" "*,4096,*")
7608 (set_attr "thumb2_pool_range" "*,4094,*")
7609 (set_attr "arm_neg_pool_range" "*,4084,*")
7610 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7613 ;;; ??? This should have alternatives for constants.
7614 (define_insn "*thumb1_movsf_insn"
7615 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7616 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7618 && ( register_operand (operands[0], SFmode)
7619 || register_operand (operands[1], SFmode))"
7628 [(set_attr "length" "2")
7629 (set_attr "type" "*,load1,store1,load1,store1,mov_reg,mov_reg")
7630 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7631 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
;; movdf and soft-float DF insns.  reload_outdf handles the
;; non-offsettable-address cases (POST_INC/PRE_DEC via DImode subreg
;; moves, PRE_INC by pre-adjusting the base by 8, POST_DEC by storing
;; then subtracting 8 from the scratch).  *movdf_soft_insn emits
;; double-word moves via output_move_double; *thumb_movdf_insn orders
;; the two register halves to avoid clobbering an overlapping source.
;; NOTE(review): dispatch/brace lines of the embedded C and several
;; case labels are missing from this extraction.
7634 (define_expand "movdf"
7635 [(set (match_operand:DF 0 "general_operand" "")
7636 (match_operand:DF 1 "general_operand" ""))]
7641 if (MEM_P (operands[0]))
7642 operands[1] = force_reg (DFmode, operands[1]);
7644 else /* TARGET_THUMB */
7646 if (can_create_pseudo_p ())
7648 if (!REG_P (operands[0]))
7649 operands[1] = force_reg (DFmode, operands[1]);
7655 ;; Reloading a df mode value stored in integer regs to memory can require a
7657 (define_expand "reload_outdf"
7658 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7659 (match_operand:DF 1 "s_register_operand" "r")
7660 (match_operand:SI 2 "s_register_operand" "=&r")]
7664 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7667 operands[2] = XEXP (operands[0], 0);
7668 else if (code == POST_INC || code == PRE_DEC)
7670 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7671 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7672 emit_insn (gen_movdi (operands[0], operands[1]));
7675 else if (code == PRE_INC)
7677 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7679 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7682 else if (code == POST_DEC)
7683 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7685 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7686 XEXP (XEXP (operands[0], 0), 1)));
7688 emit_insn (gen_rtx_SET (VOIDmode,
7689 replace_equiv_address (operands[0], operands[2]),
7692 if (code == POST_DEC)
7693 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7699 (define_insn "*movdf_soft_insn"
7700 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7701 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7702 "TARGET_32BIT && TARGET_SOFT_FLOAT
7703 && ( register_operand (operands[0], DFmode)
7704 || register_operand (operands[1], DFmode))"
7706 switch (which_alternative)
7713 return output_move_double (operands, true, NULL);
7716 [(set_attr "length" "8,12,16,8,8")
7717 (set_attr "type" "*,*,*,load2,store2")
7718 (set_attr "arm_pool_range" "*,*,*,1020,*")
7719 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7720 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7721 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7724 ;;; ??? This should have alternatives for constants.
7725 ;;; ??? This was originally identical to the movdi_insn pattern.
7726 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7727 ;;; thumb_reorg with a memory reference.
7728 (define_insn "*thumb_movdf_insn"
7729 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7730 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7732 && ( register_operand (operands[0], DFmode)
7733 || register_operand (operands[1], DFmode))"
7735 switch (which_alternative)
7739 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7740 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7741 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7743 return \"ldmia\\t%1, {%0, %H0}\";
7745 return \"stmia\\t%0, {%1, %H1}\";
7747 return thumb_load_double_from_address (operands);
7749 operands[2] = gen_rtx_MEM (SImode,
7750 plus_constant (Pmode,
7751 XEXP (operands[0], 0), 4));
7752 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7755 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7756 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7757 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7760 [(set_attr "length" "4,2,2,6,4,4")
7761 (set_attr "type" "*,load2,store2,load2,store2,mov_reg")
7762 (set_attr "pool_range" "*,*,*,1018,*,*")]
;; load_multiple / store_multiple: expanders for LDM/STM over a
;; contiguous register range.  Both FAIL unless the count is a
;; constant in [2, 14], the base register range stays within the core
;; registers, and the memory/register operands have the right form;
;; otherwise they defer to arm_gen_load_multiple /
;; arm_gen_store_multiple.  NOTE(review): condition and FAIL lines
;; are missing from this extraction.
7766 ;; load- and store-multiple insns
7767 ;; The arm can load/store any set of registers, provided that they are in
7768 ;; ascending order, but these expanders assume a contiguous set.
7770 (define_expand "load_multiple"
7771 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7772 (match_operand:SI 1 "" ""))
7773 (use (match_operand:SI 2 "" ""))])]
7776 HOST_WIDE_INT offset = 0;
7778 /* Support only fixed point registers. */
7779 if (!CONST_INT_P (operands[2])
7780 || INTVAL (operands[2]) > 14
7781 || INTVAL (operands[2]) < 2
7782 || !MEM_P (operands[1])
7783 || !REG_P (operands[0])
7784 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7785 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7789 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7790 INTVAL (operands[2]),
7791 force_reg (SImode, XEXP (operands[1], 0)),
7792 FALSE, operands[1], &offset);
7795 (define_expand "store_multiple"
7796 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7797 (match_operand:SI 1 "" ""))
7798 (use (match_operand:SI 2 "" ""))])]
7801 HOST_WIDE_INT offset = 0;
7803 /* Support only fixed point registers. */
7804 if (!CONST_INT_P (operands[2])
7805 || INTVAL (operands[2]) > 14
7806 || INTVAL (operands[2]) < 2
7807 || !REG_P (operands[1])
7808 || !MEM_P (operands[0])
7809 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7810 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7814 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7815 INTVAL (operands[2]),
7816 force_reg (SImode, XEXP (operands[0], 0)),
7817 FALSE, operands[0], &offset);
;; movmemqi: block-move expander.  32-bit targets try LDRD/STRD pairs
;; (when tuned for speed) before arm_gen_movmemqi; Thumb1 only handles
;; word-aligned blocks up to 48 bytes via thumb_expand_movmemqi.
;; movmem12b / movmem8b are the Thumb1 12- and 8-byte block-move
;; insns, emitted by thumb_output_move_mem_multiple with lo-reg
;; scratches; they are classed as stores for scheduling despite also
;; loading.  NOTE(review): condition, DONE/FAIL, and closing lines
;; are missing from this extraction.
7821 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7822 ;; We could let this apply for blocks of less than this, but it clobbers so
7823 ;; many registers that there is then probably a better way.
7825 (define_expand "movmemqi"
7826 [(match_operand:BLK 0 "general_operand" "")
7827 (match_operand:BLK 1 "general_operand" "")
7828 (match_operand:SI 2 "const_int_operand" "")
7829 (match_operand:SI 3 "const_int_operand" "")]
7834 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7835 && !optimize_function_for_size_p (cfun))
7837 if (gen_movmem_ldrd_strd (operands))
7842 if (arm_gen_movmemqi (operands))
7846 else /* TARGET_THUMB1 */
7848 if ( INTVAL (operands[3]) != 4
7849 || INTVAL (operands[2]) > 48)
7852 thumb_expand_movmemqi (operands);
7858 ;; Thumb block-move insns
7860 (define_insn "movmem12b"
7861 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7862 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7863 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7864 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7865 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7866 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7867 (set (match_operand:SI 0 "register_operand" "=l")
7868 (plus:SI (match_dup 2) (const_int 12)))
7869 (set (match_operand:SI 1 "register_operand" "=l")
7870 (plus:SI (match_dup 3) (const_int 12)))
7871 (clobber (match_scratch:SI 4 "=&l"))
7872 (clobber (match_scratch:SI 5 "=&l"))
7873 (clobber (match_scratch:SI 6 "=&l"))]
7875 "* return thumb_output_move_mem_multiple (3, operands);"
7876 [(set_attr "length" "4")
7877 ; This isn't entirely accurate... It loads as well, but in terms of
7878 ; scheduling the following insn it is better to consider it as a store
7879 (set_attr "type" "store3")]
7882 (define_insn "movmem8b"
7883 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7884 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7885 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7886 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7887 (set (match_operand:SI 0 "register_operand" "=l")
7888 (plus:SI (match_dup 2) (const_int 8)))
7889 (set (match_operand:SI 1 "register_operand" "=l")
7890 (plus:SI (match_dup 3) (const_int 8)))
7891 (clobber (match_scratch:SI 4 "=&l"))
7892 (clobber (match_scratch:SI 5 "=&l"))]
7894 "* return thumb_output_move_mem_multiple (2, operands);"
7895 [(set_attr "length" "4")
7896 ; This isn't entirely accurate... It loads as well, but in terms of
7897 ; scheduling the following insn it is better to consider it as a store
7898 (set_attr "type" "store2")]
7903 ;; Compare & branch insns
7904 ;; The range calculations are as follows:
7905 ;; For forward branches, the address calculation returns the address of
7906 ;; the next instruction. This is 2 beyond the branch instruction.
7907 ;; For backward branches, the address calculation returns the address of
7908 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7909 ;; instruction for the shortest sequence, and 4 before the branch instruction
7910 ;; if we have to jump around an unconditional branch.
7911 ;; To the basic branch range the PC offset must be added (this is +4).
7912 ;; So for forward branches we have
7913 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7914 ;; And for backward branches we have
7915 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7917 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7918 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Expander "cbranchsi4": conditional branch on an SImode comparison of
;; operands[1] against operands[2], jumping to label operands[3].
;; Visible paths: after arm_validize_comparison, emit gen_cbranch_cc;
;; for a thumb1_cmpneg_operand constant emit gen_cbranchsi4_scratch;
;; otherwise force operands[2] into a register when it is not a
;; thumb1_cmp_operand.
;; NOTE(review): the guard lines and DONE/FAIL statements between these
;; calls are omitted from this extract.
7920 (define_expand "cbranchsi4"
7921 [(set (pc) (if_then_else
7922 (match_operator 0 "expandable_comparison_operator"
7923 [(match_operand:SI 1 "s_register_operand" "")
7924 (match_operand:SI 2 "nonmemory_operand" "")])
7925 (label_ref (match_operand 3 "" ""))
7931 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7933 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7937 if (thumb1_cmpneg_operand (operands[2], SImode))
7939 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7940 operands[3], operands[0]))
7943 if (!thumb1_cmp_operand (operands[2], SImode))
7944 operands[2] = force_reg (SImode, operands[2]);
7947 ;; A pattern to recognize a special situation and optimize for it.
7948 ;; On the thumb, zero-extension from memory is preferable to sign-extension
7949 ;; due to the available addressing modes. Hence, convert a signed comparison
7950 ;; with zero into an unsigned comparison with 127 if possible.
7951 (define_expand "cbranchqi4"
7952 [(set (pc) (if_then_else
7953 (match_operator 0 "lt_ge_comparison_operator"
7954 [(match_operand:QI 1 "memory_operand" "")
7955 (match_operand:QI 2 "const0_operand" "")])
7956 (label_ref (match_operand 3 "" ""))
7961 xops[1] = gen_reg_rtx (SImode);
7962 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7963 xops[2] = GEN_INT (127);
7964 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7965 VOIDmode, xops[1], xops[2]);
7966 xops[3] = operands[3];
7967 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
;; Expander "cbranchsf4": SFmode conditional branch.  Enabled only for
;; TARGET_32BIT with hard float; lowered directly to gen_cbranch_cc.
7971 (define_expand "cbranchsf4"
7972 [(set (pc) (if_then_else
7973 (match_operator 0 "expandable_comparison_operator"
7974 [(match_operand:SF 1 "s_register_operand" "")
7975 (match_operand:SF 2 "arm_float_compare_operand" "")])
7976 (label_ref (match_operand 3 "" ""))
7978 "TARGET_32BIT && TARGET_HARD_FLOAT"
7979 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7980 operands[3])); DONE;"
;; Expander "cbranchdf4": DFmode conditional branch.  Same lowering as
;; cbranchsf4 but additionally guarded by !TARGET_VFP_SINGLE.
7983 (define_expand "cbranchdf4"
7984 [(set (pc) (if_then_else
7985 (match_operator 0 "expandable_comparison_operator"
7986 [(match_operand:DF 1 "s_register_operand" "")
7987 (match_operand:DF 2 "arm_float_compare_operand" "")])
7988 (label_ref (match_operand 3 "" ""))
7990 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7991 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7992 operands[3])); DONE;"
;; Expander "cbranchdi4": DImode conditional branch; validates the
;; comparison with arm_validize_comparison, then emits gen_cbranch_cc.
;; NOTE(review): the FAIL path and the enabling condition are omitted
;; from this extract.
7995 (define_expand "cbranchdi4"
7996 [(set (pc) (if_then_else
7997 (match_operator 0 "expandable_comparison_operator"
7998 [(match_operand:DI 1 "s_register_operand" "")
7999 (match_operand:DI 2 "cmpdi_operand" "")])
8000 (label_ref (match_operand 3 "" ""))
8004 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
8006 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
8012 (define_insn "cbranchsi4_insn"
8013 [(set (pc) (if_then_else
8014 (match_operator 0 "arm_comparison_operator"
8015 [(match_operand:SI 1 "s_register_operand" "l,l*h")
8016 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
8017 (label_ref (match_operand 3 "" ""))
8021 rtx t = cfun->machine->thumb1_cc_insn;
8024 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
8025 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
8027 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
8029 if (!noov_comparison_operator (operands[0], VOIDmode))
8032 else if (cfun->machine->thumb1_cc_mode != CCmode)
8037 output_asm_insn ("cmp\t%1, %2", operands);
8038 cfun->machine->thumb1_cc_insn = insn;
8039 cfun->machine->thumb1_cc_op0 = operands[1];
8040 cfun->machine->thumb1_cc_op1 = operands[2];
8041 cfun->machine->thumb1_cc_mode = CCmode;
8044 /* Ensure we emit the right type of condition code on the jump. */
8045 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
8048 switch (get_attr_length (insn))
8050 case 4: return \"b%d0\\t%l3\";
8051 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8052 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8055 [(set (attr "far_jump")
8057 (eq_attr "length" "8")
8058 (const_string "yes")
8059 (const_string "no")))
8060 (set (attr "length")
8062 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8063 (le (minus (match_dup 3) (pc)) (const_int 256)))
8066 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8067 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8072 (define_insn "cbranchsi4_scratch"
8073 [(set (pc) (if_then_else
8074 (match_operator 4 "arm_comparison_operator"
8075 [(match_operand:SI 1 "s_register_operand" "l,0")
8076 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
8077 (label_ref (match_operand 3 "" ""))
8079 (clobber (match_scratch:SI 0 "=l,l"))]
8082 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
8084 switch (get_attr_length (insn))
8086 case 4: return \"b%d4\\t%l3\";
8087 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8088 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8091 [(set (attr "far_jump")
8093 (eq_attr "length" "8")
8094 (const_string "yes")
8095 (const_string "no")))
8096 (set (attr "length")
8098 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8099 (le (minus (match_dup 3) (pc)) (const_int 256)))
8102 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8103 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8108 (define_insn "*negated_cbranchsi4"
8111 (match_operator 0 "equality_operator"
8112 [(match_operand:SI 1 "s_register_operand" "l")
8113 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
8114 (label_ref (match_operand 3 "" ""))
8118 output_asm_insn (\"cmn\\t%1, %2\", operands);
8119 switch (get_attr_length (insn))
8121 case 4: return \"b%d0\\t%l3\";
8122 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8123 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8126 [(set (attr "far_jump")
8128 (eq_attr "length" "8")
8129 (const_string "yes")
8130 (const_string "no")))
8131 (set (attr "length")
8133 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8134 (le (minus (match_dup 3) (pc)) (const_int 256)))
8137 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8138 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8143 (define_insn "*tbit_cbranch"
8146 (match_operator 0 "equality_operator"
8147 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8149 (match_operand:SI 2 "const_int_operand" "i"))
8151 (label_ref (match_operand 3 "" ""))
8153 (clobber (match_scratch:SI 4 "=l"))]
8158 op[0] = operands[4];
8159 op[1] = operands[1];
8160 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
8162 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8163 switch (get_attr_length (insn))
8165 case 4: return \"b%d0\\t%l3\";
8166 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8167 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8170 [(set (attr "far_jump")
8172 (eq_attr "length" "8")
8173 (const_string "yes")
8174 (const_string "no")))
8175 (set (attr "length")
8177 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8178 (le (minus (match_dup 3) (pc)) (const_int 256)))
8181 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8182 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8187 (define_insn "*tlobits_cbranch"
8190 (match_operator 0 "equality_operator"
8191 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
8192 (match_operand:SI 2 "const_int_operand" "i")
8195 (label_ref (match_operand 3 "" ""))
8197 (clobber (match_scratch:SI 4 "=l"))]
8202 op[0] = operands[4];
8203 op[1] = operands[1];
8204 op[2] = GEN_INT (32 - INTVAL (operands[2]));
8206 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
8207 switch (get_attr_length (insn))
8209 case 4: return \"b%d0\\t%l3\";
8210 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
8211 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
8214 [(set (attr "far_jump")
8216 (eq_attr "length" "8")
8217 (const_string "yes")
8218 (const_string "no")))
8219 (set (attr "length")
8221 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
8222 (le (minus (match_dup 3) (pc)) (const_int 256)))
8225 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
8226 (le (minus (match_dup 3) (pc)) (const_int 2048)))
8231 (define_insn "*tstsi3_cbranch"
8234 (match_operator 3 "equality_operator"
8235 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
8236 (match_operand:SI 1 "s_register_operand" "l"))
8238 (label_ref (match_operand 2 "" ""))
8243 output_asm_insn (\"tst\\t%0, %1\", operands);
8244 switch (get_attr_length (insn))
8246 case 4: return \"b%d3\\t%l2\";
8247 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
8248 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
8251 [(set (attr "far_jump")
8253 (eq_attr "length" "8")
8254 (const_string "yes")
8255 (const_string "no")))
8256 (set (attr "length")
8258 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
8259 (le (minus (match_dup 2) (pc)) (const_int 256)))
8262 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
8263 (le (minus (match_dup 2) (pc)) (const_int 2048)))
8268 (define_insn "*cbranchne_decr1"
8270 (if_then_else (match_operator 3 "equality_operator"
8271 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
8273 (label_ref (match_operand 4 "" ""))
8275 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
8276 (plus:SI (match_dup 2) (const_int -1)))
8277 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
8282 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
8284 VOIDmode, operands[2], const1_rtx);
8285 cond[1] = operands[4];
8287 if (which_alternative == 0)
8288 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
8289 else if (which_alternative == 1)
8291 /* We must provide an alternative for a hi reg because reload
8292 cannot handle output reloads on a jump instruction, but we
8293 can't subtract into that. Fortunately a mov from lo to hi
8294 does not clobber the condition codes. */
8295 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8296 output_asm_insn (\"mov\\t%0, %1\", operands);
8300 /* Similarly, but the target is memory. */
8301 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
8302 output_asm_insn (\"str\\t%1, %0\", operands);
8305 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
8308 output_asm_insn (\"b%d0\\t%l1\", cond);
8311 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8312 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
8314 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
8315 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8319 [(set (attr "far_jump")
8321 (ior (and (eq (symbol_ref ("which_alternative"))
8323 (eq_attr "length" "8"))
8324 (eq_attr "length" "10"))
8325 (const_string "yes")
8326 (const_string "no")))
8327 (set_attr_alternative "length"
8331 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8332 (le (minus (match_dup 4) (pc)) (const_int 256)))
8335 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8336 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8341 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8342 (le (minus (match_dup 4) (pc)) (const_int 256)))
8345 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8346 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8351 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8352 (le (minus (match_dup 4) (pc)) (const_int 256)))
8355 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8356 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8361 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8362 (le (minus (match_dup 4) (pc)) (const_int 256)))
8365 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8366 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8371 (define_insn "*addsi3_cbranch"
8374 (match_operator 4 "arm_comparison_operator"
8376 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8377 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8379 (label_ref (match_operand 5 "" ""))
8382 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8383 (plus:SI (match_dup 2) (match_dup 3)))
8384 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8386 && (GET_CODE (operands[4]) == EQ
8387 || GET_CODE (operands[4]) == NE
8388 || GET_CODE (operands[4]) == GE
8389 || GET_CODE (operands[4]) == LT)"
8394 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8395 cond[1] = operands[2];
8396 cond[2] = operands[3];
8398 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8399 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8401 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8403 if (which_alternative >= 2
8404 && which_alternative < 4)
8405 output_asm_insn (\"mov\\t%0, %1\", operands);
8406 else if (which_alternative >= 4)
8407 output_asm_insn (\"str\\t%1, %0\", operands);
8409 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8412 return \"b%d4\\t%l5\";
8414 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8416 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8420 [(set (attr "far_jump")
8422 (ior (and (lt (symbol_ref ("which_alternative"))
8424 (eq_attr "length" "8"))
8425 (eq_attr "length" "10"))
8426 (const_string "yes")
8427 (const_string "no")))
8428 (set (attr "length")
8430 (lt (symbol_ref ("which_alternative"))
8433 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8434 (le (minus (match_dup 5) (pc)) (const_int 256)))
8437 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8438 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8442 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8443 (le (minus (match_dup 5) (pc)) (const_int 256)))
8446 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8447 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8452 (define_insn "*addsi3_cbranch_scratch"
8455 (match_operator 3 "arm_comparison_operator"
8457 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8458 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8460 (label_ref (match_operand 4 "" ""))
8462 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8464 && (GET_CODE (operands[3]) == EQ
8465 || GET_CODE (operands[3]) == NE
8466 || GET_CODE (operands[3]) == GE
8467 || GET_CODE (operands[3]) == LT)"
8470 switch (which_alternative)
8473 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8476 output_asm_insn (\"cmn\t%1, %2\", operands);
8479 if (INTVAL (operands[2]) < 0)
8480 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8482 output_asm_insn (\"add\t%0, %1, %2\", operands);
8485 if (INTVAL (operands[2]) < 0)
8486 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8488 output_asm_insn (\"add\t%0, %0, %2\", operands);
8492 switch (get_attr_length (insn))
8495 return \"b%d3\\t%l4\";
8497 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8499 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8503 [(set (attr "far_jump")
8505 (eq_attr "length" "8")
8506 (const_string "yes")
8507 (const_string "no")))
8508 (set (attr "length")
8510 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8511 (le (minus (match_dup 4) (pc)) (const_int 256)))
8514 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8515 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8521 ;; Comparison and test insns
;; SImode compare writing the full CC register.  Per the attributes,
;; four alternatives: two 2-byte Thumb-2 forms ("arch" t2, "length" 2)
;; and two 4-byte forms; the last alternative uses an arm_add_operand
;; immediate of constraint class L and is typed "arlo_imm".
;; NOTE(review): the output templates for this insn are omitted from
;; this extract.
8523 (define_insn "*arm_cmpsi_insn"
8524 [(set (reg:CC CC_REGNUM)
8525 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8526 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8533 [(set_attr "conds" "set")
8534 (set_attr "arch" "t2,t2,any,any")
8535 (set_attr "length" "2,2,4,4")
8536 (set_attr "predicable" "yes")
8537 (set_attr "type" "*,*,*,arlo_imm")]
8540 (define_insn "*cmpsi_shiftsi"
8541 [(set (reg:CC CC_REGNUM)
8542 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8543 (match_operator:SI 3 "shift_operator"
8544 [(match_operand:SI 1 "s_register_operand" "r,r")
8545 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8548 [(set_attr "conds" "set")
8549 (set_attr "shift" "1")
8550 (set_attr "arch" "32,a")
8551 (set_attr "type" "arlo_shift,arlo_shift_reg")])
8553 (define_insn "*cmpsi_shiftsi_swp"
8554 [(set (reg:CC_SWP CC_REGNUM)
8555 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8556 [(match_operand:SI 1 "s_register_operand" "r,r")
8557 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8558 (match_operand:SI 0 "s_register_operand" "r,r")))]
8561 [(set_attr "conds" "set")
8562 (set_attr "shift" "1")
8563 (set_attr "arch" "32,a")
8564 (set_attr "type" "arlo_shift,arlo_shift_reg")])
8566 (define_insn "*arm_cmpsi_negshiftsi_si"
8567 [(set (reg:CC_Z CC_REGNUM)
8569 (neg:SI (match_operator:SI 1 "shift_operator"
8570 [(match_operand:SI 2 "s_register_operand" "r")
8571 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8572 (match_operand:SI 0 "s_register_operand" "r")))]
8575 [(set_attr "conds" "set")
8576 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8577 (const_string "arlo_shift")
8578 (const_string "arlo_shift_reg")))
8579 (set_attr "predicable" "yes")]
8582 ;; DImode comparisons. The generic code generates branches that
8583 ;; if-conversion cannot reduce to a conditional compare, so we do
8586 (define_insn_and_split "*arm_cmpdi_insn"
8587 [(set (reg:CC_NCV CC_REGNUM)
8588 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8589 (match_operand:DI 1 "arm_di_operand" "rDi")))
8590 (clobber (match_scratch:SI 2 "=r"))]
8592 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8593 "&& reload_completed"
8594 [(set (reg:CC CC_REGNUM)
8595 (compare:CC (match_dup 0) (match_dup 1)))
8596 (parallel [(set (reg:CC CC_REGNUM)
8597 (compare:CC (match_dup 3) (match_dup 4)))
8599 (minus:SI (match_dup 5)
8600 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8602 operands[3] = gen_highpart (SImode, operands[0]);
8603 operands[0] = gen_lowpart (SImode, operands[0]);
8604 if (CONST_INT_P (operands[1]))
8606 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8609 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8613 operands[4] = gen_highpart (SImode, operands[1]);
8614 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8616 operands[1] = gen_lowpart (SImode, operands[1]);
8617 operands[2] = gen_lowpart (SImode, operands[2]);
8619 [(set_attr "conds" "set")
8620 (set_attr "length" "8")]
8623 (define_insn_and_split "*arm_cmpdi_unsigned"
8624 [(set (reg:CC_CZ CC_REGNUM)
8625 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r")
8626 (match_operand:DI 1 "arm_di_operand" "Py,r,rDi")))]
8629 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8630 "&& reload_completed"
8631 [(set (reg:CC CC_REGNUM)
8632 (compare:CC (match_dup 2) (match_dup 3)))
8633 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8634 (set (reg:CC CC_REGNUM)
8635 (compare:CC (match_dup 0) (match_dup 1))))]
8637 operands[2] = gen_highpart (SImode, operands[0]);
8638 operands[0] = gen_lowpart (SImode, operands[0]);
8639 if (CONST_INT_P (operands[1]))
8640 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8642 operands[3] = gen_highpart (SImode, operands[1]);
8643 operands[1] = gen_lowpart (SImode, operands[1]);
8645 [(set_attr "conds" "set")
8646 (set_attr "enabled_for_depr_it" "yes,yes,no")
8647 (set_attr "arch" "t2,t2,*")
8648 (set_attr "length" "6,6,8")]
;; DImode EQ/NE-style compare (CC_Z mode): ORs the low (%Q0) and high
;; (%R0) words into a scratch with a flag-setting ORR.
;; NOTE(review): the second compare operand (presumably const_int 0,
;; given the insn name and CC_Z) is on a line omitted from this extract.
8651 (define_insn "*arm_cmpdi_zero"
8652 [(set (reg:CC_Z CC_REGNUM)
8653 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8655 (clobber (match_scratch:SI 1 "=r"))]
8657 "orr%.\\t%1, %Q0, %R0"
8658 [(set_attr "conds" "set")]
;; Thumb counterpart of *arm_cmpdi_zero: low-register ("l") operands,
;; plain ORR of the two halves (Thumb1 ORR sets flags), 2 bytes long.
;; NOTE(review): the second compare operand line is omitted from this
;; extract — presumably const_int 0, matching the insn name.
8661 (define_insn "*thumb_cmpdi_zero"
8662 [(set (reg:CC_Z CC_REGNUM)
8663 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8665 (clobber (match_scratch:SI 1 "=l"))]
8667 "orr\\t%1, %Q0, %R0"
8668 [(set_attr "conds" "set")
8669 (set_attr "length" "2")]
8672 ; This insn allows redundant compares to be removed by cse, nothing should
8673 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8674 ; is deleted later on. The match_dup will match the mode here, so that
8675 ; mode changes of the condition codes aren't lost by this even though we don't
8676 ; specify what they are.
8678 (define_insn "*deleted_compare"
8679 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8681 "\\t%@ deleted compare"
8682 [(set_attr "conds" "set")
8683 (set_attr "length" "0")]
8687 ;; Conditional branch insns
;; Internal expander "cbranch_cc": materializes the comparison with
;; arm_gen_compare_reg and rewrites operands so the branch tests the
;; resulting CC register against zero (operands[2] = const0_rtx).
8689 (define_expand "cbranch_cc"
8691 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8692 (match_operand 2 "" "")])
8693 (label_ref (match_operand 3 "" ""))
8696 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8697 operands[1], operands[2], NULL_RTX);
8698 operands[2] = const0_rtx;"
8702 ;; Patterns to match conditional branch insns.
8705 (define_insn "arm_cond_branch"
8707 (if_then_else (match_operator 1 "arm_comparison_operator"
8708 [(match_operand 2 "cc_register" "") (const_int 0)])
8709 (label_ref (match_operand 0 "" ""))
8713 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8715 arm_ccfsm_state += 2;
8718 return \"b%d1\\t%l0\";
8720 [(set_attr "conds" "use")
8721 (set_attr "type" "branch")
8722 (set (attr "length")
8724 (and (match_test "TARGET_THUMB2")
8725 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8726 (le (minus (match_dup 0) (pc)) (const_int 256))))
8731 (define_insn "*arm_cond_branch_reversed"
8733 (if_then_else (match_operator 1 "arm_comparison_operator"
8734 [(match_operand 2 "cc_register" "") (const_int 0)])
8736 (label_ref (match_operand 0 "" ""))))]
8739 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8741 arm_ccfsm_state += 2;
8744 return \"b%D1\\t%l0\";
8746 [(set_attr "conds" "use")
8747 (set_attr "type" "branch")
8748 (set (attr "length")
8750 (and (match_test "TARGET_THUMB2")
8751 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8752 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Internal expander "cstore_cc": store-flag analogue of cbranch_cc.
;; Materializes the comparison with arm_gen_compare_reg and rewrites
;; operands so the scc tests the CC register against zero
;; (operands[3] = const0_rtx).
8761 (define_expand "cstore_cc"
8762 [(set (match_operand:SI 0 "s_register_operand" "")
8763 (match_operator:SI 1 "" [(match_operand 2 "" "")
8764 (match_operand 3 "" "")]))]
8766 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8767 operands[2], operands[3], NULL_RTX);
8768 operands[3] = const0_rtx;"
8771 (define_insn_and_split "*mov_scc"
8772 [(set (match_operand:SI 0 "s_register_operand" "=r")
8773 (match_operator:SI 1 "arm_comparison_operator"
8774 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8776 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8779 (if_then_else:SI (match_dup 1)
8783 [(set_attr "conds" "use")
8784 (set_attr "length" "8")]
8787 (define_insn_and_split "*mov_negscc"
8788 [(set (match_operand:SI 0 "s_register_operand" "=r")
8789 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8790 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8792 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8795 (if_then_else:SI (match_dup 1)
8799 operands[3] = GEN_INT (~0);
8801 [(set_attr "conds" "use")
8802 (set_attr "length" "8")]
8805 (define_insn_and_split "*mov_notscc"
8806 [(set (match_operand:SI 0 "s_register_operand" "=r")
8807 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8808 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8810 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8813 (if_then_else:SI (match_dup 1)
8817 operands[3] = GEN_INT (~1);
8818 operands[4] = GEN_INT (~0);
8820 [(set_attr "conds" "use")
8821 (set_attr "length" "8")]
8824 (define_expand "cstoresi4"
8825 [(set (match_operand:SI 0 "s_register_operand" "")
8826 (match_operator:SI 1 "expandable_comparison_operator"
8827 [(match_operand:SI 2 "s_register_operand" "")
8828 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8829 "TARGET_32BIT || TARGET_THUMB1"
8831 rtx op3, scratch, scratch2;
8835 if (!arm_add_operand (operands[3], SImode))
8836 operands[3] = force_reg (SImode, operands[3]);
8837 emit_insn (gen_cstore_cc (operands[0], operands[1],
8838 operands[2], operands[3]));
8842 if (operands[3] == const0_rtx)
8844 switch (GET_CODE (operands[1]))
8847 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8851 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8855 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8856 NULL_RTX, 0, OPTAB_WIDEN);
8857 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8858 NULL_RTX, 0, OPTAB_WIDEN);
8859 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8860 operands[0], 1, OPTAB_WIDEN);
8864 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8866 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8867 NULL_RTX, 1, OPTAB_WIDEN);
8871 scratch = expand_binop (SImode, ashr_optab, operands[2],
8872 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8873 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8874 NULL_RTX, 0, OPTAB_WIDEN);
8875 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8879 /* LT is handled by generic code. No need for unsigned with 0. */
8886 switch (GET_CODE (operands[1]))
8889 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8890 NULL_RTX, 0, OPTAB_WIDEN);
8891 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8895 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8896 NULL_RTX, 0, OPTAB_WIDEN);
8897 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8901 op3 = force_reg (SImode, operands[3]);
8903 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8904 NULL_RTX, 1, OPTAB_WIDEN);
8905 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8906 NULL_RTX, 0, OPTAB_WIDEN);
8907 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8913 if (!thumb1_cmp_operand (op3, SImode))
8914 op3 = force_reg (SImode, op3);
8915 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8916 NULL_RTX, 0, OPTAB_WIDEN);
8917 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8918 NULL_RTX, 1, OPTAB_WIDEN);
8919 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8924 op3 = force_reg (SImode, operands[3]);
8925 scratch = force_reg (SImode, const0_rtx);
8926 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8932 if (!thumb1_cmp_operand (op3, SImode))
8933 op3 = force_reg (SImode, op3);
8934 scratch = force_reg (SImode, const0_rtx);
8935 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8941 if (!thumb1_cmp_operand (op3, SImode))
8942 op3 = force_reg (SImode, op3);
8943 scratch = gen_reg_rtx (SImode);
8944 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8948 op3 = force_reg (SImode, operands[3]);
8949 scratch = gen_reg_rtx (SImode);
8950 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8953 /* No good sequences for GT, LT. */
8960 (define_expand "cstoresf4"
8961 [(set (match_operand:SI 0 "s_register_operand" "")
8962 (match_operator:SI 1 "expandable_comparison_operator"
8963 [(match_operand:SF 2 "s_register_operand" "")
8964 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8965 "TARGET_32BIT && TARGET_HARD_FLOAT"
8966 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8967 operands[2], operands[3])); DONE;"
8970 (define_expand "cstoredf4"
8971 [(set (match_operand:SI 0 "s_register_operand" "")
8972 (match_operator:SI 1 "expandable_comparison_operator"
8973 [(match_operand:DF 2 "s_register_operand" "")
8974 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8975 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8976 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8977 operands[2], operands[3])); DONE;"
8980 (define_expand "cstoredi4"
8981 [(set (match_operand:SI 0 "s_register_operand" "")
8982 (match_operator:SI 1 "expandable_comparison_operator"
8983 [(match_operand:DI 2 "s_register_operand" "")
8984 (match_operand:DI 3 "cmpdi_operand" "")]))]
8987 if (!arm_validize_comparison (&operands[1],
8991 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
;; Expander "cstoresi_eq0_thumb1": set operands[0] to (operands[1] == 0)
;; — the zero operand itself is on an omitted line.  Allocates a fresh
;; SImode pseudo for the clobbered scratch (operands[2]).
8997 (define_expand "cstoresi_eq0_thumb1"
8999 [(set (match_operand:SI 0 "s_register_operand" "")
9000 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9002 (clobber (match_dup:SI 2))])]
9004 "operands[2] = gen_reg_rtx (SImode);"
;; Expander "cstoresi_ne0_thumb1": NE counterpart of
;; cstoresi_eq0_thumb1; likewise allocates a scratch pseudo for the
;; clobber (operands[2]).
9007 (define_expand "cstoresi_ne0_thumb1"
9009 [(set (match_operand:SI 0 "s_register_operand" "")
9010 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9012 (clobber (match_dup:SI 2))])]
9014 "operands[2] = gen_reg_rtx (SImode);"
;; Matcher for cstoresi_eq0_thumb1.  Two alternatives:
;;   alt 0: NEG %0,%1 then ADC %0,%0,%1 (earlyclobber dest, no scratch)
;;   alt 1: NEG %2,%1 then ADC %0,%1,%2 (dest tied to %1, uses scratch)
;; Each is two 2-byte Thumb insns, hence "length" 4.
9017 (define_insn "*cstoresi_eq0_thumb1_insn"
9018 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
9019 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
9021 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
9024 neg\\t%0, %1\;adc\\t%0, %0, %1
9025 neg\\t%2, %1\;adc\\t%0, %1, %2"
9026 [(set_attr "length" "4")]
;; Matcher for cstoresi_ne0_thumb1: SUB %2,%1,#1 then SBC %0,%1,%2.
;; Destination is tied to the input ("0" constraint); two 2-byte Thumb
;; insns ("length" 4).
9029 (define_insn "*cstoresi_ne0_thumb1_insn"
9030 [(set (match_operand:SI 0 "s_register_operand" "=l")
9031 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
9033 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
9035 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
9036 [(set_attr "length" "4")]
9039 ;; Used as part of the expansion of thumb ltu and gtu sequences
;; Part of the Thumb LTU/GTU cstore expansion: per the RTL, sets
;; operand 0 to -(op1 <u op2), emitted as CMP %1,%2 followed by
;; SBC %0,%0,%0 (two 2-byte insns, "length" 4).
9040 (define_insn "cstoresi_nltu_thumb1"
9041 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
9042 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
9043 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
9045 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
9046 [(set_attr "length" "4")]
9049 (define_insn_and_split "cstoresi_ltu_thumb1"
9050 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
9051 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
9052 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
9057 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
9058 (set (match_dup 0) (neg:SI (match_dup 3)))]
9059 "operands[3] = gen_reg_rtx (SImode);"
9060 [(set_attr "length" "4")]
9063 ;; Used as part of the expansion of thumb les sequence.
9064 (define_insn "thumb1_addsi3_addgeu"
9065 [(set (match_operand:SI 0 "s_register_operand" "=l")
9066 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
9067 (match_operand:SI 2 "s_register_operand" "l"))
9068 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
9069 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
9071 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
9072 [(set_attr "length" "4")]
9076 ;; Conditional move insns
9078 (define_expand "movsicc"
9079 [(set (match_operand:SI 0 "s_register_operand" "")
9080 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
9081 (match_operand:SI 2 "arm_not_operand" "")
9082 (match_operand:SI 3 "arm_not_operand" "")))]
9089 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9090 &XEXP (operands[1], 1)))
9093 code = GET_CODE (operands[1]);
9094 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9095 XEXP (operands[1], 1), NULL_RTX);
9096 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9100 (define_expand "movsfcc"
9101 [(set (match_operand:SF 0 "s_register_operand" "")
9102 (if_then_else:SF (match_operand 1 "arm_cond_move_operator" "")
9103 (match_operand:SF 2 "s_register_operand" "")
9104 (match_operand:SF 3 "s_register_operand" "")))]
9105 "TARGET_32BIT && TARGET_HARD_FLOAT"
9108 enum rtx_code code = GET_CODE (operands[1]);
9111 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9112 &XEXP (operands[1], 1)))
9115 code = GET_CODE (operands[1]);
9116 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9117 XEXP (operands[1], 1), NULL_RTX);
9118 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9122 (define_expand "movdfcc"
9123 [(set (match_operand:DF 0 "s_register_operand" "")
9124 (if_then_else:DF (match_operand 1 "arm_cond_move_operator" "")
9125 (match_operand:DF 2 "s_register_operand" "")
9126 (match_operand:DF 3 "s_register_operand" "")))]
9127 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
9130 enum rtx_code code = GET_CODE (operands[1]);
9133 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
9134 &XEXP (operands[1], 1)))
9136 code = GET_CODE (operands[1]);
9137 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
9138 XEXP (operands[1], 1), NULL_RTX);
9139 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
9143 (define_insn "*cmov<mode>"
9144 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
9145 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
9146 [(match_operand 2 "cc_register" "") (const_int 0)])
9147 (match_operand:SDF 3 "s_register_operand"
9149 (match_operand:SDF 4 "s_register_operand"
9150 "<F_constraint>")))]
9151 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
9154 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
9161 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
9166 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
9172 [(set_attr "conds" "use")
9173 (set_attr "type" "f_sel<vfp_type>")]
9176 (define_insn_and_split "*movsicc_insn"
9177 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
9179 (match_operator 3 "arm_comparison_operator"
9180 [(match_operand 4 "cc_register" "") (const_int 0)])
9181 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
9182 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
9193 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
9194 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
9195 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
9196 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
9197 "&& reload_completed"
9200 enum rtx_code rev_code;
9201 enum machine_mode mode;
9204 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9206 gen_rtx_SET (VOIDmode,
9210 rev_code = GET_CODE (operands[3]);
9211 mode = GET_MODE (operands[4]);
9212 if (mode == CCFPmode || mode == CCFPEmode)
9213 rev_code = reverse_condition_maybe_unordered (rev_code);
9215 rev_code = reverse_condition (rev_code);
9217 rev_cond = gen_rtx_fmt_ee (rev_code,
9221 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9223 gen_rtx_SET (VOIDmode,
9228 [(set_attr "length" "4,4,4,4,8,8,8,8")
9229 (set_attr "conds" "use")
9230 (set_attr_alternative "type"
9231 [(if_then_else (match_operand 2 "const_int_operand" "")
9232 (const_string "mov_imm")
9233 (const_string "mov_reg"))
9234 (const_string "mvn_imm")
9235 (if_then_else (match_operand 1 "const_int_operand" "")
9236 (const_string "mov_imm")
9237 (const_string "mov_reg"))
9238 (const_string "mvn_imm")
9239 (const_string "mov_reg")
9240 (const_string "mov_reg")
9241 (const_string "mov_reg")
9242 (const_string "mov_reg")])]
9245 (define_insn "*movsfcc_soft_insn"
9246 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
9247 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
9248 [(match_operand 4 "cc_register" "") (const_int 0)])
9249 (match_operand:SF 1 "s_register_operand" "0,r")
9250 (match_operand:SF 2 "s_register_operand" "r,0")))]
9251 "TARGET_ARM && TARGET_SOFT_FLOAT"
9255 [(set_attr "conds" "use")
9256 (set_attr "type" "mov_reg")]
9260 ;; Jump and linkage insns
9262 (define_expand "jump"
9264 (label_ref (match_operand 0 "" "")))]
9269 (define_insn "*arm_jump"
9271 (label_ref (match_operand 0 "" "")))]
9275 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
9277 arm_ccfsm_state += 2;
9280 return \"b%?\\t%l0\";
9283 [(set_attr "predicable" "yes")
9284 (set (attr "length")
9286 (and (match_test "TARGET_THUMB2")
9287 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9288 (le (minus (match_dup 0) (pc)) (const_int 2048))))
9293 (define_insn "*thumb_jump"
9295 (label_ref (match_operand 0 "" "")))]
9298 if (get_attr_length (insn) == 2)
9300 return \"bl\\t%l0\\t%@ far jump\";
9302 [(set (attr "far_jump")
9304 (eq_attr "length" "4")
9305 (const_string "yes")
9306 (const_string "no")))
9307 (set (attr "length")
9309 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
9310 (le (minus (match_dup 0) (pc)) (const_int 2048)))
9315 (define_expand "call"
9316 [(parallel [(call (match_operand 0 "memory_operand" "")
9317 (match_operand 1 "general_operand" ""))
9318 (use (match_operand 2 "" ""))
9319 (clobber (reg:SI LR_REGNUM))])]
9325 /* In an untyped call, we can get NULL for operand 2. */
9326 if (operands[2] == NULL_RTX)
9327 operands[2] = const0_rtx;
9329 /* Decide if we should generate indirect calls by loading the
9330 32-bit address of the callee into a register before performing the
9332 callee = XEXP (operands[0], 0);
9333 if (GET_CODE (callee) == SYMBOL_REF
9334 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9336 XEXP (operands[0], 0) = force_reg (Pmode, callee);
9338 pat = gen_call_internal (operands[0], operands[1], operands[2]);
9339 arm_emit_call_insn (pat, XEXP (operands[0], 0));
;; Internal expander used by the "call" pattern above once the callee
;; address has been validated/forced into a register: a call to the
;; memory operand 0 passing argument-size operand 1, with a USE of
;; operand 2 (ABI cookie; const0_rtx when the caller had none — see the
;; "call" expander) and a clobber of LR, since BL/BLX writes the return
;; address there.
9344 (define_expand "call_internal"
9345 [(parallel [(call (match_operand 0 "memory_operand" "")
9346 (match_operand 1 "general_operand" ""))
9347 (use (match_operand 2 "" ""))
9348 (clobber (reg:SI LR_REGNUM))])])
9350 (define_insn "*call_reg_armv5"
9351 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9352 (match_operand 1 "" ""))
9353 (use (match_operand 2 "" ""))
9354 (clobber (reg:SI LR_REGNUM))]
9355 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9357 [(set_attr "type" "call")]
9360 (define_insn "*call_reg_arm"
9361 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9362 (match_operand 1 "" ""))
9363 (use (match_operand 2 "" ""))
9364 (clobber (reg:SI LR_REGNUM))]
9365 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9367 return output_call (operands);
9369 ;; length is worst case, normally it is only two
9370 [(set_attr "length" "12")
9371 (set_attr "type" "call")]
9375 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9376 ;; considered a function call by the branch predictor of some cores (PR40887).
9377 ;; Falls back to blx rN (*call_reg_armv5).
9379 (define_insn "*call_mem"
9380 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9381 (match_operand 1 "" ""))
9382 (use (match_operand 2 "" ""))
9383 (clobber (reg:SI LR_REGNUM))]
9384 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9386 return output_call_mem (operands);
9388 [(set_attr "length" "12")
9389 (set_attr "type" "call")]
9392 (define_insn "*call_reg_thumb1_v5"
9393 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9394 (match_operand 1 "" ""))
9395 (use (match_operand 2 "" ""))
9396 (clobber (reg:SI LR_REGNUM))]
9397 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9399 [(set_attr "length" "2")
9400 (set_attr "type" "call")]
9403 (define_insn "*call_reg_thumb1"
9404 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9405 (match_operand 1 "" ""))
9406 (use (match_operand 2 "" ""))
9407 (clobber (reg:SI LR_REGNUM))]
9408 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9411 if (!TARGET_CALLER_INTERWORKING)
9412 return thumb_call_via_reg (operands[0]);
9413 else if (operands[1] == const0_rtx)
9414 return \"bl\\t%__interwork_call_via_%0\";
9415 else if (frame_pointer_needed)
9416 return \"bl\\t%__interwork_r7_call_via_%0\";
9418 return \"bl\\t%__interwork_r11_call_via_%0\";
9420 [(set_attr "type" "call")]
9423 (define_expand "call_value"
9424 [(parallel [(set (match_operand 0 "" "")
9425 (call (match_operand 1 "memory_operand" "")
9426 (match_operand 2 "general_operand" "")))
9427 (use (match_operand 3 "" ""))
9428 (clobber (reg:SI LR_REGNUM))])]
9434 /* In an untyped call, we can get NULL for operand 3.  */
9435 if (operands[3] == 0)
9436 operands[3] = const0_rtx;
9438 /* Decide if we should generate indirect calls by loading the
9439 32-bit address of the callee into a register before performing the
9441 callee = XEXP (operands[1], 0);
9442 if (GET_CODE (callee) == SYMBOL_REF
9443 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9445 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9447 pat = gen_call_value_internal (operands[0], operands[1],
9448 operands[2], operands[3]);
9449 arm_emit_call_insn (pat, XEXP (operands[1], 0));
;; Value-returning counterpart of "call_internal", used by the
;; "call_value" expander: operand 0 receives the result of calling the
;; memory operand 1 with argument-size operand 2.  Operand 3 is the ABI
;; cookie (const0_rtx when absent), and LR is clobbered because BL/BLX
;; stores the return address in it.
9454 (define_expand "call_value_internal"
9455 [(parallel [(set (match_operand 0 "" "")
9456 (call (match_operand 1 "memory_operand" "")
9457 (match_operand 2 "general_operand" "")))
9458 (use (match_operand 3 "" ""))
9459 (clobber (reg:SI LR_REGNUM))])])
9461 (define_insn "*call_value_reg_armv5"
9462 [(set (match_operand 0 "" "")
9463 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9464 (match_operand 2 "" "")))
9465 (use (match_operand 3 "" ""))
9466 (clobber (reg:SI LR_REGNUM))]
9467 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9469 [(set_attr "type" "call")]
9472 (define_insn "*call_value_reg_arm"
9473 [(set (match_operand 0 "" "")
9474 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9475 (match_operand 2 "" "")))
9476 (use (match_operand 3 "" ""))
9477 (clobber (reg:SI LR_REGNUM))]
9478 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9480 return output_call (&operands[1]);
9482 [(set_attr "length" "12")
9483 (set_attr "type" "call")]
9486 ;; Note: see *call_mem
9488 (define_insn "*call_value_mem"
9489 [(set (match_operand 0 "" "")
9490 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9491 (match_operand 2 "" "")))
9492 (use (match_operand 3 "" ""))
9493 (clobber (reg:SI LR_REGNUM))]
9494 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9495 && !SIBLING_CALL_P (insn)"
9497 return output_call_mem (&operands[1]);
9499 [(set_attr "length" "12")
9500 (set_attr "type" "call")]
9503 (define_insn "*call_value_reg_thumb1_v5"
9504 [(set (match_operand 0 "" "")
9505 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9506 (match_operand 2 "" "")))
9507 (use (match_operand 3 "" ""))
9508 (clobber (reg:SI LR_REGNUM))]
9509 "TARGET_THUMB1 && arm_arch5"
9511 [(set_attr "length" "2")
9512 (set_attr "type" "call")]
9515 (define_insn "*call_value_reg_thumb1"
9516 [(set (match_operand 0 "" "")
9517 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9518 (match_operand 2 "" "")))
9519 (use (match_operand 3 "" ""))
9520 (clobber (reg:SI LR_REGNUM))]
9521 "TARGET_THUMB1 && !arm_arch5"
9524 if (!TARGET_CALLER_INTERWORKING)
9525 return thumb_call_via_reg (operands[1]);
9526 else if (operands[2] == const0_rtx)
9527 return \"bl\\t%__interwork_call_via_%1\";
9528 else if (frame_pointer_needed)
9529 return \"bl\\t%__interwork_r7_call_via_%1\";
9531 return \"bl\\t%__interwork_r11_call_via_%1\";
9533 [(set_attr "type" "call")]
9536 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9537 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
9539 (define_insn "*call_symbol"
9540 [(call (mem:SI (match_operand:SI 0 "" ""))
9541 (match_operand 1 "" ""))
9542 (use (match_operand 2 "" ""))
9543 (clobber (reg:SI LR_REGNUM))]
9545 && !SIBLING_CALL_P (insn)
9546 && (GET_CODE (operands[0]) == SYMBOL_REF)
9547 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9550 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9552 [(set_attr "type" "call")]
9555 (define_insn "*call_value_symbol"
9556 [(set (match_operand 0 "" "")
9557 (call (mem:SI (match_operand:SI 1 "" ""))
9558 (match_operand:SI 2 "" "")))
9559 (use (match_operand 3 "" ""))
9560 (clobber (reg:SI LR_REGNUM))]
9562 && !SIBLING_CALL_P (insn)
9563 && (GET_CODE (operands[1]) == SYMBOL_REF)
9564 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9567 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9569 [(set_attr "type" "call")]
9572 (define_insn "*call_insn"
9573 [(call (mem:SI (match_operand:SI 0 "" ""))
9574 (match_operand:SI 1 "" ""))
9575 (use (match_operand 2 "" ""))
9576 (clobber (reg:SI LR_REGNUM))]
9578 && GET_CODE (operands[0]) == SYMBOL_REF
9579 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9581 [(set_attr "length" "4")
9582 (set_attr "type" "call")]
9585 (define_insn "*call_value_insn"
9586 [(set (match_operand 0 "" "")
9587 (call (mem:SI (match_operand 1 "" ""))
9588 (match_operand 2 "" "")))
9589 (use (match_operand 3 "" ""))
9590 (clobber (reg:SI LR_REGNUM))]
9592 && GET_CODE (operands[1]) == SYMBOL_REF
9593 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9595 [(set_attr "length" "4")
9596 (set_attr "type" "call")]
9599 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9600 (define_expand "sibcall"
9601 [(parallel [(call (match_operand 0 "memory_operand" "")
9602 (match_operand 1 "general_operand" ""))
9604 (use (match_operand 2 "" ""))])]
9608 if (!REG_P (XEXP (operands[0], 0))
9609 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9610 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9612 if (operands[2] == NULL_RTX)
9613 operands[2] = const0_rtx;
9617 (define_expand "sibcall_value"
9618 [(parallel [(set (match_operand 0 "" "")
9619 (call (match_operand 1 "memory_operand" "")
9620 (match_operand 2 "general_operand" "")))
9622 (use (match_operand 3 "" ""))])]
9626 if (!REG_P (XEXP (operands[1], 0)) &&
9627 (GET_CODE (XEXP (operands[1],0)) != SYMBOL_REF))
9628 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9630 if (operands[3] == NULL_RTX)
9631 operands[3] = const0_rtx;
9635 (define_insn "*sibcall_insn"
9636 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
9637 (match_operand 1 "" ""))
9639 (use (match_operand 2 "" ""))]
9640 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9642 if (which_alternative == 1)
9643 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9646 if (arm_arch5 || arm_arch4t)
9647 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
9649 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9652 [(set_attr "type" "call")]
9655 (define_insn "*sibcall_value_insn"
9656 [(set (match_operand 0 "" "")
9657 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
9658 (match_operand 2 "" "")))
9660 (use (match_operand 3 "" ""))]
9661 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9663 if (which_alternative == 1)
9664 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9667 if (arm_arch5 || arm_arch4t)
9668 return \"bx%?\\t%1\";
9670 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9673 [(set_attr "type" "call")]
9676 (define_expand "<return_str>return"
9678 "(TARGET_ARM || (TARGET_THUMB2
9679 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9680 && !IS_STACKALIGN (arm_current_func_type ())))
9681 <return_cond_false>"
9686 thumb2_expand_return (<return_simple_p>);
9693 ;; Often the return insn will be the same as loading from memory, so set attr
9694 (define_insn "*arm_return"
9696 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9699 if (arm_ccfsm_state == 2)
9701 arm_ccfsm_state += 2;
9704 return output_return_instruction (const_true_rtx, true, false, false);
9706 [(set_attr "type" "load1")
9707 (set_attr "length" "12")
9708 (set_attr "predicable" "yes")]
9711 (define_insn "*cond_<return_str>return"
9713 (if_then_else (match_operator 0 "arm_comparison_operator"
9714 [(match_operand 1 "cc_register" "") (const_int 0)])
9717 "TARGET_ARM <return_cond_true>"
9720 if (arm_ccfsm_state == 2)
9722 arm_ccfsm_state += 2;
9725 return output_return_instruction (operands[0], true, false,
9728 [(set_attr "conds" "use")
9729 (set_attr "length" "12")
9730 (set_attr "type" "load1")]
9733 (define_insn "*cond_<return_str>return_inverted"
9735 (if_then_else (match_operator 0 "arm_comparison_operator"
9736 [(match_operand 1 "cc_register" "") (const_int 0)])
9739 "TARGET_ARM <return_cond_true>"
9742 if (arm_ccfsm_state == 2)
9744 arm_ccfsm_state += 2;
9747 return output_return_instruction (operands[0], true, true,
9750 [(set_attr "conds" "use")
9751 (set_attr "length" "12")
9752 (set_attr "type" "load1")]
9755 (define_insn "*arm_simple_return"
9760 if (arm_ccfsm_state == 2)
9762 arm_ccfsm_state += 2;
9765 return output_return_instruction (const_true_rtx, true, false, true);
9767 [(set_attr "type" "branch")
9768 (set_attr "length" "4")
9769 (set_attr "predicable" "yes")]
9772 ;; Generate a sequence of instructions to determine if the processor is
9773 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9776 (define_expand "return_addr_mask"
9778 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9780 (set (match_operand:SI 0 "s_register_operand" "")
9781 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9783 (const_int 67108860)))] ; 0x03fffffc
9786 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9789 (define_insn "*check_arch2"
9790 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9791 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9794 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9795 [(set_attr "length" "8")
9796 (set_attr "conds" "set")]
9799 ;; Call subroutine returning any type.
9801 (define_expand "untyped_call"
9802 [(parallel [(call (match_operand 0 "" "")
9804 (match_operand 1 "" "")
9805 (match_operand 2 "" "")])]
9810 rtx par = gen_rtx_PARALLEL (VOIDmode,
9811 rtvec_alloc (XVECLEN (operands[2], 0)));
9812 rtx addr = gen_reg_rtx (Pmode);
9816 emit_move_insn (addr, XEXP (operands[1], 0));
9817 mem = change_address (operands[1], BLKmode, addr);
9819 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9821 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9823 /* Default code only uses r0 as a return value, but we could
9824 be using anything up to 4 registers. */
9825 if (REGNO (src) == R0_REGNUM)
9826 src = gen_rtx_REG (TImode, R0_REGNUM);
9828 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9830 size += GET_MODE_SIZE (GET_MODE (src));
9833 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9838 for (i = 0; i < XVECLEN (par, 0); i++)
9840 HOST_WIDE_INT offset = 0;
9841 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9844 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9846 mem = change_address (mem, GET_MODE (reg), NULL);
9847 if (REGNO (reg) == R0_REGNUM)
9849 /* On thumb we have to use a write-back instruction. */
9850 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9851 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9852 size = TARGET_ARM ? 16 : 0;
9856 emit_move_insn (mem, reg);
9857 size = GET_MODE_SIZE (GET_MODE (reg));
9861 /* The optimizer does not know that the call sets the function value
9862 registers we stored in the result block. We avoid problems by
9863 claiming that all hard registers are used and clobbered at this
9865 emit_insn (gen_blockage ());
9871 (define_expand "untyped_return"
9872 [(match_operand:BLK 0 "memory_operand" "")
9873 (match_operand 1 "" "")]
9878 rtx addr = gen_reg_rtx (Pmode);
9882 emit_move_insn (addr, XEXP (operands[0], 0));
9883 mem = change_address (operands[0], BLKmode, addr);
9885 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9887 HOST_WIDE_INT offset = 0;
9888 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9891 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9893 mem = change_address (mem, GET_MODE (reg), NULL);
9894 if (REGNO (reg) == R0_REGNUM)
9896 /* On thumb we have to use a write-back instruction. */
9897 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9898 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9899 size = TARGET_ARM ? 16 : 0;
9903 emit_move_insn (reg, mem);
9904 size = GET_MODE_SIZE (GET_MODE (reg));
9908 /* Emit USE insns before the return. */
9909 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9910 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9912 /* Construct the return. */
9913 expand_naked_return ();
9919 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9920 ;; all of memory. This blocks insns from being moved across this point.
9922 (define_insn "blockage"
9923 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9926 [(set_attr "length" "0")
9927 (set_attr "type" "block")]
9930 (define_expand "casesi"
9931 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9932 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9933 (match_operand:SI 2 "const_int_operand" "") ; total range
9934 (match_operand:SI 3 "" "") ; table label
9935 (match_operand:SI 4 "" "")] ; Out of range label
9936 "TARGET_32BIT || optimize_size || flag_pic"
9939 enum insn_code code;
9940 if (operands[1] != const0_rtx)
9942 rtx reg = gen_reg_rtx (SImode);
9944 emit_insn (gen_addsi3 (reg, operands[0],
9945 gen_int_mode (-INTVAL (operands[1]),
9951 code = CODE_FOR_arm_casesi_internal;
9952 else if (TARGET_THUMB1)
9953 code = CODE_FOR_thumb1_casesi_internal_pic;
9955 code = CODE_FOR_thumb2_casesi_internal_pic;
9957 code = CODE_FOR_thumb2_casesi_internal;
9959 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9960 operands[2] = force_reg (SImode, operands[2]);
9962 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9963 operands[3], operands[4]));
9968 ;; The USE in this pattern is needed to tell flow analysis that this is
9969 ;; a CASESI insn. It has no other purpose.
9970 (define_insn "arm_casesi_internal"
9971 [(parallel [(set (pc)
9973 (leu (match_operand:SI 0 "s_register_operand" "r")
9974 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9975 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9976 (label_ref (match_operand 2 "" ""))))
9977 (label_ref (match_operand 3 "" ""))))
9978 (clobber (reg:CC CC_REGNUM))
9979 (use (label_ref (match_dup 2)))])]
9983 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9984 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9986 [(set_attr "conds" "clob")
9987 (set_attr "length" "12")]
9990 (define_expand "thumb1_casesi_internal_pic"
9991 [(match_operand:SI 0 "s_register_operand" "")
9992 (match_operand:SI 1 "thumb1_cmp_operand" "")
9993 (match_operand 2 "" "")
9994 (match_operand 3 "" "")]
9998 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9999 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
10001 reg0 = gen_rtx_REG (SImode, 0);
10002 emit_move_insn (reg0, operands[0]);
10003 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
10008 (define_insn "thumb1_casesi_dispatch"
10009 [(parallel [(set (pc) (unspec [(reg:SI 0)
10010 (label_ref (match_operand 0 "" ""))
10011 ;; (label_ref (match_operand 1 "" ""))
10013 UNSPEC_THUMB1_CASESI))
10014 (clobber (reg:SI IP_REGNUM))
10015 (clobber (reg:SI LR_REGNUM))])]
10017 "* return thumb1_output_casesi(operands);"
10018 [(set_attr "length" "4")]
10021 (define_expand "indirect_jump"
10023 (match_operand:SI 0 "s_register_operand" ""))]
10026 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
10027 address and use bx. */
10031 tmp = gen_reg_rtx (SImode);
10032 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
10038 ;; NB Never uses BX.
10039 (define_insn "*arm_indirect_jump"
10041 (match_operand:SI 0 "s_register_operand" "r"))]
10043 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
10044 [(set_attr "predicable" "yes")]
10047 (define_insn "*load_indirect_jump"
10049 (match_operand:SI 0 "memory_operand" "m"))]
10051 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
10052 [(set_attr "type" "load1")
10053 (set_attr "pool_range" "4096")
10054 (set_attr "neg_pool_range" "4084")
10055 (set_attr "predicable" "yes")]
10058 ;; NB Never uses BX.
10059 (define_insn "*thumb1_indirect_jump"
10061 (match_operand:SI 0 "register_operand" "l*r"))]
10064 [(set_attr "conds" "clob")
10065 (set_attr "length" "2")]
10075 if (TARGET_UNIFIED_ASM)
10078 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
10079 return \"mov\\tr8, r8\";
10081 [(set (attr "length")
10082 (if_then_else (eq_attr "is_thumb" "yes")
10088 ;; Patterns to allow combination of arithmetic, cond code and shifts
10090 (define_insn "*arith_shiftsi"
10091 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10092 (match_operator:SI 1 "shiftable_operator"
10093 [(match_operator:SI 3 "shift_operator"
10094 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
10095 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
10096 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
10098 "%i1%?\\t%0, %2, %4%S3"
10099 [(set_attr "predicable" "yes")
10100 (set_attr "shift" "4")
10101 (set_attr "arch" "a,t2,t2,a")
10102 ;; Thumb2 doesn't allow the stack pointer to be used for
10103 ;; operand1 for all operations other than add and sub. In this case
10104 ;; the minus operation is a candidate for an rsub and hence needs
10106 ;; We have to make sure to disable the fourth alternative if
10107 ;; the shift_operator is MULT, since otherwise the insn will
10108 ;; also match a multiply_accumulate pattern and validate_change
10109 ;; will allow a replacement of the constant with a register
10110 ;; despite the checks done in shift_operator.
10111 (set_attr_alternative "insn_enabled"
10112 [(const_string "yes")
10114 (match_operand:SI 1 "add_operator" "")
10115 (const_string "yes") (const_string "no"))
10116 (const_string "yes")
10118 (match_operand:SI 3 "mult_operator" "")
10119 (const_string "no") (const_string "yes"))])
10120 (set_attr "type" "arlo_shift,arlo_shift,arlo_shift,arlo_shift_reg")])
10123 [(set (match_operand:SI 0 "s_register_operand" "")
10124 (match_operator:SI 1 "shiftable_operator"
10125 [(match_operator:SI 2 "shiftable_operator"
10126 [(match_operator:SI 3 "shift_operator"
10127 [(match_operand:SI 4 "s_register_operand" "")
10128 (match_operand:SI 5 "reg_or_int_operand" "")])
10129 (match_operand:SI 6 "s_register_operand" "")])
10130 (match_operand:SI 7 "arm_rhs_operand" "")]))
10131 (clobber (match_operand:SI 8 "s_register_operand" ""))]
10133 [(set (match_dup 8)
10134 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10137 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
10140 (define_insn "*arith_shiftsi_compare0"
10141 [(set (reg:CC_NOOV CC_REGNUM)
10143 (match_operator:SI 1 "shiftable_operator"
10144 [(match_operator:SI 3 "shift_operator"
10145 [(match_operand:SI 4 "s_register_operand" "r,r")
10146 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10147 (match_operand:SI 2 "s_register_operand" "r,r")])
10149 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10150 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
10153 "%i1%.\\t%0, %2, %4%S3"
10154 [(set_attr "conds" "set")
10155 (set_attr "shift" "4")
10156 (set_attr "arch" "32,a")
10157 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10159 (define_insn "*arith_shiftsi_compare0_scratch"
10160 [(set (reg:CC_NOOV CC_REGNUM)
10162 (match_operator:SI 1 "shiftable_operator"
10163 [(match_operator:SI 3 "shift_operator"
10164 [(match_operand:SI 4 "s_register_operand" "r,r")
10165 (match_operand:SI 5 "shift_amount_operand" "M,r")])
10166 (match_operand:SI 2 "s_register_operand" "r,r")])
10168 (clobber (match_scratch:SI 0 "=r,r"))]
10170 "%i1%.\\t%0, %2, %4%S3"
10171 [(set_attr "conds" "set")
10172 (set_attr "shift" "4")
10173 (set_attr "arch" "32,a")
10174 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10176 (define_insn "*sub_shiftsi"
10177 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10178 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10179 (match_operator:SI 2 "shift_operator"
10180 [(match_operand:SI 3 "s_register_operand" "r,r")
10181 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
10183 "sub%?\\t%0, %1, %3%S2"
10184 [(set_attr "predicable" "yes")
10185 (set_attr "shift" "3")
10186 (set_attr "arch" "32,a")
10187 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10189 (define_insn "*sub_shiftsi_compare0"
10190 [(set (reg:CC_NOOV CC_REGNUM)
10192 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10193 (match_operator:SI 2 "shift_operator"
10194 [(match_operand:SI 3 "s_register_operand" "r,r")
10195 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10197 (set (match_operand:SI 0 "s_register_operand" "=r,r")
10198 (minus:SI (match_dup 1)
10199 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
10201 "sub%.\\t%0, %1, %3%S2"
10202 [(set_attr "conds" "set")
10203 (set_attr "shift" "3")
10204 (set_attr "arch" "32,a")
10205 (set_attr "type" "arlo_shift,arlo_shift_reg")])
10207 (define_insn "*sub_shiftsi_compare0_scratch"
10208 [(set (reg:CC_NOOV CC_REGNUM)
10210 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
10211 (match_operator:SI 2 "shift_operator"
10212 [(match_operand:SI 3 "s_register_operand" "r,r")
10213 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
10215 (clobber (match_scratch:SI 0 "=r,r"))]
10217 "sub%.\\t%0, %1, %3%S2"
10218 [(set_attr "conds" "set")
10219 (set_attr "shift" "3")
10220 (set_attr "arch" "32,a")
10221 (set_attr "type" "arlo_shift,arlo_shift_reg")])
;; AND of a store-condition-code value (comparison of a CC register against 0)
;; with a register.  Emits "#" and splits after reload into two conditionally
;; executed sets: mov<inverse-cond> %0, #0 then and<cond> %0, %3, #1.
;; operands[4]/[5] are built in the C fragment as the condition and its
;; reverse; FP CC modes use reverse_condition_maybe_unordered.
;; NOTE(review): interior lines are missing from this extract (numbering skips
;; e.g. 10229, 10235, 10244, 10247); kept byte-for-byte.
10224 (define_insn_and_split "*and_scc"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r")
10226 (and:SI (match_operator:SI 1 "arm_comparison_operator"
10227 [(match_operand 2 "cc_register" "") (const_int 0)])
10228 (match_operand:SI 3 "s_register_operand" "r")))]
10230 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
10231 "&& reload_completed"
10232 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
10233 (cond_exec (match_dup 4) (set (match_dup 0)
10234 (and:SI (match_dup 3) (const_int 1))))]
10236 enum machine_mode mode = GET_MODE (operands[2]);
10237 enum rtx_code rc = GET_CODE (operands[1]);
10239 /* Note that operands[4] is the same as operands[1],
10240 but with VOIDmode as the result. */
10241 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10242 if (mode == CCFPmode || mode == CCFPEmode)
10243 rc = reverse_condition_maybe_unordered (rc);
10245 rc = reverse_condition (rc);
10246 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10248 [(set_attr "conds" "use")
10249 (set_attr "type" "mov_reg")
10250 (set_attr "length" "8")]
;; IOR of a store-condition-code value with a register.  Alternative 0 ties
;; %3 to %0 (single ORR); otherwise splits after reload into a conditional
;; move of %3 plus a conditional ORR with #1, guarded by the condition and
;; its reverse built in the C fragment (as in *and_scc above).
;; NOTE(review): interior lines are missing from this extract (numbering skips
;; e.g. 10258-10259, 10261, 10269); kept byte-for-byte.
10253 (define_insn_and_split "*ior_scc"
10254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10255 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
10256 [(match_operand 2 "cc_register" "") (const_int 0)])
10257 (match_operand:SI 3 "s_register_operand" "0,?r")))]
10260 orr%d1\\t%0, %3, #1
10262 "&& reload_completed
10263 && REGNO (operands [0]) != REGNO (operands[3])"
10264 ;; && which_alternative == 1
10265 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
10266 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
10267 (cond_exec (match_dup 4) (set (match_dup 0)
10268 (ior:SI (match_dup 3) (const_int 1))))]
10270 enum machine_mode mode = GET_MODE (operands[2]);
10271 enum rtx_code rc = GET_CODE (operands[1]);
10273 /* Note that operands[4] is the same as operands[1],
10274 but with VOIDmode as the result. */
10275 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10276 if (mode == CCFPmode || mode == CCFPEmode)
10277 rc = reverse_condition_maybe_unordered (rc);
10279 rc = reverse_condition (rc);
10280 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
10282 [(set_attr "conds" "use")
10283 (set_attr "length" "4,8")
10286 ; A series of splitters for the compare_scc pattern below. Note that
10287 ; order is important.
;; Splitters rewriting scc-style sets (lt/ge/eq/ne of a register) into
;; branchless sequences: lt→lshiftrt #31 of the sign bit, ge→not then
;; lshiftrt #31, eq→clz;lsr #5 (ARMv5), plus carry/cond_exec based forms
;; for eq/ne without clz.
;; NOTE(review): the "(define_split" opener lines themselves are absent from
;; this extract (numbering skips 10288, 10296, 10305, ...), and further
;; interior lines are missing; kept byte-for-byte — verify against the full
;; file before editing.
10289 [(set (match_operand:SI 0 "s_register_operand" "")
10290 (lt:SI (match_operand:SI 1 "s_register_operand" "")
10292 (clobber (reg:CC CC_REGNUM))]
10293 "TARGET_32BIT && reload_completed"
10294 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
10297 [(set (match_operand:SI 0 "s_register_operand" "")
10298 (ge:SI (match_operand:SI 1 "s_register_operand" "")
10300 (clobber (reg:CC CC_REGNUM))]
10301 "TARGET_32BIT && reload_completed"
10302 [(set (match_dup 0) (not:SI (match_dup 1)))
10303 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
10306 [(set (match_operand:SI 0 "s_register_operand" "")
10307 (eq:SI (match_operand:SI 1 "s_register_operand" "")
10309 (clobber (reg:CC CC_REGNUM))]
10310 "arm_arch5 && TARGET_32BIT"
10311 [(set (match_dup 0) (clz:SI (match_dup 1)))
10312 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10316 [(set (match_operand:SI 0 "s_register_operand" "")
10317 (eq:SI (match_operand:SI 1 "s_register_operand" "")
10319 (clobber (reg:CC CC_REGNUM))]
10320 "TARGET_32BIT && reload_completed"
10322 [(set (reg:CC CC_REGNUM)
10323 (compare:CC (const_int 1) (match_dup 1)))
10325 (minus:SI (const_int 1) (match_dup 1)))])
10326 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
10327 (set (match_dup 0) (const_int 0)))])
10330 [(set (match_operand:SI 0 "s_register_operand" "")
10331 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10332 (match_operand:SI 2 "const_int_operand" "")))
10333 (clobber (reg:CC CC_REGNUM))]
10334 "TARGET_32BIT && reload_completed"
10336 [(set (reg:CC CC_REGNUM)
10337 (compare:CC (match_dup 1) (match_dup 2)))
10338 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
10339 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
10340 (set (match_dup 0) (const_int 1)))]
10342 operands[3] = GEN_INT (-INTVAL (operands[2]));
10346 [(set (match_operand:SI 0 "s_register_operand" "")
10347 (ne:SI (match_operand:SI 1 "s_register_operand" "")
10348 (match_operand:SI 2 "arm_add_operand" "")))
10349 (clobber (reg:CC CC_REGNUM))]
10350 "TARGET_32BIT && reload_completed"
10352 [(set (reg:CC_NOOV CC_REGNUM)
10353 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10355 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10356 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10357 (set (match_dup 0) (const_int 1)))])
;; General scc: set %0 to the truth value of an arbitrary comparison of two
;; SImode operands.  Splits after reload into a compare followed by two
;; conditionally executed sets of #0 / #1; operands[4]/[5] are the reversed
;; and direct conditions against the selected CC mode.
;; NOTE(review): interior lines are missing (numbering skips 10365-10366,
;; 10371-10372, 10376, 10378, 10382); kept byte-for-byte.
10359 (define_insn_and_split "*compare_scc"
10360 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10361 (match_operator:SI 1 "arm_comparison_operator"
10362 [(match_operand:SI 2 "s_register_operand" "r,r")
10363 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10364 (clobber (reg:CC CC_REGNUM))]
10367 "&& reload_completed"
10368 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10369 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10370 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
10373 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10374 operands[2], operands[3]);
10375 enum rtx_code rc = GET_CODE (operands[1]);
10377 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10379 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10380 if (mode == CCFPmode || mode == CCFPEmode)
10381 rc = reverse_condition_maybe_unordered (rc);
10383 rc = reverse_condition (rc);
10384 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10387 ;; Attempt to improve the sequence generated by the compare_scc splitters
10388 ;; so as not to use conditional execution.
10390 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; Peephole2 patterns replacing the cond_exec #0/#1 sequences produced by the
;; compare_scc splitters with unconditional code (clz;lsr #5 on ARMv5, or
;; rsbs/adc carry tricks otherwise), valid only when the CC register is dead
;; afterwards (peep2_regno_dead_p).
;; NOTE(review): the "(define_peephole2" opener lines are absent from this
;; extract (numbering skips 10393, 10409, 10432, 10451), and further interior
;; lines are missing; kept byte-for-byte.
10394 [(set (reg:CC CC_REGNUM)
10395 (compare:CC (match_operand:SI 1 "register_operand" "")
10397 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10398 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10399 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10400 (set (match_dup 0) (const_int 1)))]
10401 "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10402 [(set (match_dup 0) (clz:SI (match_dup 1)))
10403 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10406 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
10408 ;; adc Rd, Rd, reg1
10410 [(set (reg:CC CC_REGNUM)
10411 (compare:CC (match_operand:SI 1 "register_operand" "")
10413 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10414 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10415 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10416 (set (match_dup 0) (const_int 1)))
10417 (match_scratch:SI 2 "r")]
10418 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10420 [(set (reg:CC CC_REGNUM)
10421 (compare:CC (const_int 0) (match_dup 1)))
10422 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
10424 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
10425 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
10428 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5
10429 ;; sub Rd, Reg1, reg2
10433 [(set (reg:CC CC_REGNUM)
10434 (compare:CC (match_operand:SI 1 "register_operand" "")
10435 (match_operand:SI 2 "arm_rhs_operand" "")))
10436 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10437 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10438 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10439 (set (match_dup 0) (const_int 1)))]
10440 "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10441 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
10442 (set (match_dup 0) (clz:SI (match_dup 0)))
10443 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
10447 ;; Rd = (eq (reg1) (reg2/imm)) // ! ARMv5
10448 ;; sub T1, Reg1, reg2
10452 [(set (reg:CC CC_REGNUM)
10453 (compare:CC (match_operand:SI 1 "register_operand" "")
10454 (match_operand:SI 2 "arm_rhs_operand" "")))
10455 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10456 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10457 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10458 (set (match_dup 0) (const_int 1)))
10459 (match_scratch:SI 3 "r")]
10460 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
10461 [(set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))
10463 [(set (reg:CC CC_REGNUM)
10464 (compare:CC (const_int 0) (match_dup 3)))
10465 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10467 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10468 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
;; Conditional move using an already-set CC register: operator 3 (eq/ne)
;; selects whether comparison 4 is used directly or inverted, and the C
;; fragment emits up to two predicated MOVs, skipping the one whose source is
;; already tied to %0 by the matching alternative.
;; NOTE(review): interior lines are missing (numbering skips 10476,
;; 10479-10480, 10482, 10487-10488, 10493-10494); kept byte-for-byte.
10471 (define_insn "*cond_move"
10472 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10473 (if_then_else:SI (match_operator 3 "equality_operator"
10474 [(match_operator 4 "arm_comparison_operator"
10475 [(match_operand 5 "cc_register" "") (const_int 0)])
10477 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10478 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10481 if (GET_CODE (operands[3]) == NE)
10483 if (which_alternative != 1)
10484 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10485 if (which_alternative != 0)
10486 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10489 if (which_alternative != 0)
10490 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10491 if (which_alternative != 1)
10492 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10495 [(set_attr "conds" "use")
10496 (set_attr "type" "mov_reg")
10497 (set_attr "length" "4,4,8")]
;; *cond_arith: apply a shiftable operator to (comparison-result, reg).  Has a
;; fast path for LT-against-zero using "lsr #31", otherwise emits an explicit
;; CMP plus predicated fixups depending on whether the operator is AND/MINUS.
;; *cond_sub: reg minus comparison-result, via CMP then predicated SUB of #1.
;; Both clobber the condition codes ("conds" "clob").
;; NOTE(review): interior lines are missing from this extract (numbering skips
;; e.g. 10508-10509, 10512, 10521, 10533-10534, 10539); kept byte-for-byte.
10500 (define_insn "*cond_arith"
10501 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10502 (match_operator:SI 5 "shiftable_operator"
10503 [(match_operator:SI 4 "arm_comparison_operator"
10504 [(match_operand:SI 2 "s_register_operand" "r,r")
10505 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10506 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10507 (clobber (reg:CC CC_REGNUM))]
10510 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10511 return \"%i5\\t%0, %1, %2, lsr #31\";
10513 output_asm_insn (\"cmp\\t%2, %3\", operands);
10514 if (GET_CODE (operands[5]) == AND)
10515 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10516 else if (GET_CODE (operands[5]) == MINUS)
10517 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10518 else if (which_alternative != 0)
10519 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10520 return \"%i5%d4\\t%0, %1, #1\";
10522 [(set_attr "conds" "clob")
10523 (set_attr "length" "12")]
10526 (define_insn "*cond_sub"
10527 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10528 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10529 (match_operator:SI 4 "arm_comparison_operator"
10530 [(match_operand:SI 2 "s_register_operand" "r,r")
10531 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10532 (clobber (reg:CC CC_REGNUM))]
10535 output_asm_insn (\"cmp\\t%2, %3\", operands);
10536 if (which_alternative != 0)
10537 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10538 return \"sub%d4\\t%0, %1, #1\";
10540 [(set_attr "conds" "clob")
10541 (set_attr "length" "8,12")]
;; Dominating-comparison patterns: combine two comparisons into a single CC
;; result via a compare followed by a conditional compare (CMP/CMN chosen per
;; alternative by cmp_idx; the second compare is predicated).  The `swap` flag
;; comes from comparison_dominates_p and selects which comparison is emitted
;; first; on Thumb-2 an IT instruction is also emitted.  *cmp_ite1 differs in
;; testing dominance against the reversed first condition and in using %D5
;; (inverted) forms in its cmp2 table.
;; NOTE(review): many interior lines are missing from this extract (numbering
;; skips e.g. 10546-10547, 10558-10562, 10564, 10573, 10575-10578, 10581,
;; 10584, 10586-10589, 10593, 10595, 10599, 10601-10602, 10606-10610, ...);
;; kept byte-for-byte — consult the full file before any change.
10544 (define_insn "*cmp_ite0"
10545 [(set (match_operand 6 "dominant_cc_register" "")
10548 (match_operator 4 "arm_comparison_operator"
10549 [(match_operand:SI 0 "s_register_operand"
10550 "l,l,l,r,r,r,r,r,r")
10551 (match_operand:SI 1 "arm_add_operand"
10552 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10553 (match_operator:SI 5 "arm_comparison_operator"
10554 [(match_operand:SI 2 "s_register_operand"
10555 "l,r,r,l,l,r,r,r,r")
10556 (match_operand:SI 3 "arm_add_operand"
10557 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10563 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10565 {\"cmp%d5\\t%0, %1\",
10566 \"cmp%d4\\t%2, %3\"},
10567 {\"cmn%d5\\t%0, #%n1\",
10568 \"cmp%d4\\t%2, %3\"},
10569 {\"cmp%d5\\t%0, %1\",
10570 \"cmn%d4\\t%2, #%n3\"},
10571 {\"cmn%d5\\t%0, #%n1\",
10572 \"cmn%d4\\t%2, #%n3\"}
10574 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10579 \"cmn\\t%0, #%n1\"},
10580 {\"cmn\\t%2, #%n3\",
10582 {\"cmn\\t%2, #%n3\",
10583 \"cmn\\t%0, #%n1\"}
10585 static const char * const ite[2] =
10590 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10591 CMP_CMP, CMN_CMP, CMP_CMP,
10592 CMN_CMP, CMP_CMN, CMN_CMN};
10594 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10596 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10597 if (TARGET_THUMB2) {
10598 output_asm_insn (ite[swap], operands);
10600 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10603 [(set_attr "conds" "set")
10604 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10605 (set_attr_alternative "length"
10611 (if_then_else (eq_attr "is_thumb" "no")
10614 (if_then_else (eq_attr "is_thumb" "no")
10617 (if_then_else (eq_attr "is_thumb" "no")
10620 (if_then_else (eq_attr "is_thumb" "no")
10625 (define_insn "*cmp_ite1"
10626 [(set (match_operand 6 "dominant_cc_register" "")
10629 (match_operator 4 "arm_comparison_operator"
10630 [(match_operand:SI 0 "s_register_operand"
10631 "l,l,l,r,r,r,r,r,r")
10632 (match_operand:SI 1 "arm_add_operand"
10633 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10634 (match_operator:SI 5 "arm_comparison_operator"
10635 [(match_operand:SI 2 "s_register_operand"
10636 "l,r,r,l,l,r,r,r,r")
10637 (match_operand:SI 3 "arm_add_operand"
10638 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10644 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10648 {\"cmn\\t%0, #%n1\",
10651 \"cmn\\t%2, #%n3\"},
10652 {\"cmn\\t%0, #%n1\",
10653 \"cmn\\t%2, #%n3\"}
10655 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10657 {\"cmp%d4\\t%2, %3\",
10658 \"cmp%D5\\t%0, %1\"},
10659 {\"cmp%d4\\t%2, %3\",
10660 \"cmn%D5\\t%0, #%n1\"},
10661 {\"cmn%d4\\t%2, #%n3\",
10662 \"cmp%D5\\t%0, %1\"},
10663 {\"cmn%d4\\t%2, #%n3\",
10664 \"cmn%D5\\t%0, #%n1\"}
10666 static const char * const ite[2] =
10671 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10672 CMP_CMP, CMN_CMP, CMP_CMP,
10673 CMN_CMP, CMP_CMN, CMN_CMN};
10675 comparison_dominates_p (GET_CODE (operands[5]),
10676 reverse_condition (GET_CODE (operands[4])));
10678 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10679 if (TARGET_THUMB2) {
10680 output_asm_insn (ite[swap], operands);
10682 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10685 [(set_attr "conds" "set")
10686 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10687 (set_attr_alternative "length"
10693 (if_then_else (eq_attr "is_thumb" "no")
10696 (if_then_else (eq_attr "is_thumb" "no")
10699 (if_then_else (eq_attr "is_thumb" "no")
10702 (if_then_else (eq_attr "is_thumb" "no")
;; *cmp_and / *cmp_ior: set a dominant-CC register from the AND / IOR of two
;; comparisons, emitted as a compare plus a predicated conditional compare,
;; using the same cmp1/cmp2/ite/cmp_idx table scheme as *cmp_ite0 above.
;; *cmp_ior's cmp2 table uses the inverted-condition %D4/%D5 forms.
;; NOTE(review): many interior lines are missing from this extract (numbering
;; skips e.g. 10709-10710, 10721-10724, 10726, 10735, 10737-10740, 10743,
;; 10746, 10748-10751, 10755, 10757, 10761, 10763-10764, ...); kept
;; byte-for-byte — consult the full file before any change.
10707 (define_insn "*cmp_and"
10708 [(set (match_operand 6 "dominant_cc_register" "")
10711 (match_operator 4 "arm_comparison_operator"
10712 [(match_operand:SI 0 "s_register_operand"
10713 "l,l,l,r,r,r,r,r,r")
10714 (match_operand:SI 1 "arm_add_operand"
10715 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10716 (match_operator:SI 5 "arm_comparison_operator"
10717 [(match_operand:SI 2 "s_register_operand"
10718 "l,r,r,l,l,r,r,r,r")
10719 (match_operand:SI 3 "arm_add_operand"
10720 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10725 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10727 {\"cmp%d5\\t%0, %1\",
10728 \"cmp%d4\\t%2, %3\"},
10729 {\"cmn%d5\\t%0, #%n1\",
10730 \"cmp%d4\\t%2, %3\"},
10731 {\"cmp%d5\\t%0, %1\",
10732 \"cmn%d4\\t%2, #%n3\"},
10733 {\"cmn%d5\\t%0, #%n1\",
10734 \"cmn%d4\\t%2, #%n3\"}
10736 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10741 \"cmn\\t%0, #%n1\"},
10742 {\"cmn\\t%2, #%n3\",
10744 {\"cmn\\t%2, #%n3\",
10745 \"cmn\\t%0, #%n1\"}
10747 static const char *const ite[2] =
10752 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10753 CMP_CMP, CMN_CMP, CMP_CMP,
10754 CMN_CMP, CMP_CMN, CMN_CMN};
10756 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10758 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10759 if (TARGET_THUMB2) {
10760 output_asm_insn (ite[swap], operands);
10762 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10765 [(set_attr "conds" "set")
10766 (set_attr "predicable" "no")
10767 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10768 (set_attr_alternative "length"
10774 (if_then_else (eq_attr "is_thumb" "no")
10777 (if_then_else (eq_attr "is_thumb" "no")
10780 (if_then_else (eq_attr "is_thumb" "no")
10783 (if_then_else (eq_attr "is_thumb" "no")
10788 (define_insn "*cmp_ior"
10789 [(set (match_operand 6 "dominant_cc_register" "")
10792 (match_operator 4 "arm_comparison_operator"
10793 [(match_operand:SI 0 "s_register_operand"
10794 "l,l,l,r,r,r,r,r,r")
10795 (match_operand:SI 1 "arm_add_operand"
10796 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10797 (match_operator:SI 5 "arm_comparison_operator"
10798 [(match_operand:SI 2 "s_register_operand"
10799 "l,r,r,l,l,r,r,r,r")
10800 (match_operand:SI 3 "arm_add_operand"
10801 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10806 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10810 {\"cmn\\t%0, #%n1\",
10813 \"cmn\\t%2, #%n3\"},
10814 {\"cmn\\t%0, #%n1\",
10815 \"cmn\\t%2, #%n3\"}
10817 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10819 {\"cmp%D4\\t%2, %3\",
10820 \"cmp%D5\\t%0, %1\"},
10821 {\"cmp%D4\\t%2, %3\",
10822 \"cmn%D5\\t%0, #%n1\"},
10823 {\"cmn%D4\\t%2, #%n3\",
10824 \"cmp%D5\\t%0, %1\"},
10825 {\"cmn%D4\\t%2, #%n3\",
10826 \"cmn%D5\\t%0, #%n1\"}
10828 static const char *const ite[2] =
10833 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10834 CMP_CMP, CMN_CMP, CMP_CMP,
10835 CMN_CMP, CMP_CMN, CMN_CMN};
10837 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10839 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10840 if (TARGET_THUMB2) {
10841 output_asm_insn (ite[swap], operands);
10843 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10847 [(set_attr "conds" "set")
10848 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10849 (set_attr_alternative "length"
10855 (if_then_else (eq_attr "is_thumb" "no")
10858 (if_then_else (eq_attr "is_thumb" "no")
10861 (if_then_else (eq_attr "is_thumb" "no")
10864 (if_then_else (eq_attr "is_thumb" "no")
;; scc-of-two-comparisons patterns.  *ior_scc_scc / *and_scc_scc compute the
;; IOR / AND of two comparisons into a register, splitting after reload into
;; a dominance-CC compare (via arm_select_dominance_cc_mode) followed by an
;; ne-based scc.  The *_cmp variants match the same value feeding a CC
;; compare, making a following CMP redundant, and split similarly.
;; NOTE(review): interior lines are missing from this extract (numbering
;; skips e.g. 10878, 10880-10881, 10884-10885, 10888, 10890, 10892-10893,
;; 10907, 10911-10912, 10915-10916, 10919, 10921, ...); kept byte-for-byte.
10869 (define_insn_and_split "*ior_scc_scc"
10870 [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10871 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10872 [(match_operand:SI 1 "s_register_operand" "r")
10873 (match_operand:SI 2 "arm_add_operand" "rIL")])
10874 (match_operator:SI 6 "arm_comparison_operator"
10875 [(match_operand:SI 4 "s_register_operand" "r")
10876 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10877 (clobber (reg:CC CC_REGNUM))]
10879 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10882 "TARGET_32BIT && reload_completed"
10883 [(set (match_dup 7)
10886 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10887 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10889 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10891 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10894 [(set_attr "conds" "clob")
10895 (set_attr "length" "16")])
10897 ; If the above pattern is followed by a CMP insn, then the compare is
10898 ; redundant, since we can rework the conditional instruction that follows.
10899 (define_insn_and_split "*ior_scc_scc_cmp"
10900 [(set (match_operand 0 "dominant_cc_register" "")
10901 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10902 [(match_operand:SI 1 "s_register_operand" "r")
10903 (match_operand:SI 2 "arm_add_operand" "rIL")])
10904 (match_operator:SI 6 "arm_comparison_operator"
10905 [(match_operand:SI 4 "s_register_operand" "r")
10906 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10908 (set (match_operand:SI 7 "s_register_operand" "=Ts")
10909 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10910 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10913 "TARGET_32BIT && reload_completed"
10914 [(set (match_dup 0)
10917 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10918 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10920 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10922 [(set_attr "conds" "set")
10923 (set_attr "length" "16")])
10925 (define_insn_and_split "*and_scc_scc"
10926 [(set (match_operand:SI 0 "s_register_operand" "=Ts")
10927 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10928 [(match_operand:SI 1 "s_register_operand" "r")
10929 (match_operand:SI 2 "arm_add_operand" "rIL")])
10930 (match_operator:SI 6 "arm_comparison_operator"
10931 [(match_operand:SI 4 "s_register_operand" "r")
10932 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10933 (clobber (reg:CC CC_REGNUM))]
10935 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10938 "TARGET_32BIT && reload_completed
10939 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10941 [(set (match_dup 7)
10944 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10945 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10947 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10949 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10952 [(set_attr "conds" "clob")
10953 (set_attr "length" "16")])
10955 ; If the above pattern is followed by a CMP insn, then the compare is
10956 ; redundant, since we can rework the conditional instruction that follows.
10957 (define_insn_and_split "*and_scc_scc_cmp"
10958 [(set (match_operand 0 "dominant_cc_register" "")
10959 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10960 [(match_operand:SI 1 "s_register_operand" "r")
10961 (match_operand:SI 2 "arm_add_operand" "rIL")])
10962 (match_operator:SI 6 "arm_comparison_operator"
10963 [(match_operand:SI 4 "s_register_operand" "r")
10964 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10966 (set (match_operand:SI 7 "s_register_operand" "=Ts")
10967 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10968 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10971 "TARGET_32BIT && reload_completed"
10972 [(set (match_dup 0)
10975 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10976 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10978 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10980 [(set_attr "conds" "set")
10981 (set_attr "length" "16")])
10983 ;; If there is no dominance in the comparison, then we can still save an
10984 ;; instruction in the AND case, since we can know that the second compare
10985 ;; need only zero the value if false (if true, then the value is already
;; AND of two comparisons when no dominance CC mode exists: split after
;; reload into a plain scc for the first comparison, a real compare for the
;; second (operands[7]/[8] built in the C fragment), then a conditional
;; zeroing of the result.  Note the earlyclobber "&Ts" destination.
;; NOTE(review): interior lines are missing (numbering skips 10996,
;; 10998-10999, 11005, 11007-11008, 11011, 11013); kept byte-for-byte.
10987 (define_insn_and_split "*and_scc_scc_nodom"
10988 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10989 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10990 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10991 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10992 (match_operator:SI 6 "arm_comparison_operator"
10993 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10994 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10995 (clobber (reg:CC CC_REGNUM))]
10997 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
11000 "TARGET_32BIT && reload_completed"
11001 [(parallel [(set (match_dup 0)
11002 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
11003 (clobber (reg:CC CC_REGNUM))])
11004 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
11006 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
11009 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
11010 operands[4], operands[5]),
11012 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
11014 [(set_attr "conds" "clob")
11015 (set_attr "length" "20")])
;; Two splitters for a CC_NOOV compare of (ior (and reg ...) (comparison))
;; in either operand order: materialise the IOR of the scc and the masked
;; bit into a scratch, then compare (and scratch 1) against zero.
;; NOTE(review): the "(define_split" opener lines are absent from this
;; extract (numbering skips 11016-11017, 11035-11036) and further interior
;; lines are missing (11021, 11025, 11027, 11030, 11033-11034, ...); kept
;; byte-for-byte.
11018 [(set (reg:CC_NOOV CC_REGNUM)
11019 (compare:CC_NOOV (ior:SI
11020 (and:SI (match_operand:SI 0 "s_register_operand" "")
11022 (match_operator:SI 1 "arm_comparison_operator"
11023 [(match_operand:SI 2 "s_register_operand" "")
11024 (match_operand:SI 3 "arm_add_operand" "")]))
11026 (clobber (match_operand:SI 4 "s_register_operand" ""))]
11028 [(set (match_dup 4)
11029 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
11031 (set (reg:CC_NOOV CC_REGNUM)
11032 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
11037 [(set (reg:CC_NOOV CC_REGNUM)
11038 (compare:CC_NOOV (ior:SI
11039 (match_operator:SI 1 "arm_comparison_operator"
11040 [(match_operand:SI 2 "s_register_operand" "")
11041 (match_operand:SI 3 "arm_add_operand" "")])
11042 (and:SI (match_operand:SI 0 "s_register_operand" "")
11045 (clobber (match_operand:SI 4 "s_register_operand" ""))]
11047 [(set (match_dup 4)
11048 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
11050 (set (reg:CC_NOOV CC_REGNUM)
11051 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
11054 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated scc: set %0 to -(comparison).  Split after reload in the C
;; fragment: LT-against-zero becomes a single ASR #31; NE becomes a
;; flag-setting subtract (cmpsi2_addneg for immediates, subsi3_compare
;; otherwise) plus a predicated MVN; the general case is a compare followed
;; by predicated moves of #0 and ~#0 (reversed then direct condition).
;; NOTE(review): interior lines are missing from this extract (numbering
;; skips e.g. 11062-11063, 11065-11066, 11068, 11070, 11073, 11075-11078,
;; 11080, 11085, 11087, 11090-11091, 11093-11098, ...); kept byte-for-byte.
11056 (define_insn_and_split "*negscc"
11057 [(set (match_operand:SI 0 "s_register_operand" "=r")
11058 (neg:SI (match_operator 3 "arm_comparison_operator"
11059 [(match_operand:SI 1 "s_register_operand" "r")
11060 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
11061 (clobber (reg:CC CC_REGNUM))]
11064 "&& reload_completed"
11067 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
11069 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
11071 /* Emit mov\\t%0, %1, asr #31 */
11072 emit_insn (gen_rtx_SET (VOIDmode,
11074 gen_rtx_ASHIFTRT (SImode,
11079 else if (GET_CODE (operands[3]) == NE)
11081 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
11082 if (CONST_INT_P (operands[2]))
11083 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
11084 GEN_INT (- INTVAL (operands[2]))));
11086 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
11088 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11089 gen_rtx_NE (SImode,
11092 gen_rtx_SET (SImode,
11099 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
11100 emit_insn (gen_rtx_SET (VOIDmode,
11102 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
11103 enum rtx_code rc = GET_CODE (operands[3]);
11105 rc = reverse_condition (rc);
11106 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11107 gen_rtx_fmt_ee (rc,
11111 gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
11112 rc = GET_CODE (operands[3]);
11113 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
11114 gen_rtx_fmt_ee (rc,
11118 gen_rtx_SET (VOIDmode,
11125 [(set_attr "conds" "clob")
11126 (set_attr "length" "12")]
;; Conditional move keyed on the comparison of an addition: splits after
;; reload into a CC_NOOV compare of the sum and two conditionally executed
;; moves.  The C fragment asserts no FP CC mode is involved and builds the
;; reversed condition in operands[6].
;; NOTE(review): interior lines are missing (numbering skips 11131, 11135,
;; 11139-11140, 11143, 11145-11146, 11150-11151, 11155, 11159, 11161-11162);
;; kept byte-for-byte.
11129 (define_insn_and_split "movcond_addsi"
11130 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
11132 (match_operator 5 "comparison_operator"
11133 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
11134 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
11136 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
11137 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
11138 (clobber (reg:CC CC_REGNUM))]
11141 "&& reload_completed"
11142 [(set (reg:CC_NOOV CC_REGNUM)
11144 (plus:SI (match_dup 3)
11147 (set (match_dup 0) (match_dup 1))
11148 (cond_exec (match_dup 6)
11149 (set (match_dup 0) (match_dup 2)))]
11152 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
11153 operands[3], operands[4]);
11154 enum rtx_code rc = GET_CODE (operands[5]);
11156 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11157 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
11158 rc = reverse_condition (rc);
11160 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11163 [(set_attr "conds" "clob")
11164 (set_attr "enabled_for_depr_it" "no,yes,yes")]
;; General conditional move with an explicit comparison.  The C fragment has
;; shortcuts for LT/GE against zero using sign-bit masks (and/bic with
;; "asr #31", or the flag-setting "asr #32" forms followed by movcc/movcs);
;; otherwise it emits CMP (or CMN for negatable out-of-range immediates) and
;; up to two predicated MOVs, skipping the alternative tied to %0.
;; NOTE(review): interior lines are missing (numbering skips e.g. 11169,
;; 11176-11177, 11180, 11182, 11186, 11188, 11192, 11194-11196, 11199, 11201,
;; 11205, 11207, 11211, 11213-11214, 11218, 11224-11225); kept byte-for-byte.
11167 (define_insn "movcond"
11168 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11170 (match_operator 5 "arm_comparison_operator"
11171 [(match_operand:SI 3 "s_register_operand" "r,r,r")
11172 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
11173 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
11174 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
11175 (clobber (reg:CC CC_REGNUM))]
11178 if (GET_CODE (operands[5]) == LT
11179 && (operands[4] == const0_rtx))
11181 if (which_alternative != 1 && REG_P (operands[1]))
11183 if (operands[2] == const0_rtx)
11184 return \"and\\t%0, %1, %3, asr #31\";
11185 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
11187 else if (which_alternative != 0 && REG_P (operands[2]))
11189 if (operands[1] == const0_rtx)
11190 return \"bic\\t%0, %2, %3, asr #31\";
11191 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
11193 /* The only case that falls through to here is when both ops 1 & 2
11197 if (GET_CODE (operands[5]) == GE
11198 && (operands[4] == const0_rtx))
11200 if (which_alternative != 1 && REG_P (operands[1]))
11202 if (operands[2] == const0_rtx)
11203 return \"bic\\t%0, %1, %3, asr #31\";
11204 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
11206 else if (which_alternative != 0 && REG_P (operands[2]))
11208 if (operands[1] == const0_rtx)
11209 return \"and\\t%0, %2, %3, asr #31\";
11210 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
11212 /* The only case that falls through to here is when both ops 1 & 2
11215 if (CONST_INT_P (operands[4])
11216 && !const_ok_for_arm (INTVAL (operands[4])))
11217 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
11219 output_asm_insn (\"cmp\\t%3, %4\", operands);
11220 if (which_alternative != 0)
11221 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
11222 if (which_alternative != 1)
11223 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
11226 [(set_attr "conds" "clob")
11227 (set_attr "length" "8,8,12")]
11230 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; Conditional plus/move patterns.  The *ifcompare_* forms carry their own
;; comparison and clobber CC ("conds" "clob"); the *if_* forms reuse an
;; already-set CC register ("conds" "use") and emit predicated ADD/SUB
;; (SUB with the negated immediate for "L"-constraint operands) plus an
;; optional predicated MOV for the untied alternatives.
;; NOTE(review): interior lines are missing from this extract (numbering
;; skips e.g. 11237, 11242-11243, 11246-11247, 11250, 11253, 11257-11258,
;; 11270, 11272-11273, 11280, 11284-11285, 11288-11289, 11292, 11296,
;; 11299-11300, 11312, 11314-11315); kept byte-for-byte.
11232 (define_insn "*ifcompare_plus_move"
11233 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11234 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11235 [(match_operand:SI 4 "s_register_operand" "r,r")
11236 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11238 (match_operand:SI 2 "s_register_operand" "r,r")
11239 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
11240 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11241 (clobber (reg:CC CC_REGNUM))]
11244 [(set_attr "conds" "clob")
11245 (set_attr "length" "8,12")]
11248 (define_insn "*if_plus_move"
11249 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11251 (match_operator 4 "arm_comparison_operator"
11252 [(match_operand 5 "cc_register" "") (const_int 0)])
11254 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11255 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
11256 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
11259 add%d4\\t%0, %2, %3
11260 sub%d4\\t%0, %2, #%n3
11261 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
11262 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
11263 [(set_attr "conds" "use")
11264 (set_attr "length" "4,4,8,8")
11265 (set_attr_alternative "type"
11266 [(if_then_else (match_operand 3 "const_int_operand" "")
11267 (const_string "arlo_imm" )
11268 (const_string "*"))
11269 (const_string "arlo_imm")
11271 (const_string "*")])]
11274 (define_insn "*ifcompare_move_plus"
11275 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11276 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11277 [(match_operand:SI 4 "s_register_operand" "r,r")
11278 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11279 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11281 (match_operand:SI 2 "s_register_operand" "r,r")
11282 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
11283 (clobber (reg:CC CC_REGNUM))]
11286 [(set_attr "conds" "clob")
11287 (set_attr "length" "8,12")]
11290 (define_insn "*if_move_plus"
11291 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
11293 (match_operator 4 "arm_comparison_operator"
11294 [(match_operand 5 "cc_register" "") (const_int 0)])
11295 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
11297 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
11298 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
11301 add%D4\\t%0, %2, %3
11302 sub%D4\\t%0, %2, #%n3
11303 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
11304 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
11305 [(set_attr "conds" "use")
11306 (set_attr "length" "4,4,8,8")
11307 (set_attr_alternative "type"
11308 [(if_then_else (match_operand 3 "const_int_operand" "")
11309 (const_string "arlo_imm" )
11310 (const_string "*"))
11311 (const_string "arlo_imm")
11313 (const_string "*")])]
;; Select between two shiftable-operator results.  *ifcompare_arith_arith
;; carries its own comparison and clobbers CC; *if_arith_arith reuses a set
;; CC register and emits the two operations predicated on opposite
;; conditions ("%I6%d5" / "%I7%D5").
;; NOTE(review): interior lines are missing from this extract (numbering
;; skips 11328-11329, 11332-11333, 11344, 11348-11349); kept byte-for-byte.
11316 (define_insn "*ifcompare_arith_arith"
11317 [(set (match_operand:SI 0 "s_register_operand" "=r")
11318 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
11319 [(match_operand:SI 5 "s_register_operand" "r")
11320 (match_operand:SI 6 "arm_add_operand" "rIL")])
11321 (match_operator:SI 8 "shiftable_operator"
11322 [(match_operand:SI 1 "s_register_operand" "r")
11323 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11324 (match_operator:SI 7 "shiftable_operator"
11325 [(match_operand:SI 3 "s_register_operand" "r")
11326 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
11327 (clobber (reg:CC CC_REGNUM))]
11330 [(set_attr "conds" "clob")
11331 (set_attr "length" "12")]
11334 (define_insn "*if_arith_arith"
11335 [(set (match_operand:SI 0 "s_register_operand" "=r")
11336 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
11337 [(match_operand 8 "cc_register" "") (const_int 0)])
11338 (match_operator:SI 6 "shiftable_operator"
11339 [(match_operand:SI 1 "s_register_operand" "r")
11340 (match_operand:SI 2 "arm_rhs_operand" "rI")])
11341 (match_operator:SI 7 "shiftable_operator"
11342 [(match_operand:SI 3 "s_register_operand" "r")
11343 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
11345 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
11346 [(set_attr "conds" "use")
11347 (set_attr "length" "8")]
11350 (define_insn "*ifcompare_arith_move"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11352 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11353 [(match_operand:SI 2 "s_register_operand" "r,r")
11354 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
11355 (match_operator:SI 7 "shiftable_operator"
11356 [(match_operand:SI 4 "s_register_operand" "r,r")
11357 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
11358 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
11359 (clobber (reg:CC CC_REGNUM))]
11362 /* If we have an operation where (op x 0) is the identity operation and
11363 the conditional operator is LT or GE and we are comparing against zero and
11364 everything is in registers then we can do this in two instructions. */
11365 if (operands[3] == const0_rtx
11366 && GET_CODE (operands[7]) != AND
11367 && REG_P (operands[5])
11368 && REG_P (operands[1])
11369 && REGNO (operands[1]) == REGNO (operands[4])
11370 && REGNO (operands[4]) != REGNO (operands[0]))
11372 if (GET_CODE (operands[6]) == LT)
11373 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11374 else if (GET_CODE (operands[6]) == GE)
11375 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
11377 if (CONST_INT_P (operands[3])
11378 && !const_ok_for_arm (INTVAL (operands[3])))
11379 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
11381 output_asm_insn (\"cmp\\t%2, %3\", operands);
11382 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
11383 if (which_alternative != 0)
11384 return \"mov%D6\\t%0, %1\";
11387 [(set_attr "conds" "clob")
11388 (set_attr "length" "8,12")]
;; *if_arith_move: conditional select between an arithmetic result and a
;; plain value, flags already set (operand 6 is the CC register).
;; Alternative 0 has operand 1 tied to the destination ("0"), so only the
;; predicated arithmetic op is needed (length 4); alternative 1 also emits
;; a conditional mov of %1 on the opposite condition (length 8).
11391 (define_insn "*if_arith_move"
11392 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11393 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11394 [(match_operand 6 "cc_register" "") (const_int 0)])
11395 (match_operator:SI 5 "shiftable_operator"
11396 [(match_operand:SI 2 "s_register_operand" "r,r")
11397 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
11398 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
11401 %I5%d4\\t%0, %2, %3
11402 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
11403 [(set_attr "conds" "use")
11404 (set_attr "length" "4,8")
11405 (set_attr "type" "*,*")]
11408 (define_insn "*ifcompare_move_arith"
11409 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11410 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
11411 [(match_operand:SI 4 "s_register_operand" "r,r")
11412 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11413 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11414 (match_operator:SI 7 "shiftable_operator"
11415 [(match_operand:SI 2 "s_register_operand" "r,r")
11416 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
11417 (clobber (reg:CC CC_REGNUM))]
11420 /* If we have an operation where (op x 0) is the identity operation and
11421 the conditional operator is LT or GE and we are comparing against zero and
11422 everything is in registers then we can do this in two instructions */
11423 if (operands[5] == const0_rtx
11424 && GET_CODE (operands[7]) != AND
11425 && REG_P (operands[3])
11426 && REG_P (operands[1])
11427 && REGNO (operands[1]) == REGNO (operands[2])
11428 && REGNO (operands[2]) != REGNO (operands[0]))
11430 if (GET_CODE (operands[6]) == GE)
11431 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11432 else if (GET_CODE (operands[6]) == LT)
11433 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
11436 if (CONST_INT_P (operands[5])
11437 && !const_ok_for_arm (INTVAL (operands[5])))
11438 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
11440 output_asm_insn (\"cmp\\t%4, %5\", operands);
11442 if (which_alternative != 0)
11443 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
11444 return \"%I7%D6\\t%0, %2, %3\";
11446 [(set_attr "conds" "clob")
11447 (set_attr "length" "8,12")]
;; *if_move_arith: mirror image of *if_arith_move -- the arithmetic result
;; is the "else" arm, so the data-processing op is emitted on the inverted
;; condition (%D4) and, when operand 1 is not tied to %0, a mov on the
;; original condition (%d4) supplies the "then" value.
11450 (define_insn "*if_move_arith"
11451 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11453 (match_operator 4 "arm_comparison_operator"
11454 [(match_operand 6 "cc_register" "") (const_int 0)])
11455 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11456 (match_operator:SI 5 "shiftable_operator"
11457 [(match_operand:SI 2 "s_register_operand" "r,r")
11458 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11461 %I5%D4\\t%0, %2, %3
11462 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11463 [(set_attr "conds" "use")
11464 (set_attr "length" "4,8")
11465 (set_attr "type" "*,*")]
11468 (define_insn "*ifcompare_move_not"
11469 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11471 (match_operator 5 "arm_comparison_operator"
11472 [(match_operand:SI 3 "s_register_operand" "r,r")
11473 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11474 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11476 (match_operand:SI 2 "s_register_operand" "r,r"))))
11477 (clobber (reg:CC CC_REGNUM))]
11480 [(set_attr "conds" "clob")
11481 (set_attr "length" "8,12")]
;; *if_move_not: conditional select where the "else" arm is a bitwise NOT.
;; Three alternatives for operand 1: tied to %0 (single mvn), a register or
;; I-constant (mov + mvn pair), or a K-constant whose complement is
;; representable (mvn #%B1 + mvn pair).
11484 (define_insn "*if_move_not"
11485 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11487 (match_operator 4 "arm_comparison_operator"
11488 [(match_operand 3 "cc_register" "") (const_int 0)])
11489 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11490 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11494 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11495 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11496 [(set_attr "conds" "use")
11497 (set_attr "type" "mvn_reg")
11498 (set_attr "length" "4,8,8")]
11501 (define_insn "*ifcompare_not_move"
11502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11504 (match_operator 5 "arm_comparison_operator"
11505 [(match_operand:SI 3 "s_register_operand" "r,r")
11506 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11508 (match_operand:SI 2 "s_register_operand" "r,r"))
11509 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11510 (clobber (reg:CC CC_REGNUM))]
11513 [(set_attr "conds" "clob")
11514 (set_attr "length" "8,12")]
;; *if_not_move: counterpart of *if_move_not with the NOT in the "then"
;; arm, so the mvn of %2 carries the direct condition (%d4) and the mov /
;; mvn-of-complement for operand 1 carries the inverted one (%D4).
11517 (define_insn "*if_not_move"
11518 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11520 (match_operator 4 "arm_comparison_operator"
11521 [(match_operand 3 "cc_register" "") (const_int 0)])
11522 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11523 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11527 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11528 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11529 [(set_attr "conds" "use")
11530 (set_attr "type" "mvn_reg")
11531 (set_attr "length" "4,8,8")]
11534 (define_insn "*ifcompare_shift_move"
11535 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11537 (match_operator 6 "arm_comparison_operator"
11538 [(match_operand:SI 4 "s_register_operand" "r,r")
11539 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11540 (match_operator:SI 7 "shift_operator"
11541 [(match_operand:SI 2 "s_register_operand" "r,r")
11542 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11543 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11544 (clobber (reg:CC CC_REGNUM))]
11547 [(set_attr "conds" "clob")
11548 (set_attr "length" "8,12")]
11551 (define_insn "*if_shift_move"
11552 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11554 (match_operator 5 "arm_comparison_operator"
11555 [(match_operand 6 "cc_register" "") (const_int 0)])
11556 (match_operator:SI 4 "shift_operator"
11557 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11558 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11559 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11563 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11564 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11565 [(set_attr "conds" "use")
11566 (set_attr "shift" "2")
11567 (set_attr "length" "4,8,8")
11568 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11569 (const_string "mov_shift")
11570 (const_string "mov_shift_reg")))]
11573 (define_insn "*ifcompare_move_shift"
11574 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11576 (match_operator 6 "arm_comparison_operator"
11577 [(match_operand:SI 4 "s_register_operand" "r,r")
11578 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11579 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11580 (match_operator:SI 7 "shift_operator"
11581 [(match_operand:SI 2 "s_register_operand" "r,r")
11582 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11583 (clobber (reg:CC CC_REGNUM))]
11586 [(set_attr "conds" "clob")
11587 (set_attr "length" "8,12")]
11590 (define_insn "*if_move_shift"
11591 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11593 (match_operator 5 "arm_comparison_operator"
11594 [(match_operand 6 "cc_register" "") (const_int 0)])
11595 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11596 (match_operator:SI 4 "shift_operator"
11597 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11598 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11602 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11603 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11604 [(set_attr "conds" "use")
11605 (set_attr "shift" "2")
11606 (set_attr "length" "4,8,8")
11607 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11608 (const_string "mov_shift")
11609 (const_string "mov_shift_reg")))]
11612 (define_insn "*ifcompare_shift_shift"
11613 [(set (match_operand:SI 0 "s_register_operand" "=r")
11615 (match_operator 7 "arm_comparison_operator"
11616 [(match_operand:SI 5 "s_register_operand" "r")
11617 (match_operand:SI 6 "arm_add_operand" "rIL")])
11618 (match_operator:SI 8 "shift_operator"
11619 [(match_operand:SI 1 "s_register_operand" "r")
11620 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11621 (match_operator:SI 9 "shift_operator"
11622 [(match_operand:SI 3 "s_register_operand" "r")
11623 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11624 (clobber (reg:CC CC_REGNUM))]
11627 [(set_attr "conds" "clob")
11628 (set_attr "length" "12")]
11631 (define_insn "*if_shift_shift"
11632 [(set (match_operand:SI 0 "s_register_operand" "=r")
11634 (match_operator 5 "arm_comparison_operator"
11635 [(match_operand 8 "cc_register" "") (const_int 0)])
11636 (match_operator:SI 6 "shift_operator"
11637 [(match_operand:SI 1 "s_register_operand" "r")
11638 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11639 (match_operator:SI 7 "shift_operator"
11640 [(match_operand:SI 3 "s_register_operand" "r")
11641 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11643 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11644 [(set_attr "conds" "use")
11645 (set_attr "shift" "1")
11646 (set_attr "length" "8")
11647 (set (attr "type") (if_then_else
11648 (and (match_operand 2 "const_int_operand" "")
11649 (match_operand 4 "const_int_operand" ""))
11650 (const_string "mov_shift")
11651 (const_string "mov_shift_reg")))]
11654 (define_insn "*ifcompare_not_arith"
11655 [(set (match_operand:SI 0 "s_register_operand" "=r")
11657 (match_operator 6 "arm_comparison_operator"
11658 [(match_operand:SI 4 "s_register_operand" "r")
11659 (match_operand:SI 5 "arm_add_operand" "rIL")])
11660 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11661 (match_operator:SI 7 "shiftable_operator"
11662 [(match_operand:SI 2 "s_register_operand" "r")
11663 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11664 (clobber (reg:CC CC_REGNUM))]
11667 [(set_attr "conds" "clob")
11668 (set_attr "length" "12")]
11671 (define_insn "*if_not_arith"
11672 [(set (match_operand:SI 0 "s_register_operand" "=r")
11674 (match_operator 5 "arm_comparison_operator"
11675 [(match_operand 4 "cc_register" "") (const_int 0)])
11676 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11677 (match_operator:SI 6 "shiftable_operator"
11678 [(match_operand:SI 2 "s_register_operand" "r")
11679 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11681 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11682 [(set_attr "conds" "use")
11683 (set_attr "type" "mvn_reg")
11684 (set_attr "length" "8")]
11687 (define_insn "*ifcompare_arith_not"
11688 [(set (match_operand:SI 0 "s_register_operand" "=r")
11690 (match_operator 6 "arm_comparison_operator"
11691 [(match_operand:SI 4 "s_register_operand" "r")
11692 (match_operand:SI 5 "arm_add_operand" "rIL")])
11693 (match_operator:SI 7 "shiftable_operator"
11694 [(match_operand:SI 2 "s_register_operand" "r")
11695 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11696 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11697 (clobber (reg:CC CC_REGNUM))]
11700 [(set_attr "conds" "clob")
11701 (set_attr "length" "12")]
11704 (define_insn "*if_arith_not"
11705 [(set (match_operand:SI 0 "s_register_operand" "=r")
11707 (match_operator 5 "arm_comparison_operator"
11708 [(match_operand 4 "cc_register" "") (const_int 0)])
11709 (match_operator:SI 6 "shiftable_operator"
11710 [(match_operand:SI 2 "s_register_operand" "r")
11711 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11712 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11714 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11715 [(set_attr "conds" "use")
11716 (set_attr "type" "mvn_reg")
11717 (set_attr "length" "8")]
11720 (define_insn "*ifcompare_neg_move"
11721 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11723 (match_operator 5 "arm_comparison_operator"
11724 [(match_operand:SI 3 "s_register_operand" "r,r")
11725 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11726 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11727 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11728 (clobber (reg:CC CC_REGNUM))]
11731 [(set_attr "conds" "clob")
11732 (set_attr "length" "8,12")]
;; *if_neg_move: conditional negate -- "then" arm is (neg %2), synthesized
;; as a predicated RSB from zero; the "else" value is handled as in the
;; other *if_*_move patterns (tied / mov / mvn-of-complement).
11735 (define_insn "*if_neg_move"
11736 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11738 (match_operator 4 "arm_comparison_operator"
11739 [(match_operand 3 "cc_register" "") (const_int 0)])
11740 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11741 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11744 rsb%d4\\t%0, %2, #0
11745 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11746 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11747 [(set_attr "conds" "use")
11748 (set_attr "length" "4,8,8")]
11751 (define_insn "*ifcompare_move_neg"
11752 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11754 (match_operator 5 "arm_comparison_operator"
11755 [(match_operand:SI 3 "s_register_operand" "r,r")
11756 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11757 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11758 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11759 (clobber (reg:CC CC_REGNUM))]
11762 [(set_attr "conds" "clob")
11763 (set_attr "length" "8,12")]
;; *if_move_neg: mirror of *if_neg_move with the negate in the "else" arm,
;; so the RSB uses the inverted condition (%D4) and the value move the
;; direct one (%d4).
11766 (define_insn "*if_move_neg"
11767 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11769 (match_operator 4 "arm_comparison_operator"
11770 [(match_operand 3 "cc_register" "") (const_int 0)])
11771 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11772 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11775 rsb%D4\\t%0, %2, #0
11776 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11777 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11778 [(set_attr "conds" "use")
11779 (set_attr "length" "4,8,8")]
11782 (define_insn "*arith_adjacentmem"
11783 [(set (match_operand:SI 0 "s_register_operand" "=r")
11784 (match_operator:SI 1 "shiftable_operator"
11785 [(match_operand:SI 2 "memory_operand" "m")
11786 (match_operand:SI 3 "memory_operand" "m")]))
11787 (clobber (match_scratch:SI 4 "=r"))]
11788 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11794 HOST_WIDE_INT val1 = 0, val2 = 0;
11796 if (REGNO (operands[0]) > REGNO (operands[4]))
11798 ldm[1] = operands[4];
11799 ldm[2] = operands[0];
11803 ldm[1] = operands[0];
11804 ldm[2] = operands[4];
11807 base_reg = XEXP (operands[2], 0);
11809 if (!REG_P (base_reg))
11811 val1 = INTVAL (XEXP (base_reg, 1));
11812 base_reg = XEXP (base_reg, 0);
11815 if (!REG_P (XEXP (operands[3], 0)))
11816 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11818 arith[0] = operands[0];
11819 arith[3] = operands[1];
11833 if (val1 !=0 && val2 != 0)
11837 if (val1 == 4 || val2 == 4)
11838 /* Other val must be 8, since we know they are adjacent and neither
11840 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11841 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11843 ldm[0] = ops[0] = operands[4];
11845 ops[2] = GEN_INT (val1);
11846 output_add_immediate (ops);
11848 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11850 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11854 /* Offset is out of range for a single add, so use two ldr. */
11857 ops[2] = GEN_INT (val1);
11858 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11860 ops[2] = GEN_INT (val2);
11861 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11864 else if (val1 != 0)
11867 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11869 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11874 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11876 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11878 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11881 [(set_attr "length" "12")
11882 (set_attr "predicable" "yes")
11883 (set_attr "type" "load1")]
11886 ; This pattern is never tried by combine, so do it as a peephole
11889 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11890 (match_operand:SI 1 "arm_general_register_operand" ""))
11891 (set (reg:CC CC_REGNUM)
11892 (compare:CC (match_dup 1) (const_int 0)))]
11894 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11895 (set (match_dup 0) (match_dup 1))])]
11900 [(set (match_operand:SI 0 "s_register_operand" "")
11901 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11903 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11904 [(match_operand:SI 3 "s_register_operand" "")
11905 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11906 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11908 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11909 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11914 ;; This split can be used because CC_Z mode implies that the following
11915 ;; branch will be an equality, or an unsigned inequality, so the sign
11916 ;; extension is not needed.
11919 [(set (reg:CC_Z CC_REGNUM)
11921 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11923 (match_operand 1 "const_int_operand" "")))
11924 (clobber (match_scratch:SI 2 ""))]
11926 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11927 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11928 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11929 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11931 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11934 ;; ??? Check the patterns above for Thumb-2 usefulness
11936 (define_expand "prologue"
11937 [(clobber (const_int 0))]
11940 arm_expand_prologue ();
11942 thumb1_expand_prologue ();
11947 (define_expand "epilogue"
11948 [(clobber (const_int 0))]
11951 if (crtl->calls_eh_return)
11952 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11955 thumb1_expand_epilogue ();
11956 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11957 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11959 else if (HAVE_return)
11961 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11962 no need for explicit testing again. */
11963 emit_jump_insn (gen_return ());
11965 else if (TARGET_32BIT)
11967 arm_expand_epilogue (true);
11973 (define_insn "prologue_thumb1_interwork"
11974 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11976 "* return thumb1_output_interwork ();"
11977 [(set_attr "length" "8")]
11980 ;; Note - although unspec_volatile's USE all hard registers,
11981 ;; USEs are ignored after reload has completed. Thus we need
11982 ;; to add an unspec of the link register to ensure that flow
11983 ;; does not think that it is unused by the sibcall branch that
11984 ;; will replace the standard function epilogue.
11985 (define_expand "sibcall_epilogue"
11986 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11987 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11990 arm_expand_epilogue (false);
11995 (define_insn "*epilogue_insns"
11996 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11999 return thumb1_unexpanded_epilogue ();
12001 ; Length is absolute worst case
12002 [(set_attr "length" "44")
12003 (set_attr "type" "block")
12004 ;; We don't clobber the conditions, but the potential length of this
12005 ;; operation is sufficient to make conditionalizing the sequence
12006 ;; unlikely to be profitable.
12007 (set_attr "conds" "clob")]
12010 (define_expand "eh_epilogue"
12011 [(use (match_operand:SI 0 "register_operand" ""))
12012 (use (match_operand:SI 1 "register_operand" ""))
12013 (use (match_operand:SI 2 "register_operand" ""))]
12017 cfun->machine->eh_epilogue_sp_ofs = operands[1];
12018 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
12020 rtx ra = gen_rtx_REG (Pmode, 2);
12022 emit_move_insn (ra, operands[2]);
12025 /* This is a hack -- we may have crystalized the function type too
12027 cfun->machine->func_type = 0;
12031 ;; This split is only used during output to reduce the number of patterns
12032 ;; that need assembler instructions adding to them. We allowed the setting
12033 ;; of the conditions to be implicit during rtl generation so that
12034 ;; the conditional compare patterns would work. However this conflicts to
12035 ;; some extent with the conditional data operations, so we have to split them
12038 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
12039 ;; conditional execution sufficient?
12042 [(set (match_operand:SI 0 "s_register_operand" "")
12043 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
12044 [(match_operand 2 "" "") (match_operand 3 "" "")])
12046 (match_operand 4 "" "")))
12047 (clobber (reg:CC CC_REGNUM))]
12048 "TARGET_ARM && reload_completed"
12049 [(set (match_dup 5) (match_dup 6))
12050 (cond_exec (match_dup 7)
12051 (set (match_dup 0) (match_dup 4)))]
12054 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
12055 operands[2], operands[3]);
12056 enum rtx_code rc = GET_CODE (operands[1]);
12058 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
12059 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
12060 if (mode == CCFPmode || mode == CCFPEmode)
12061 rc = reverse_condition_maybe_unordered (rc);
12063 rc = reverse_condition (rc);
12065 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
12070 [(set (match_operand:SI 0 "s_register_operand" "")
12071 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
12072 [(match_operand 2 "" "") (match_operand 3 "" "")])
12073 (match_operand 4 "" "")
12075 (clobber (reg:CC CC_REGNUM))]
12076 "TARGET_ARM && reload_completed"
12077 [(set (match_dup 5) (match_dup 6))
12078 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
12079 (set (match_dup 0) (match_dup 4)))]
12082 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
12083 operands[2], operands[3]);
12085 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
12086 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
12091 [(set (match_operand:SI 0 "s_register_operand" "")
12092 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
12093 [(match_operand 2 "" "") (match_operand 3 "" "")])
12094 (match_operand 4 "" "")
12095 (match_operand 5 "" "")))
12096 (clobber (reg:CC CC_REGNUM))]
12097 "TARGET_ARM && reload_completed"
12098 [(set (match_dup 6) (match_dup 7))
12099 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
12100 (set (match_dup 0) (match_dup 4)))
12101 (cond_exec (match_dup 8)
12102 (set (match_dup 0) (match_dup 5)))]
12105 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
12106 operands[2], operands[3]);
12107 enum rtx_code rc = GET_CODE (operands[1]);
12109 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
12110 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
12111 if (mode == CCFPmode || mode == CCFPEmode)
12112 rc = reverse_condition_maybe_unordered (rc);
12114 rc = reverse_condition (rc);
12116 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
12121 [(set (match_operand:SI 0 "s_register_operand" "")
12122 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
12123 [(match_operand:SI 2 "s_register_operand" "")
12124 (match_operand:SI 3 "arm_add_operand" "")])
12125 (match_operand:SI 4 "arm_rhs_operand" "")
12127 (match_operand:SI 5 "s_register_operand" ""))))
12128 (clobber (reg:CC CC_REGNUM))]
12129 "TARGET_ARM && reload_completed"
12130 [(set (match_dup 6) (match_dup 7))
12131 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
12132 (set (match_dup 0) (match_dup 4)))
12133 (cond_exec (match_dup 8)
12134 (set (match_dup 0) (not:SI (match_dup 5))))]
12137 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
12138 operands[2], operands[3]);
12139 enum rtx_code rc = GET_CODE (operands[1]);
12141 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
12142 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
12143 if (mode == CCFPmode || mode == CCFPEmode)
12144 rc = reverse_condition_maybe_unordered (rc);
12146 rc = reverse_condition (rc);
12148 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
12152 (define_insn "*cond_move_not"
12153 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
12154 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
12155 [(match_operand 3 "cc_register" "") (const_int 0)])
12156 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
12158 (match_operand:SI 2 "s_register_operand" "r,r"))))]
12162 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
12163 [(set_attr "conds" "use")
12164 (set_attr "type" "mvn_reg")
12165 (set_attr "length" "4,8")]
12168 ;; The next two patterns occur when an AND operation is followed by a
12169 ;; scc insn sequence
12171 (define_insn "*sign_extract_onebit"
12172 [(set (match_operand:SI 0 "s_register_operand" "=r")
12173 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12175 (match_operand:SI 2 "const_int_operand" "n")))
12176 (clobber (reg:CC CC_REGNUM))]
12179 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12180 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
12181 return \"mvnne\\t%0, #0\";
12183 [(set_attr "conds" "clob")
12184 (set_attr "length" "8")]
12187 (define_insn "*not_signextract_onebit"
12188 [(set (match_operand:SI 0 "s_register_operand" "=r")
12190 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
12192 (match_operand:SI 2 "const_int_operand" "n"))))
12193 (clobber (reg:CC CC_REGNUM))]
12196 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
12197 output_asm_insn (\"tst\\t%1, %2\", operands);
12198 output_asm_insn (\"mvneq\\t%0, #0\", operands);
12199 return \"movne\\t%0, #0\";
12201 [(set_attr "conds" "clob")
12202 (set_attr "length" "12")]
12204 ;; ??? The above patterns need auditing for Thumb-2
12206 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
12207 ;; expressions. For simplicity, the first register is also in the unspec
12209 ;; To avoid the usage of GNU extension, the length attribute is computed
12210 ;; in a C function arm_attr_length_push_multi.
12211 (define_insn "*push_multi"
12212 [(match_parallel 2 "multi_register_push"
12213 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
12214 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
12215 UNSPEC_PUSH_MULT))])]
12219 int num_saves = XVECLEN (operands[2], 0);
12221 /* For the StrongARM at least it is faster to
12222 use STR to store only a single register.
12223 In Thumb mode always use push, and the assembler will pick
12224 something appropriate. */
12225 if (num_saves == 1 && TARGET_ARM)
12226 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
12233 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
12234 else if (TARGET_THUMB2)
12235 strcpy (pattern, \"push%?\\t{%1\");
12237 strcpy (pattern, \"push\\t{%1\");
12239 for (i = 1; i < num_saves; i++)
12241 strcat (pattern, \", %|\");
12243 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
12246 strcat (pattern, \"}\");
12247 output_asm_insn (pattern, operands);
12252 [(set_attr "type" "store4")
12253 (set (attr "length")
12254 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; stack_tie: emits no machine code (length 0).  The store to a scratch
;; BLK mem of an unspec over two registers creates an artificial
;; dependence, presumably to keep the scheduler from moving stack accesses
;; across stack-pointer adjustments -- TODO confirm against full arm.md.
12257 (define_insn "stack_tie"
12258 [(set (mem:BLK (scratch))
12259 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
12260 (match_operand:SI 1 "s_register_operand" "rk")]
12264 [(set_attr "length" "0")]
12267 ;; Pop (as used in epilogue RTL)
12269 (define_insn "*load_multiple_with_writeback"
12270 [(match_parallel 0 "load_multiple_operation"
12271 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12272 (plus:SI (match_dup 1)
12273 (match_operand:SI 2 "const_int_operand" "I")))
12274 (set (match_operand:SI 3 "s_register_operand" "=rk")
12275 (mem:SI (match_dup 1)))
12277 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12280 arm_output_multireg_pop (operands, /*return_pc=*/false,
12281 /*cond=*/const_true_rtx,
12287 [(set_attr "type" "load4")
12288 (set_attr "predicable" "yes")]
12291 ;; Pop with return (as used in epilogue RTL)
12293 ;; This instruction is generated when the registers are popped at the end of
12294 ;; epilogue. Here, instead of popping the value into LR and then generating
12295 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
12297 (define_insn "*pop_multiple_with_writeback_and_return"
12298 [(match_parallel 0 "pop_multiple_return"
12300 (set (match_operand:SI 1 "s_register_operand" "+rk")
12301 (plus:SI (match_dup 1)
12302 (match_operand:SI 2 "const_int_operand" "I")))
12303 (set (match_operand:SI 3 "s_register_operand" "=rk")
12304 (mem:SI (match_dup 1)))
12306 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12309 arm_output_multireg_pop (operands, /*return_pc=*/true,
12310 /*cond=*/const_true_rtx,
12316 [(set_attr "type" "load4")
12317 (set_attr "predicable" "yes")]
12320 (define_insn "*pop_multiple_with_return"
12321 [(match_parallel 0 "pop_multiple_return"
12323 (set (match_operand:SI 2 "s_register_operand" "=rk")
12324 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12326 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12329 arm_output_multireg_pop (operands, /*return_pc=*/true,
12330 /*cond=*/const_true_rtx,
12336 [(set_attr "type" "load4")
12337 (set_attr "predicable" "yes")]
12340 ;; Load into PC and return
;; *ldr_with_return: pop a single word straight into the PC with
;; post-increment writeback ("ldr pc, [rN], #4"), i.e. return and deallocate
;; one stack slot in one instruction.  Only valid during/after reload.
12341 (define_insn "*ldr_with_return"
12343 (set (reg:SI PC_REGNUM)
12344 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
12345 "TARGET_32BIT && (reload_in_progress || reload_completed)"
12346 "ldr%?\t%|pc, [%0], #4"
12347 [(set_attr "type" "load1")
12348 (set_attr "predicable" "yes")]
12350 ;; Pop for floating point registers (as used in epilogue RTL)
;; Pops a contiguous run of DF (VFP double) registers with base-register
;; writeback.  The output template is built as a string:
;;   "fldmfdd\t<base>!, {%P0[-%P1]}"
;; where %P0 is the first popped D-register and %P1 the last (the range
;; suffix is only appended when more than one register is popped).
;; NOTE(review): the declarations of `pattern' and `op_list' and a few
;; braces fall on lines missing from this extract.
12351 (define_insn "*vfp_pop_multiple_with_writeback"
12352 [(match_parallel 0 "pop_multiple_fp"
12353 [(set (match_operand:SI 1 "s_register_operand" "+rk")
12354 (plus:SI (match_dup 1)
12355 (match_operand:SI 2 "const_int_operand" "I")))
12356 (set (match_operand:DF 3 "arm_hard_register_operand" "")
12357 (mem:DF (match_dup 1)))])]
12358 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
;; Element 0 of the parallel is the base-register update; elements
;; 1..num_regs-1 are the individual register loads.
12361 int num_regs = XVECLEN (operands[0], 0);
12364 strcpy (pattern, \"fldmfdd\\t\");
12365 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
12366 strcat (pattern, \"!, {\");
12367 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
12368 strcat (pattern, \"%P0\");
12369 if ((num_regs - 1) > 1)
12371 strcat (pattern, \"-%P1\");
12372 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
12375 strcat (pattern, \"}\");
12376 output_asm_insn (pattern, op_list);
12380 [(set_attr "type" "load4")
12381 (set_attr "conds" "unconditional")
12382 (set_attr "predicable" "no")]
12385 ;; Special patterns for dealing with the constant pool
;; align_4 / align_8 emit 32-bit / 64-bit alignment directives;
;; consttable_end marks the end of a constant-pool table by clearing
;; making_const_table.  Output templates/braces for these tiny
;; patterns fall partly on lines missing from this extract.
12387 (define_insn "align_4"
12388 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
12391 assemble_align (32);
12396 (define_insn "align_8"
12397 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
12400 assemble_align (64);
12405 (define_insn "consttable_end"
12406 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
12409 making_const_table = FALSE;
;; One-byte constant-pool entry: emits the byte followed by 3 bytes of
;; zero padding, so the total length is 4.
12414 (define_insn "consttable_1"
12415 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
12418 making_const_table = TRUE;
12419 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
12420 assemble_zeros (3);
12423 [(set_attr "length" "4")]
;; Two-byte constant-pool entry: integer only (float modes are asserted
;; away), padded with 2 zero bytes to a total length of 4.
12426 (define_insn "consttable_2"
12427 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
12430 making_const_table = TRUE;
12431 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
12432 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
12433 assemble_zeros (2);
12436 [(set_attr "length" "4")]
;; Four-byte constant-pool entry.  Dispatches on the mode class:
;; HFmode goes through arm_emit_fp16_const, other float modes through
;; assemble_real, and everything else is emitted as a 4-byte integer
;; (after stripping a wrapping HIGH, see comment below).
;; NOTE(review): the case labels/braces of the switch are on lines
;; missing from this extract.
12439 (define_insn "consttable_4"
12440 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
12444 rtx x = operands[0];
12445 making_const_table = TRUE;
12446 switch (GET_MODE_CLASS (GET_MODE (x)))
12449 if (GET_MODE (x) == HFmode)
12450 arm_emit_fp16_const (x);
12454 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12455 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12459 /* XXX: Sometimes gcc does something really dumb and ends up with
12460 a HIGH in a constant pool entry, usually because it's trying to
12461 load into a VFP register. We know this will always be used in
12462 combination with a LO_SUM which ignores the high bits, so just
12463 strip off the HIGH. */
12464 if (GET_CODE (x) == HIGH)
12466 assemble_integer (x, 4, BITS_PER_WORD, 1);
12467 mark_symbol_refs_as_used (x);
12472 [(set_attr "length" "4")]
;; Eight-byte constant-pool entry: float modes via assemble_real,
;; anything else as an 8-byte integer.  Total length 8.
12475 (define_insn "consttable_8"
12476 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12480 making_const_table = TRUE;
12481 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12486 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12487 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12491 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12496 [(set_attr "length" "8")]
;; Sixteen-byte constant-pool entry (e.g. vector constants): same
;; float/integer split as consttable_8, total length 16.
12499 (define_insn "consttable_16"
12500 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12504 making_const_table = TRUE;
12505 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12510 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12511 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12515 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12520 [(set_attr "length" "16")]
12523 ;; Miscellaneous Thumb patterns
;; Jump-table dispatch: materialise the table label's address in a
;; register, add the (byte-offset) index in operands[0], and branch to
;; the resulting address.  The preparation statements rewrite
;; operands[0] to the computed target register.
;; NOTE(review): the expander's condition string is on a line missing
;; from this extract (presumably a Thumb-1 guard — confirm upstream).
12525 (define_expand "tablejump"
12526 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12527 (use (label_ref (match_operand 1 "" "")))])]
12532 /* Hopefully, CSE will eliminate this copy. */
12533 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12534 rtx reg2 = gen_reg_rtx (SImode);
12536 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12537 operands[0] = reg2;
12542 ;; NB never uses BX.
;; Thumb-1 indirect jump through a register for tablejump; length 2.
;; NOTE(review): the condition string and output template are on lines
;; missing from this extract.
12543 (define_insn "*thumb1_tablejump"
12544 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12545 (use (label_ref (match_operand 1 "" "")))]
12548 [(set_attr "length" "2")]
12551 ;; V5 instructions.
;; Count leading zeros, available from ARMv5 in 32-bit mode.
;; NOTE(review): the output template line (presumably the CLZ
;; instruction) is missing from this extract.
12553 (define_insn "clzsi2"
12554 [(set (match_operand:SI 0 "s_register_operand" "=r")
12555 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12556 "TARGET_32BIT && arm_arch5"
12558 [(set_attr "predicable" "yes")
12559 (set_attr "type" "clz")])
;; Bit-reverse (UNSPEC_RBIT), Thumb-2 architectures only.  Shares the
;; "clz" scheduling type with clzsi2.
;; NOTE(review): the output template line is missing from this extract.
12561 (define_insn "rbitsi2"
12562 [(set (match_operand:SI 0 "s_register_operand" "=r")
12563 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12564 "TARGET_32BIT && arm_arch_thumb2"
12566 [(set_attr "predicable" "yes")
12567 (set_attr "type" "clz")])
;; Count trailing zeros, synthesised as ctz(x) = clz(rbit(x)) using the
;; two patterns above — hence the same Thumb-2 guard as rbitsi2.
12569 (define_expand "ctzsi2"
12570 [(set (match_operand:SI 0 "s_register_operand" "")
12571 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12572 "TARGET_32BIT && arm_arch_thumb2"
12575 rtx tmp = gen_reg_rtx (SImode);
12576 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12577 emit_insn (gen_clzsi2 (operands[0], tmp));
12583 ;; V5E instructions.
;; Standard-named prefetch pattern (operand 1 = rw flag, operand 2 =
;; locality), gated on ARMv5E.
;; NOTE(review): the output template (presumably PLD) is on a line
;; missing from this extract — confirm upstream.
12585 (define_insn "prefetch"
12586 [(prefetch (match_operand:SI 0 "address_operand" "p")
12587 (match_operand:SI 1 "" "")
12588 (match_operand:SI 2 "" ""))]
12589 "TARGET_32BIT && arm_arch5e"
12592 ;; General predication pattern
;; NOTE(review): this is only a fragment — the define_cond_exec /
;; define_insn header and most of its body are on lines missing from
;; this extract; only the comparison-against-CC-register matcher and
;; the "predicated" attribute survive here.
12595 [(match_operator 0 "arm_comparison_operator"
12596 [(match_operand 1 "cc_register" "")
12600 [(set_attr "predicated" "yes")]
;; Zero-length marker insn that keeps a register artificially live
;; (UNSPEC_REGISTER_USE); emits no code ("length" 0).
12603 (define_insn "force_register_use"
12604 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12607 [(set_attr "length" "0")]
12611 ;; Patterns for exception handling
;; eh_return dispatches to the ARM or Thumb implementation below;
;; the selecting condition between the two emit_insn calls is on a
;; line missing from this extract (presumably TARGET_32BIT vs Thumb-1).
12613 (define_expand "eh_return"
12614 [(use (match_operand 0 "general_operand" ""))]
12619 emit_insn (gen_arm_eh_return (operands[0]));
12621 emit_insn (gen_thumb_eh_return (operands[0]));
12626 ;; We can't expand this before we know where the link register is stored.
;; ARM-mode EH return: after reload, splits into code that stores the
;; stack-adjustment/handler value via arm_set_return_address, using
;; scratch register operand 1.
12627 (define_insn_and_split "arm_eh_return"
12628 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12630 (clobber (match_scratch:SI 1 "=&r"))]
12633 "&& reload_completed"
12637 arm_set_return_address (operands[0], operands[1]);
;; Thumb counterpart of arm_eh_return: low registers only ("l"
;; constraints), calls thumb_set_return_address after reload.
12642 (define_insn_and_split "thumb_eh_return"
12643 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12645 (clobber (match_scratch:SI 1 "=&l"))]
12648 "&& reload_completed"
12652 thumb_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (TPIDRURO: c13, c0, 3) into a
;; general register — hardware TLS access.
12660 (define_insn "load_tp_hard"
12661 [(set (match_operand:SI 0 "register_operand" "=r")
12662 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12664 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12665 [(set_attr "predicable" "yes")]
12668 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software TLS access: calls the EABI helper __aeabi_read_tp, which
;; returns the thread pointer in r0 (hence the hard-coded reg:SI 0)
;; and clobbers lr, ip and the condition codes.
12669 (define_insn "load_tp_soft"
12670 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12671 (clobber (reg:SI LR_REGNUM))
12672 (clobber (reg:SI IP_REGNUM))
12673 (clobber (reg:CC CC_REGNUM))]
12675 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12676 [(set_attr "conds" "clob")]
12679 ;; tls descriptor call
;; TLS-descriptor resolver call: takes/returns the descriptor in r0,
;; emits a local "LPIC<N>" label (N = operand 1) before the call and
;; then "bl <sym>(tlscall)".  Clobbers r1, lr and the condition codes.
12680 (define_insn "tlscall"
12681 [(set (reg:SI R0_REGNUM)
12682 (unspec:SI [(reg:SI R0_REGNUM)
12683 (match_operand:SI 0 "" "X")
12684 (match_operand 1 "" "")] UNSPEC_TLS))
12685 (clobber (reg:SI R1_REGNUM))
12686 (clobber (reg:SI LR_REGNUM))
12687 (clobber (reg:SI CC_REGNUM))]
12690 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12691 INTVAL (operands[1]));
12692 return "bl\\t%c0(tlscall)";
12694 [(set_attr "conds" "clob")
12695 (set_attr "length" "4")]
12698 ;; For thread pointer builtin
;; Expander backing __builtin_thread_pointer: delegates entirely to
;; arm_load_tp, which picks the hard or soft TLS sequence.
12699 (define_expand "get_thread_pointersi"
12700 [(match_operand:SI 0 "s_register_operand" "=r")]
12704 arm_load_tp (operands[0]);
12710 ;; We only care about the lower 16 bits of the constant
12711 ;; being inserted into the upper 16 bits of the register.
;; Insert a 16-bit immediate into the top half of a register via a
;; zero_extract destination (presumably MOVT — the template and the
;; extract size/position lines are missing from this extract).
12712 (define_insn "*arm_movtas_ze"
12713 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12716 (match_operand:SI 1 "const_int_operand" ""))]
12719 [(set_attr "predicable" "yes")
12720 (set_attr "predicable_short_it" "no")
12721 (set_attr "length" "4")]
;; 32-bit byte reverse (REV) with three alternatives: Thumb-1,
;; Thumb-2 and 32-bit ARM (lengths 2, 2 and 4 respectively).
;; NOTE(review): the condition and templates are on missing lines.
12724 (define_insn "*arm_rev"
12725 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12726 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12732 [(set_attr "arch" "t1,t2,32")
12733 (set_attr "length" "2,2,4")]
;; Byte-reverse for ARM targets without the REV instruction: the
;; classic eor/rotate/shift sequence using scratch operands 2 and 3
;; (0xFFFF00FF mask appears as const_int -65281).
;; NOTE(review): several shift-count const_int lines are missing from
;; this extract.
12736 (define_expand "arm_legacy_rev"
12737 [(set (match_operand:SI 2 "s_register_operand" "")
12738 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12742 (lshiftrt:SI (match_dup 2)
12744 (set (match_operand:SI 3 "s_register_operand" "")
12745 (rotatert:SI (match_dup 1)
12748 (and:SI (match_dup 2)
12749 (const_int -65281)))
12750 (set (match_operand:SI 0 "s_register_operand" "")
12751 (xor:SI (match_dup 3)
12757 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-reverse without REV: a longer shift/rotate/or sequence
;; through scratch operands 2-5 (no barrel-shifted operands on
;; Thumb-1, so each shift is a separate insn).
;; NOTE(review): many shift-count const_int lines are missing from
;; this extract.
12758 (define_expand "thumb_legacy_rev"
12759 [(set (match_operand:SI 2 "s_register_operand" "")
12760 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12762 (set (match_operand:SI 3 "s_register_operand" "")
12763 (lshiftrt:SI (match_dup 1)
12766 (ior:SI (match_dup 3)
12768 (set (match_operand:SI 4 "s_register_operand" "")
12770 (set (match_operand:SI 5 "s_register_operand" "")
12771 (rotatert:SI (match_dup 1)
12774 (ashift:SI (match_dup 5)
12777 (lshiftrt:SI (match_dup 5)
12780 (ior:SI (match_dup 5)
12783 (rotatert:SI (match_dup 5)
12785 (set (match_operand:SI 0 "s_register_operand" "")
12786 (ior:SI (match_dup 5)
;; Standard-named 32-bit byte swap.  When the single-instruction REV
;; is unavailable, falls back to the legacy multi-insn sequences:
;; thumb_legacy_rev (needs 4 scratch regs) or arm_legacy_rev (2).
;; The !optimize_size term avoids the long sequences under -Os on
;; pre-v6 cores.  The branch selecting between the arch6/legacy and
;; thumb/arm paths is on lines missing from this extract.
12792 (define_expand "bswapsi2"
12793 [(set (match_operand:SI 0 "s_register_operand" "=r")
12794 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12795 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12799 rtx op2 = gen_reg_rtx (SImode);
12800 rtx op3 = gen_reg_rtx (SImode);
12804 rtx op4 = gen_reg_rtx (SImode);
12805 rtx op5 = gen_reg_rtx (SImode);
12807 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12808 op2, op3, op4, op5));
12812 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12821 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12822 ;; and unsigned variants, respectively. For rev16, expose
12823 ;; byte-swapping in the lower 16 bits only.
;; Sign-extending 16-bit byte swap (REVSH); t1/t2/ARM alternatives.
;; NOTE(review): condition and templates are on missing lines.
12824 (define_insn "*arm_revsh"
12825 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12826 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12832 [(set_attr "arch" "t1,t2,32")
12833 (set_attr "length" "2,2,4")]
;; Unsigned 16-bit byte swap (REV16 on the low halfword); t1/t2/ARM
;; alternatives.  Condition and templates are on missing lines.
12836 (define_insn "*arm_rev16"
12837 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12838 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12844 [(set_attr "arch" "t1,t2,32")
12845 (set_attr "length" "2,2,4")]
;; Standard-named 16-bit byte swap, matched by the insns above.
;; NOTE(review): the expander's condition string is on a missing line
;; (presumably an arm_arch6 guard — confirm upstream).
12848 (define_expand "bswaphi2"
12849 [(set (match_operand:HI 0 "s_register_operand" "=r")
12850 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12855 ;; Patterns for LDRD/STRD in Thumb2 mode
;; Paired load at base+imm / base+imm+4, fused into a single LDRD once
;; reload has fixed the registers.  The condition checks that the two
;; offsets differ by exactly 4 and that the register pair is valid for
;; LDRD (operands_ok_ldrd_strd).
12857 (define_insn "*thumb2_ldrd"
12858 [(set (match_operand:SI 0 "s_register_operand" "=r")
12859 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12860 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12861 (set (match_operand:SI 3 "s_register_operand" "=r")
12862 (mem:SI (plus:SI (match_dup 1)
12863 (match_operand:SI 4 "const_int_operand" ""))))]
12864 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12865 && current_tune->prefer_ldrd_strd
12866 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12867 && (operands_ok_ldrd_strd (operands[0], operands[3],
12868 operands[1], INTVAL (operands[2]),
12870 "ldrd%?\t%0, %3, [%1, %2]"
12871 [(set_attr "type" "load2")
12872 (set_attr "predicable" "yes")
12873 (set_attr "predicable_short_it" "no")])
;; LDRD variant for offsets 0 and +4 from the bare base register:
;; "ldrd %0, %2, [%1]".
12875 (define_insn "*thumb2_ldrd_base"
12876 [(set (match_operand:SI 0 "s_register_operand" "=r")
12877 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12878 (set (match_operand:SI 2 "s_register_operand" "=r")
12879 (mem:SI (plus:SI (match_dup 1)
12881 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12882 && current_tune->prefer_ldrd_strd
12883 && (operands_ok_ldrd_strd (operands[0], operands[2],
12884 operands[1], 0, false, true))"
12885 "ldrd%?\t%0, %2, [%1]"
12886 [(set_attr "type" "load2")
12887 (set_attr "predicable" "yes")
12888 (set_attr "predicable_short_it" "no")])
;; LDRD variant for offsets -4 and 0: "ldrd %0, %2, [%1, #-4]".
12890 (define_insn "*thumb2_ldrd_base_neg"
12891 [(set (match_operand:SI 0 "s_register_operand" "=r")
12892 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12894 (set (match_operand:SI 2 "s_register_operand" "=r")
12895 (mem:SI (match_dup 1)))]
12896 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12897 && current_tune->prefer_ldrd_strd
12898 && (operands_ok_ldrd_strd (operands[0], operands[2],
12899 operands[1], -4, false, true))"
12900 "ldrd%?\t%0, %2, [%1, #-4]"
12901 [(set_attr "type" "load2")
12902 (set_attr "predicable" "yes")
12903 (set_attr "predicable_short_it" "no")])
;; Store counterpart of *thumb2_ldrd: two stores at base+imm and
;; base+imm+4 fused into one STRD after reload.
12905 (define_insn "*thumb2_strd"
12906 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12907 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12908 (match_operand:SI 2 "s_register_operand" "r"))
12909 (set (mem:SI (plus:SI (match_dup 0)
12910 (match_operand:SI 3 "const_int_operand" "")))
12911 (match_operand:SI 4 "s_register_operand" "r"))]
12912 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12913 && current_tune->prefer_ldrd_strd
12914 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12915 && (operands_ok_ldrd_strd (operands[2], operands[4],
12916 operands[0], INTVAL (operands[1]),
12918 "strd%?\t%2, %4, [%0, %1]"
12919 [(set_attr "type" "store2")
12920 (set_attr "predicable" "yes")
12921 (set_attr "predicable_short_it" "no")])
;; STRD variant for offsets 0 and +4: "strd %1, %2, [%0]".
12923 (define_insn "*thumb2_strd_base"
12924 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12925 (match_operand:SI 1 "s_register_operand" "r"))
12926 (set (mem:SI (plus:SI (match_dup 0)
12928 (match_operand:SI 2 "s_register_operand" "r"))]
12929 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12930 && current_tune->prefer_ldrd_strd
12931 && (operands_ok_ldrd_strd (operands[1], operands[2],
12932 operands[0], 0, false, false))"
12933 "strd%?\t%1, %2, [%0]"
12934 [(set_attr "type" "store2")
12935 (set_attr "predicable" "yes")
12936 (set_attr "predicable_short_it" "no")])
;; STRD variant for offsets -4 and 0: "strd %1, %2, [%0, #-4]".
12938 (define_insn "*thumb2_strd_base_neg"
12939 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12941 (match_operand:SI 1 "s_register_operand" "r"))
12942 (set (mem:SI (match_dup 0))
12943 (match_operand:SI 2 "s_register_operand" "r"))]
12944 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12945 && current_tune->prefer_ldrd_strd
12946 && (operands_ok_ldrd_strd (operands[1], operands[2],
12947 operands[0], -4, false, false))"
12948 "strd%?\t%1, %2, [%0, #-4]"
12949 [(set_attr "type" "store2")
12950 (set_attr "predicable" "yes")
12951 (set_attr "predicable_short_it" "no")])
12954 ;; Load the load/store double peephole optimizations.
12955 (include "ldrdstrd.md")
12957 ;; Load the load/store multiple patterns
12958 (include "ldmstm.md")
12960 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12961 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
;; Multi-register load with no writeback and no return: emitted via
;; arm_output_multireg_pop with return_pc=false.  Interior lines
;; (condition and closing braces) are missing from this extract.
12962 (define_insn "*load_multiple"
12963 [(match_parallel 0 "load_multiple_operation"
12964 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12965 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12970 arm_output_multireg_pop (operands, /*return_pc=*/false,
12971 /*cond=*/const_true_rtx,
12977 [(set_attr "predicable" "yes")]
12980 ;; Vector bits common to IWMMXT and Neon
12981 (include "vec-common.md")
12982 ;; Load the Intel Wireless Multimedia Extension patterns
12983 (include "iwmmxt.md")
12984 ;; Load the VFP co-processor patterns
12986 ;; Thumb-2 patterns
12987 (include "thumb2.md")
12989 (include "neon.md")
12990 ;; Synchronization Primitives
12991 (include "sync.md")
12992 ;; Fixed-point patterns
12993 (include "arm-fixed.md")