1 /**************************************************************************
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
23 **************************************************************************/
25 #include "pipe/p_config.h"
26 #include "util/u_cpu_detect.h"
28 #if defined(PIPE_ARCH_X86) || defined(PIPE_ARCH_X86_64)
30 #include "pipe/p_compiler.h"
31 #include "util/u_debug.h"
32 #include "util/u_pointer.h"
34 #include "rtasm_execmem.h"
35 #include "rtasm_x86sse.h"
44 void x86_print_reg( struct x86_reg reg
)
46 if (reg
.mod
!= mod_REG
)
52 case reg_AX
: debug_printf( "EAX" ); break;
53 case reg_CX
: debug_printf( "ECX" ); break;
54 case reg_DX
: debug_printf( "EDX" ); break;
55 case reg_BX
: debug_printf( "EBX" ); break;
56 case reg_SP
: debug_printf( "ESP" ); break;
57 case reg_BP
: debug_printf( "EBP" ); break;
58 case reg_SI
: debug_printf( "ESI" ); break;
59 case reg_DI
: debug_printf( "EDI" ); break;
63 debug_printf( "MMX%u", reg
.idx
);
66 debug_printf( "XMM%u", reg
.idx
);
69 debug_printf( "fp%u", reg
.idx
);
73 if (reg
.mod
== mod_DISP8
||
74 reg
.mod
== mod_DISP32
)
75 debug_printf("+%d", reg
.disp
);
77 if (reg
.mod
!= mod_REG
)
83 #define DUMP_START() debug_printf( "\n" )
84 #define DUMP_END() debug_printf( "\n" )
87 const char *foo = __FUNCTION__; \
88 while (*foo && *foo != '_') \
92 debug_printf( "\n%4x %14s ", p->csr - p->store, foo ); \
95 #define DUMP_I( I ) do { \
97 debug_printf( "%u", I ); \
100 #define DUMP_R( R0 ) do { \
102 x86_print_reg( R0 ); \
105 #define DUMP_RR( R0, R1 ) do { \
107 x86_print_reg( R0 ); \
108 debug_printf( ", " ); \
109 x86_print_reg( R1 ); \
112 #define DUMP_RI( R0, I ) do { \
114 x86_print_reg( R0 ); \
115 debug_printf( ", %u", I ); \
118 #define DUMP_RRI( R0, R1, I ) do { \
120 x86_print_reg( R0 ); \
121 debug_printf( ", " ); \
122 x86_print_reg( R1 ); \
123 debug_printf( ", %u", I ); \
133 #define DUMP_RR( R0, R1 )
134 #define DUMP_RI( R0, I )
135 #define DUMP_RRI( R0, R1, I )
140 static void do_realloc( struct x86_function
*p
)
142 if (p
->store
== p
->error_overflow
) {
145 else if (p
->size
== 0) {
147 p
->store
= rtasm_exec_malloc(p
->size
);
151 uintptr_t used
= pointer_to_uintptr( p
->csr
) - pointer_to_uintptr( p
->store
);
152 unsigned char *tmp
= p
->store
;
154 p
->store
= rtasm_exec_malloc(p
->size
);
157 memcpy(p
->store
, tmp
, used
);
158 p
->csr
= p
->store
+ used
;
164 rtasm_exec_free(tmp
);
167 if (p
->store
== NULL
) {
168 p
->store
= p
->csr
= p
->error_overflow
;
169 p
->size
= sizeof(p
->error_overflow
);
173 /* Emit bytes to the instruction stream:
175 static unsigned char *reserve( struct x86_function
*p
, int bytes
)
177 if (p
->csr
+ bytes
- p
->store
> (int) p
->size
)
181 unsigned char *csr
= p
->csr
;
/* Append one signed byte to the instruction stream. */
static void emit_1b( struct x86_function *p, char b0 )
{
   char *csr = (char *)reserve(p, 1);
   *csr = b0;
}
/* Append one 32-bit integer (immediate/displacement) to the stream. */
static void emit_1i( struct x86_function *p, int i0 )
{
   int *icsr = (int *)reserve(p, sizeof(i0));
   *icsr = i0;
}
/* Append one unsigned byte (typically an opcode) to the stream. */
static void emit_1ub( struct x86_function *p, unsigned char b0 )
{
   unsigned char *csr = reserve(p, 1);
   *csr = b0;
}
/* Append two unsigned bytes to the stream, in order. */
static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
{
   unsigned char *csr = reserve(p, 2);
   *csr++ = b0;
   *csr++ = b1;
}
/* Append three unsigned bytes to the stream, in order. */
static void emit_3ub( struct x86_function *p, unsigned char b0, unsigned char b1, unsigned char b2 )
{
   unsigned char *csr = reserve(p, 3);
   *csr++ = b0;
   *csr++ = b1;
   *csr++ = b2;
}
223 /* Build a modRM byte + possible displacement. No treatment of SIB
224 * indexing. BZZT - no way to encode an absolute address.
226 * This is the "/r" field in the x86 manuals...
228 static void emit_modrm( struct x86_function
*p
,
230 struct x86_reg regmem
)
232 unsigned char val
= 0;
234 assert(reg
.mod
== mod_REG
);
236 /* TODO: support extended x86-64 registers */
238 assert(regmem
.idx
< 8);
240 val
|= regmem
.mod
<< 6; /* mod field */
241 val
|= reg
.idx
<< 3; /* reg field */
242 val
|= regmem
.idx
; /* r/m field */
246 /* Oh-oh we've stumbled into the SIB thing.
248 if (regmem
.file
== file_REG32
&&
249 regmem
.idx
== reg_SP
&&
250 regmem
.mod
!= mod_REG
) {
251 emit_1ub(p
, 0x24); /* simplistic! */
254 switch (regmem
.mod
) {
259 emit_1b(p
, (char) regmem
.disp
);
262 emit_1i(p
, regmem
.disp
);
270 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
272 static void emit_modrm_noreg( struct x86_function
*p
,
274 struct x86_reg regmem
)
276 struct x86_reg dummy
= x86_make_reg(file_REG32
, op
);
277 emit_modrm(p
, dummy
, regmem
);
280 /* Many x86 instructions have two opcodes to cope with the situations
281 * where the destination is a register or memory reference
282 * respectively. This function selects the correct opcode based on
283 * the arguments presented.
285 static void emit_op_modrm( struct x86_function
*p
,
286 unsigned char op_dst_is_reg
,
287 unsigned char op_dst_is_mem
,
293 emit_1ub(p
, op_dst_is_reg
);
294 emit_modrm(p
, dst
, src
);
299 assert(src
.mod
== mod_REG
);
300 emit_1ub(p
, op_dst_is_mem
);
301 emit_modrm(p
, src
, dst
);
315 /* Create and manipulate registers and regmem values:
317 struct x86_reg
x86_make_reg( enum x86_reg_file file
,
318 enum x86_reg_name idx
)
330 struct x86_reg
x86_make_disp( struct x86_reg reg
,
333 assert(reg
.file
== file_REG32
);
335 if (reg
.mod
== mod_REG
)
340 if (reg
.disp
== 0 && reg
.idx
!= reg_BP
)
341 reg
.mod
= mod_INDIRECT
;
342 else if (reg
.disp
<= 127 && reg
.disp
>= -128)
345 reg
.mod
= mod_DISP32
;
350 struct x86_reg
x86_deref( struct x86_reg reg
)
352 return x86_make_disp(reg
, 0);
355 struct x86_reg
x86_get_base_reg( struct x86_reg reg
)
357 return x86_make_reg( reg
.file
, reg
.idx
);
360 int x86_get_label( struct x86_function
*p
)
362 return p
->csr
- p
->store
;
367 /***********************************************************************
372 void x64_rexw(struct x86_function
*p
)
374 if(x86_target(p
) != X86_32
)
378 void x86_jcc( struct x86_function
*p
,
382 int offset
= label
- (x86_get_label(p
) + 2);
386 /*assert(p->csr - p->store > -offset);*/
387 if (p
->csr
- p
->store
<= -offset
) {
388 /* probably out of memory (using the error_overflow buffer) */
393 if (offset
<= 127 && offset
>= -128) {
394 emit_1ub(p
, 0x70 + cc
);
395 emit_1b(p
, (char) offset
);
398 offset
= label
- (x86_get_label(p
) + 6);
399 emit_2ub(p
, 0x0f, 0x80 + cc
);
404 /* Always use a 32bit offset for forward jumps:
406 int x86_jcc_forward( struct x86_function
*p
,
410 emit_2ub(p
, 0x0f, 0x80 + cc
);
412 return x86_get_label(p
);
415 int x86_jmp_forward( struct x86_function
*p
)
420 return x86_get_label(p
);
423 int x86_call_forward( struct x86_function
*p
)
429 return x86_get_label(p
);
432 /* Fixup offset from forward jump:
434 void x86_fixup_fwd_jump( struct x86_function
*p
,
437 *(int *)(p
->store
+ fixup
- 4) = x86_get_label(p
) - fixup
;
/* Emit an unconditional near jump (0xe9 rel32) to @label. */
void x86_jmp( struct x86_function *p, int label)
{
   DUMP_I( label );
   emit_1ub(p, 0xe9);
   /* rel32 is relative to the end of the 4-byte immediate. */
   emit_1i(p, label - x86_get_label(p) - 4);
}
447 void x86_call( struct x86_function
*p
, struct x86_reg reg
)
451 emit_modrm_noreg(p
, 2, reg
);
455 void x86_mov_reg_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
458 assert(dst
.file
== file_REG32
);
459 assert(dst
.mod
== mod_REG
);
460 emit_1ub(p
, 0xb8 + dst
.idx
);
464 void x86_mov_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
467 if(dst
.mod
== mod_REG
)
468 x86_mov_reg_imm(p
, dst
, imm
);
472 emit_modrm_noreg(p
, 0, dst
);
477 void x86_mov16_imm( struct x86_function
*p
, struct x86_reg dst
, uint16_t imm
)
481 if(dst
.mod
== mod_REG
)
483 emit_1ub(p
, 0xb8 + dst
.idx
);
484 emit_2ub(p
, imm
& 0xff, imm
>> 8);
489 emit_modrm_noreg(p
, 0, dst
);
490 emit_2ub(p
, imm
& 0xff, imm
>> 8);
494 void x86_mov8_imm( struct x86_function
*p
, struct x86_reg dst
, uint8_t imm
)
497 if(dst
.mod
== mod_REG
)
499 emit_1ub(p
, 0xb0 + dst
.idx
);
505 emit_modrm_noreg(p
, 0, dst
);
511 * Immediate group 1 instructions.
514 x86_group1_imm( struct x86_function
*p
,
515 unsigned op
, struct x86_reg dst
, int imm
)
517 assert(dst
.file
== file_REG32
);
518 assert(dst
.mod
== mod_REG
);
519 if(-0x80 <= imm
&& imm
< 0x80) {
521 emit_modrm_noreg(p
, op
, dst
);
522 emit_1b(p
, (char)imm
);
526 emit_modrm_noreg(p
, op
, dst
);
531 void x86_add_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
534 x86_group1_imm(p
, 0, dst
, imm
);
537 void x86_or_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
540 x86_group1_imm(p
, 1, dst
, imm
);
543 void x86_and_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
546 x86_group1_imm(p
, 4, dst
, imm
);
549 void x86_sub_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
552 x86_group1_imm(p
, 5, dst
, imm
);
555 void x86_xor_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
558 x86_group1_imm(p
, 6, dst
, imm
);
561 void x86_cmp_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
564 x86_group1_imm(p
, 7, dst
, imm
);
568 void x86_push( struct x86_function
*p
,
572 if (reg
.mod
== mod_REG
)
573 emit_1ub(p
, 0x50 + reg
.idx
);
577 emit_modrm_noreg(p
, 6, reg
);
581 p
->stack_offset
+= sizeof(void*);
584 void x86_push_imm32( struct x86_function
*p
,
591 p
->stack_offset
+= sizeof(void*);
595 void x86_pop( struct x86_function
*p
,
599 assert(reg
.mod
== mod_REG
);
600 emit_1ub(p
, 0x58 + reg
.idx
);
601 p
->stack_offset
-= sizeof(void*);
604 void x86_inc( struct x86_function
*p
,
608 if(x86_target(p
) == X86_32
&& reg
.mod
== mod_REG
)
610 emit_1ub(p
, 0x40 + reg
.idx
);
614 emit_modrm_noreg(p
, 0, reg
);
617 void x86_dec( struct x86_function
*p
,
621 if(x86_target(p
) == X86_32
&& reg
.mod
== mod_REG
)
623 emit_1ub(p
, 0x48 + reg
.idx
);
627 emit_modrm_noreg(p
, 1, reg
);
630 void x86_ret( struct x86_function
*p
)
633 assert(p
->stack_offset
== 0);
637 void x86_retw( struct x86_function
*p
, unsigned short imm
)
640 emit_3ub(p
, 0xc2, imm
& 0xff, (imm
>> 8) & 0xff);
643 void x86_sahf( struct x86_function
*p
)
649 void x86_mov( struct x86_function
*p
,
654 /* special hack for reading arguments until we support x86-64 registers everywhere */
655 if(src
.mod
== mod_REG
&& dst
.mod
== mod_REG
&& (src
.idx
>= 8 || dst
.idx
>= 8))
670 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
673 void x86_mov16( struct x86_function
*p
,
679 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
682 void x86_mov8( struct x86_function
*p
,
687 emit_op_modrm( p
, 0x8a, 0x88, dst
, src
);
690 void x64_mov64( struct x86_function
*p
,
696 assert(x86_target(p
) != X86_32
);
698 /* special hack for reading arguments until we support x86-64 registers everywhere */
699 if(src
.mod
== mod_REG
&& dst
.mod
== mod_REG
&& (src
.idx
>= 8 || dst
.idx
>= 8))
713 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
716 void x86_movzx8(struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
719 emit_2ub(p
, 0x0f, 0xb6);
720 emit_modrm(p
, dst
, src
);
723 void x86_movzx16(struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
726 emit_2ub(p
, 0x0f, 0xb7);
727 emit_modrm(p
, dst
, src
);
730 void x86_cmovcc( struct x86_function
*p
,
735 DUMP_RRI( dst
, src
, cc
);
736 emit_2ub( p
, 0x0f, 0x40 + cc
);
737 emit_modrm( p
, dst
, src
);
740 void x86_xor( struct x86_function
*p
,
745 emit_op_modrm( p
, 0x33, 0x31, dst
, src
);
748 void x86_cmp( struct x86_function
*p
,
753 emit_op_modrm( p
, 0x3b, 0x39, dst
, src
);
756 void x86_lea( struct x86_function
*p
,
762 emit_modrm( p
, dst
, src
);
765 void x86_test( struct x86_function
*p
,
771 emit_modrm( p
, dst
, src
);
774 void x86_add( struct x86_function
*p
,
779 emit_op_modrm(p
, 0x03, 0x01, dst
, src
);
782 /* Calculate EAX * src, results in EDX:EAX.
784 void x86_mul( struct x86_function
*p
,
789 emit_modrm_noreg(p
, 4, src
);
793 void x86_imul( struct x86_function
*p
,
798 emit_2ub(p
, X86_TWOB
, 0xAF);
799 emit_modrm(p
, dst
, src
);
803 void x86_sub( struct x86_function
*p
,
808 emit_op_modrm(p
, 0x2b, 0x29, dst
, src
);
811 void x86_or( struct x86_function
*p
,
816 emit_op_modrm( p
, 0x0b, 0x09, dst
, src
);
819 void x86_and( struct x86_function
*p
,
824 emit_op_modrm( p
, 0x23, 0x21, dst
, src
);
827 void x86_div( struct x86_function
*p
,
830 assert(src
.file
== file_REG32
&& src
.mod
== mod_REG
);
831 emit_op_modrm(p
, 0xf7, 0, x86_make_reg(file_REG32
, 6), src
);
834 void x86_bswap( struct x86_function
*p
, struct x86_reg reg
)
837 assert(reg
.file
== file_REG32
);
838 assert(reg
.mod
== mod_REG
);
839 emit_2ub(p
, 0x0f, 0xc8 + reg
.idx
);
842 void x86_shr_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
848 emit_modrm_noreg(p
, 5, reg
);
853 emit_modrm_noreg(p
, 5, reg
);
858 void x86_sar_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
864 emit_modrm_noreg(p
, 7, reg
);
869 emit_modrm_noreg(p
, 7, reg
);
874 void x86_shl_imm( struct x86_function
*p
, struct x86_reg reg
, unsigned imm
)
880 emit_modrm_noreg(p
, 4, reg
);
885 emit_modrm_noreg(p
, 4, reg
);
891 /***********************************************************************
895 void sse_prefetchnta( struct x86_function
*p
, struct x86_reg ptr
)
898 assert(ptr
.mod
!= mod_REG
);
899 emit_2ub(p
, 0x0f, 0x18);
900 emit_modrm_noreg(p
, 0, ptr
);
903 void sse_prefetch0( struct x86_function
*p
, struct x86_reg ptr
)
906 assert(ptr
.mod
!= mod_REG
);
907 emit_2ub(p
, 0x0f, 0x18);
908 emit_modrm_noreg(p
, 1, ptr
);
911 void sse_prefetch1( struct x86_function
*p
, struct x86_reg ptr
)
914 assert(ptr
.mod
!= mod_REG
);
915 emit_2ub(p
, 0x0f, 0x18);
916 emit_modrm_noreg(p
, 2, ptr
);
919 void sse_movntps( struct x86_function
*p
,
925 assert(dst
.mod
!= mod_REG
);
926 assert(src
.mod
== mod_REG
);
927 emit_2ub(p
, 0x0f, 0x2b);
928 emit_modrm(p
, src
, dst
);
934 void sse_movss( struct x86_function
*p
,
939 emit_2ub(p
, 0xF3, X86_TWOB
);
940 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
943 void sse_movaps( struct x86_function
*p
,
948 emit_1ub(p
, X86_TWOB
);
949 emit_op_modrm( p
, 0x28, 0x29, dst
, src
);
952 void sse_movups( struct x86_function
*p
,
957 emit_1ub(p
, X86_TWOB
);
958 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
961 void sse_movhps( struct x86_function
*p
,
966 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
967 emit_1ub(p
, X86_TWOB
);
968 emit_op_modrm( p
, 0x16, 0x17, dst
, src
); /* cf movlhps */
971 void sse_movlps( struct x86_function
*p
,
976 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
977 emit_1ub(p
, X86_TWOB
);
978 emit_op_modrm( p
, 0x12, 0x13, dst
, src
); /* cf movhlps */
981 void sse_maxps( struct x86_function
*p
,
986 emit_2ub(p
, X86_TWOB
, 0x5F);
987 emit_modrm( p
, dst
, src
);
990 void sse_maxss( struct x86_function
*p
,
995 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5F);
996 emit_modrm( p
, dst
, src
);
999 void sse_divss( struct x86_function
*p
,
1001 struct x86_reg src
)
1003 DUMP_RR( dst
, src
);
1004 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5E);
1005 emit_modrm( p
, dst
, src
);
1008 void sse_minps( struct x86_function
*p
,
1010 struct x86_reg src
)
1012 DUMP_RR( dst
, src
);
1013 emit_2ub(p
, X86_TWOB
, 0x5D);
1014 emit_modrm( p
, dst
, src
);
1017 void sse_subps( struct x86_function
*p
,
1019 struct x86_reg src
)
1021 DUMP_RR( dst
, src
);
1022 emit_2ub(p
, X86_TWOB
, 0x5C);
1023 emit_modrm( p
, dst
, src
);
1026 void sse_mulps( struct x86_function
*p
,
1028 struct x86_reg src
)
1030 DUMP_RR( dst
, src
);
1031 emit_2ub(p
, X86_TWOB
, 0x59);
1032 emit_modrm( p
, dst
, src
);
1035 void sse_mulss( struct x86_function
*p
,
1037 struct x86_reg src
)
1039 DUMP_RR( dst
, src
);
1040 emit_3ub(p
, 0xF3, X86_TWOB
, 0x59);
1041 emit_modrm( p
, dst
, src
);
1044 void sse_addps( struct x86_function
*p
,
1046 struct x86_reg src
)
1048 DUMP_RR( dst
, src
);
1049 emit_2ub(p
, X86_TWOB
, 0x58);
1050 emit_modrm( p
, dst
, src
);
1053 void sse_addss( struct x86_function
*p
,
1055 struct x86_reg src
)
1057 DUMP_RR( dst
, src
);
1058 emit_3ub(p
, 0xF3, X86_TWOB
, 0x58);
1059 emit_modrm( p
, dst
, src
);
1062 void sse_andnps( struct x86_function
*p
,
1064 struct x86_reg src
)
1066 DUMP_RR( dst
, src
);
1067 emit_2ub(p
, X86_TWOB
, 0x55);
1068 emit_modrm( p
, dst
, src
);
1071 void sse_andps( struct x86_function
*p
,
1073 struct x86_reg src
)
1075 DUMP_RR( dst
, src
);
1076 emit_2ub(p
, X86_TWOB
, 0x54);
1077 emit_modrm( p
, dst
, src
);
1080 void sse_rsqrtps( struct x86_function
*p
,
1082 struct x86_reg src
)
1084 DUMP_RR( dst
, src
);
1085 emit_2ub(p
, X86_TWOB
, 0x52);
1086 emit_modrm( p
, dst
, src
);
1089 void sse_rsqrtss( struct x86_function
*p
,
1091 struct x86_reg src
)
1093 DUMP_RR( dst
, src
);
1094 emit_3ub(p
, 0xF3, X86_TWOB
, 0x52);
1095 emit_modrm( p
, dst
, src
);
1099 void sse_movhlps( struct x86_function
*p
,
1101 struct x86_reg src
)
1103 DUMP_RR( dst
, src
);
1104 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
1105 emit_2ub(p
, X86_TWOB
, 0x12);
1106 emit_modrm( p
, dst
, src
);
1109 void sse_movlhps( struct x86_function
*p
,
1111 struct x86_reg src
)
1113 DUMP_RR( dst
, src
);
1114 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
1115 emit_2ub(p
, X86_TWOB
, 0x16);
1116 emit_modrm( p
, dst
, src
);
1119 void sse_orps( struct x86_function
*p
,
1121 struct x86_reg src
)
1123 DUMP_RR( dst
, src
);
1124 emit_2ub(p
, X86_TWOB
, 0x56);
1125 emit_modrm( p
, dst
, src
);
1128 void sse_xorps( struct x86_function
*p
,
1130 struct x86_reg src
)
1132 DUMP_RR( dst
, src
);
1133 emit_2ub(p
, X86_TWOB
, 0x57);
1134 emit_modrm( p
, dst
, src
);
1137 void sse_cvtps2pi( struct x86_function
*p
,
1139 struct x86_reg src
)
1141 DUMP_RR( dst
, src
);
1142 assert(dst
.file
== file_MMX
&&
1143 (src
.file
== file_XMM
|| src
.mod
!= mod_REG
));
1147 emit_2ub(p
, X86_TWOB
, 0x2d);
1148 emit_modrm( p
, dst
, src
);
1151 void sse2_cvtdq2ps( struct x86_function
*p
,
1153 struct x86_reg src
)
1155 DUMP_RR( dst
, src
);
1156 emit_2ub(p
, X86_TWOB
, 0x5b);
1157 emit_modrm( p
, dst
, src
);
1161 /* Shufps can also be used to implement a reduced swizzle when dest ==
1164 void sse_shufps( struct x86_function
*p
,
1169 DUMP_RRI( dst
, src
, shuf
);
1170 emit_2ub(p
, X86_TWOB
, 0xC6);
1171 emit_modrm(p
, dst
, src
);
1175 void sse_unpckhps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1177 DUMP_RR( dst
, src
);
1178 emit_2ub( p
, X86_TWOB
, 0x15 );
1179 emit_modrm( p
, dst
, src
);
1182 void sse_unpcklps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1184 DUMP_RR( dst
, src
);
1185 emit_2ub( p
, X86_TWOB
, 0x14 );
1186 emit_modrm( p
, dst
, src
);
1189 void sse_cmpps( struct x86_function
*p
,
1194 DUMP_RRI( dst
, src
, cc
);
1195 emit_2ub(p
, X86_TWOB
, 0xC2);
1196 emit_modrm(p
, dst
, src
);
1200 void sse_pmovmskb( struct x86_function
*p
,
1204 DUMP_RR( dst
, src
);
1205 emit_3ub(p
, 0x66, X86_TWOB
, 0xD7);
1206 emit_modrm(p
, dst
, src
);
1209 void sse_movmskps( struct x86_function
*p
,
1213 DUMP_RR( dst
, src
);
1214 emit_2ub(p
, X86_TWOB
, 0x50);
1215 emit_modrm(p
, dst
, src
);
1218 /***********************************************************************
1222 void sse2_movd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1225 emit_2ub(p
, 0x66, 0x0f);
1226 if(dst
.mod
== mod_REG
&& dst
.file
== file_REG32
)
1229 emit_modrm(p
, src
, dst
);
1233 emit_op_modrm(p
, 0x6e, 0x7e, dst
, src
);
1237 void sse2_movq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1242 emit_3ub(p
, 0xf3, 0x0f, 0x7e);
1243 emit_modrm(p
, dst
, src
);
1248 assert(src
.mod
== mod_REG
);
1249 emit_3ub(p
, 0x66, 0x0f, 0xd6);
1250 emit_modrm(p
, src
, dst
);
1258 void sse2_movdqu( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1261 emit_2ub(p
, 0xf3, 0x0f);
1262 emit_op_modrm(p
, 0x6f, 0x7f, dst
, src
);
1265 void sse2_movdqa( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1268 emit_2ub(p
, 0x66, 0x0f);
1269 emit_op_modrm(p
, 0x6f, 0x7f, dst
, src
);
1272 void sse2_movsd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1275 emit_2ub(p
, 0xf2, 0x0f);
1276 emit_op_modrm(p
, 0x10, 0x11, dst
, src
);
1279 void sse2_movupd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1282 emit_2ub(p
, 0x66, 0x0f);
1283 emit_op_modrm(p
, 0x10, 0x11, dst
, src
);
1286 void sse2_movapd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1289 emit_2ub(p
, 0x66, 0x0f);
1290 emit_op_modrm(p
, 0x28, 0x29, dst
, src
);
1294 * Perform a reduced swizzle:
1296 void sse2_pshufd( struct x86_function
*p
,
1301 DUMP_RRI( dst
, src
, shuf
);
1302 emit_3ub(p
, 0x66, X86_TWOB
, 0x70);
1303 emit_modrm(p
, dst
, src
);
1307 void sse2_pshuflw( struct x86_function
*p
,
1312 DUMP_RRI( dst
, src
, shuf
);
1313 emit_3ub(p
, 0xf2, X86_TWOB
, 0x70);
1314 emit_modrm(p
, dst
, src
);
1318 void sse2_pshufhw( struct x86_function
*p
,
1323 DUMP_RRI( dst
, src
, shuf
);
1324 emit_3ub(p
, 0xf3, X86_TWOB
, 0x70);
1325 emit_modrm(p
, dst
, src
);
1329 void sse2_cvttps2dq( struct x86_function
*p
,
1331 struct x86_reg src
)
1333 DUMP_RR( dst
, src
);
1334 emit_3ub( p
, 0xF3, X86_TWOB
, 0x5B );
1335 emit_modrm( p
, dst
, src
);
1338 void sse2_cvtps2dq( struct x86_function
*p
,
1340 struct x86_reg src
)
1342 DUMP_RR( dst
, src
);
1343 emit_3ub(p
, 0x66, X86_TWOB
, 0x5B);
1344 emit_modrm( p
, dst
, src
);
1347 void sse2_cvtsd2ss( struct x86_function
*p
,
1349 struct x86_reg src
)
1351 DUMP_RR( dst
, src
);
1352 emit_3ub(p
, 0xf2, 0x0f, 0x5a);
1353 emit_modrm( p
, dst
, src
);
1356 void sse2_cvtpd2ps( struct x86_function
*p
,
1358 struct x86_reg src
)
1360 DUMP_RR( dst
, src
);
1361 emit_3ub(p
, 0x66, 0x0f, 0x5a);
1362 emit_modrm( p
, dst
, src
);
1365 void sse2_packssdw( struct x86_function
*p
,
1367 struct x86_reg src
)
1369 DUMP_RR( dst
, src
);
1370 emit_3ub(p
, 0x66, X86_TWOB
, 0x6B);
1371 emit_modrm( p
, dst
, src
);
1374 void sse2_packsswb( struct x86_function
*p
,
1376 struct x86_reg src
)
1378 DUMP_RR( dst
, src
);
1379 emit_3ub(p
, 0x66, X86_TWOB
, 0x63);
1380 emit_modrm( p
, dst
, src
);
1383 void sse2_packuswb( struct x86_function
*p
,
1385 struct x86_reg src
)
1387 DUMP_RR( dst
, src
);
1388 emit_3ub(p
, 0x66, X86_TWOB
, 0x67);
1389 emit_modrm( p
, dst
, src
);
1392 void sse2_punpcklbw( struct x86_function
*p
,
1394 struct x86_reg src
)
1396 DUMP_RR( dst
, src
);
1397 emit_3ub(p
, 0x66, X86_TWOB
, 0x60);
1398 emit_modrm( p
, dst
, src
);
1401 void sse2_punpcklwd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1403 DUMP_RR( dst
, src
);
1404 emit_3ub(p
, 0x66, 0x0f, 0x61);
1405 emit_modrm( p
, dst
, src
);
1408 void sse2_punpckldq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1410 DUMP_RR( dst
, src
);
1411 emit_3ub(p
, 0x66, 0x0f, 0x62);
1412 emit_modrm( p
, dst
, src
);
1415 void sse2_punpcklqdq( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1417 DUMP_RR( dst
, src
);
1418 emit_3ub(p
, 0x66, 0x0f, 0x6c);
1419 emit_modrm( p
, dst
, src
);
1422 void sse2_psllw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1425 emit_3ub(p
, 0x66, 0x0f, 0x71);
1426 emit_modrm_noreg(p
, 6, dst
);
1430 void sse2_pslld_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1433 emit_3ub(p
, 0x66, 0x0f, 0x72);
1434 emit_modrm_noreg(p
, 6, dst
);
1438 void sse2_psllq_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1441 emit_3ub(p
, 0x66, 0x0f, 0x73);
1442 emit_modrm_noreg(p
, 6, dst
);
1446 void sse2_psrlw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1449 emit_3ub(p
, 0x66, 0x0f, 0x71);
1450 emit_modrm_noreg(p
, 2, dst
);
1454 void sse2_psrld_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1457 emit_3ub(p
, 0x66, 0x0f, 0x72);
1458 emit_modrm_noreg(p
, 2, dst
);
1462 void sse2_psrlq_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1465 emit_3ub(p
, 0x66, 0x0f, 0x73);
1466 emit_modrm_noreg(p
, 2, dst
);
1470 void sse2_psraw_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1473 emit_3ub(p
, 0x66, 0x0f, 0x71);
1474 emit_modrm_noreg(p
, 4, dst
);
1478 void sse2_psrad_imm( struct x86_function
*p
, struct x86_reg dst
, unsigned imm
)
1481 emit_3ub(p
, 0x66, 0x0f, 0x72);
1482 emit_modrm_noreg(p
, 4, dst
);
1486 void sse2_por( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1489 emit_3ub(p
, 0x66, 0x0f, 0xeb);
1490 emit_modrm(p
, dst
, src
);
1493 void sse2_rcpps( struct x86_function
*p
,
1495 struct x86_reg src
)
1497 DUMP_RR( dst
, src
);
1498 emit_2ub(p
, X86_TWOB
, 0x53);
1499 emit_modrm( p
, dst
, src
);
1502 void sse2_rcpss( struct x86_function
*p
,
1504 struct x86_reg src
)
1506 DUMP_RR( dst
, src
);
1507 emit_3ub(p
, 0xF3, X86_TWOB
, 0x53);
1508 emit_modrm( p
, dst
, src
);
1511 /***********************************************************************
1514 static void note_x87_pop( struct x86_function
*p
)
1517 assert(p
->x87_stack
>= 0);
1520 static void note_x87_push( struct x86_function
*p
)
1523 assert(p
->x87_stack
<= 7);
1526 void x87_assert_stack_empty( struct x86_function
*p
)
1528 assert (p
->x87_stack
== 0);
1532 void x87_fist( struct x86_function
*p
, struct x86_reg dst
)
1536 emit_modrm_noreg(p
, 2, dst
);
1539 void x87_fistp( struct x86_function
*p
, struct x86_reg dst
)
1543 emit_modrm_noreg(p
, 3, dst
);
1547 void x87_fild( struct x86_function
*p
, struct x86_reg arg
)
1551 emit_modrm_noreg(p
, 0, arg
);
1555 void x87_fldz( struct x86_function
*p
)
1558 emit_2ub(p
, 0xd9, 0xee);
1563 void x87_fldcw( struct x86_function
*p
, struct x86_reg arg
)
1566 assert(arg
.file
== file_REG32
);
1567 assert(arg
.mod
!= mod_REG
);
1569 emit_modrm_noreg(p
, 5, arg
);
1572 void x87_fld1( struct x86_function
*p
)
1575 emit_2ub(p
, 0xd9, 0xe8);
1579 void x87_fldl2e( struct x86_function
*p
)
1582 emit_2ub(p
, 0xd9, 0xea);
1586 void x87_fldln2( struct x86_function
*p
)
1589 emit_2ub(p
, 0xd9, 0xed);
1593 void x87_fwait( struct x86_function
*p
)
1599 void x87_fnclex( struct x86_function
*p
)
1602 emit_2ub(p
, 0xdb, 0xe2);
1605 void x87_fclex( struct x86_function
*p
)
1611 void x87_fcmovb( struct x86_function
*p
, struct x86_reg arg
)
1614 assert(arg
.file
== file_x87
);
1615 emit_2ub(p
, 0xda, 0xc0+arg
.idx
);
1618 void x87_fcmove( struct x86_function
*p
, struct x86_reg arg
)
1621 assert(arg
.file
== file_x87
);
1622 emit_2ub(p
, 0xda, 0xc8+arg
.idx
);
1625 void x87_fcmovbe( struct x86_function
*p
, struct x86_reg arg
)
1628 assert(arg
.file
== file_x87
);
1629 emit_2ub(p
, 0xda, 0xd0+arg
.idx
);
1632 void x87_fcmovnb( struct x86_function
*p
, struct x86_reg arg
)
1635 assert(arg
.file
== file_x87
);
1636 emit_2ub(p
, 0xdb, 0xc0+arg
.idx
);
1639 void x87_fcmovne( struct x86_function
*p
, struct x86_reg arg
)
1642 assert(arg
.file
== file_x87
);
1643 emit_2ub(p
, 0xdb, 0xc8+arg
.idx
);
1646 void x87_fcmovnbe( struct x86_function
*p
, struct x86_reg arg
)
1649 assert(arg
.file
== file_x87
);
1650 emit_2ub(p
, 0xdb, 0xd0+arg
.idx
);
1655 static void x87_arith_op( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg arg
,
1656 unsigned char dst0ub0
,
1657 unsigned char dst0ub1
,
1658 unsigned char arg0ub0
,
1659 unsigned char arg0ub1
,
1660 unsigned char argmem_noreg
)
1662 assert(dst
.file
== file_x87
);
1664 if (arg
.file
== file_x87
) {
1666 emit_2ub(p
, dst0ub0
, dst0ub1
+arg
.idx
);
1667 else if (arg
.idx
== 0)
1668 emit_2ub(p
, arg0ub0
, arg0ub1
+arg
.idx
);
1672 else if (dst
.idx
== 0) {
1673 assert(arg
.file
== file_REG32
);
1675 emit_modrm_noreg(p
, argmem_noreg
, arg
);
1681 void x87_fmul( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1683 DUMP_RR( dst
, src
);
1684 x87_arith_op(p
, dst
, src
,
1690 void x87_fsub( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1692 DUMP_RR( dst
, src
);
1693 x87_arith_op(p
, dst
, src
,
1699 void x87_fsubr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1701 DUMP_RR( dst
, src
);
1702 x87_arith_op(p
, dst
, src
,
1708 void x87_fadd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1710 DUMP_RR( dst
, src
);
1711 x87_arith_op(p
, dst
, src
,
1717 void x87_fdiv( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1719 DUMP_RR( dst
, src
);
1720 x87_arith_op(p
, dst
, src
,
1726 void x87_fdivr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1728 DUMP_RR( dst
, src
);
1729 x87_arith_op(p
, dst
, src
,
1735 void x87_fmulp( struct x86_function
*p
, struct x86_reg dst
)
1738 assert(dst
.file
== file_x87
);
1739 assert(dst
.idx
>= 1);
1740 emit_2ub(p
, 0xde, 0xc8+dst
.idx
);
1744 void x87_fsubp( struct x86_function
*p
, struct x86_reg dst
)
1747 assert(dst
.file
== file_x87
);
1748 assert(dst
.idx
>= 1);
1749 emit_2ub(p
, 0xde, 0xe8+dst
.idx
);
1753 void x87_fsubrp( struct x86_function
*p
, struct x86_reg dst
)
1756 assert(dst
.file
== file_x87
);
1757 assert(dst
.idx
>= 1);
1758 emit_2ub(p
, 0xde, 0xe0+dst
.idx
);
1762 void x87_faddp( struct x86_function
*p
, struct x86_reg dst
)
1765 assert(dst
.file
== file_x87
);
1766 assert(dst
.idx
>= 1);
1767 emit_2ub(p
, 0xde, 0xc0+dst
.idx
);
1771 void x87_fdivp( struct x86_function
*p
, struct x86_reg dst
)
1774 assert(dst
.file
== file_x87
);
1775 assert(dst
.idx
>= 1);
1776 emit_2ub(p
, 0xde, 0xf8+dst
.idx
);
1780 void x87_fdivrp( struct x86_function
*p
, struct x86_reg dst
)
1783 assert(dst
.file
== file_x87
);
1784 assert(dst
.idx
>= 1);
1785 emit_2ub(p
, 0xde, 0xf0+dst
.idx
);
1789 void x87_ftst( struct x86_function
*p
)
1792 emit_2ub(p
, 0xd9, 0xe4);
1795 void x87_fucom( struct x86_function
*p
, struct x86_reg arg
)
1798 assert(arg
.file
== file_x87
);
1799 emit_2ub(p
, 0xdd, 0xe0+arg
.idx
);
1802 void x87_fucomp( struct x86_function
*p
, struct x86_reg arg
)
1805 assert(arg
.file
== file_x87
);
1806 emit_2ub(p
, 0xdd, 0xe8+arg
.idx
);
1810 void x87_fucompp( struct x86_function
*p
)
1813 emit_2ub(p
, 0xda, 0xe9);
1814 note_x87_pop(p
); /* pop twice */
1815 note_x87_pop(p
); /* pop twice */
1818 void x87_fxch( struct x86_function
*p
, struct x86_reg arg
)
1821 assert(arg
.file
== file_x87
);
1822 emit_2ub(p
, 0xd9, 0xc8+arg
.idx
);
1825 void x87_fabs( struct x86_function
*p
)
1828 emit_2ub(p
, 0xd9, 0xe1);
1831 void x87_fchs( struct x86_function
*p
)
1834 emit_2ub(p
, 0xd9, 0xe0);
1837 void x87_fcos( struct x86_function
*p
)
1840 emit_2ub(p
, 0xd9, 0xff);
1844 void x87_fprndint( struct x86_function
*p
)
1847 emit_2ub(p
, 0xd9, 0xfc);
1850 void x87_fscale( struct x86_function
*p
)
1853 emit_2ub(p
, 0xd9, 0xfd);
1856 void x87_fsin( struct x86_function
*p
)
1859 emit_2ub(p
, 0xd9, 0xfe);
1862 void x87_fsincos( struct x86_function
*p
)
1865 emit_2ub(p
, 0xd9, 0xfb);
1868 void x87_fsqrt( struct x86_function
*p
)
1871 emit_2ub(p
, 0xd9, 0xfa);
1874 void x87_fxtract( struct x86_function
*p
)
1877 emit_2ub(p
, 0xd9, 0xf4);
1882 * Restrictions: -1.0 <= st0 <= 1.0
1884 void x87_f2xm1( struct x86_function
*p
)
1887 emit_2ub(p
, 0xd9, 0xf0);
1890 /* st1 = st1 * log2(st0);
1893 void x87_fyl2x( struct x86_function
*p
)
1896 emit_2ub(p
, 0xd9, 0xf1);
1900 /* st1 = st1 * log2(st0 + 1.0);
1903 * A fast operation, with restrictions: -.29 < st0 < .29
1905 void x87_fyl2xp1( struct x86_function
*p
)
1908 emit_2ub(p
, 0xd9, 0xf9);
1913 void x87_fld( struct x86_function
*p
, struct x86_reg arg
)
1916 if (arg
.file
== file_x87
)
1917 emit_2ub(p
, 0xd9, 0xc0 + arg
.idx
);
1920 emit_modrm_noreg(p
, 0, arg
);
1925 void x87_fst( struct x86_function
*p
, struct x86_reg dst
)
1928 if (dst
.file
== file_x87
)
1929 emit_2ub(p
, 0xdd, 0xd0 + dst
.idx
);
1932 emit_modrm_noreg(p
, 2, dst
);
1936 void x87_fstp( struct x86_function
*p
, struct x86_reg dst
)
1939 if (dst
.file
== file_x87
)
1940 emit_2ub(p
, 0xdd, 0xd8 + dst
.idx
);
1943 emit_modrm_noreg(p
, 3, dst
);
1948 void x87_fpop( struct x86_function
*p
)
1950 x87_fstp( p
, x86_make_reg( file_x87
, 0 ));
1954 void x87_fcom( struct x86_function
*p
, struct x86_reg dst
)
1957 if (dst
.file
== file_x87
)
1958 emit_2ub(p
, 0xd8, 0xd0 + dst
.idx
);
1961 emit_modrm_noreg(p
, 2, dst
);
1966 void x87_fcomp( struct x86_function
*p
, struct x86_reg dst
)
1969 if (dst
.file
== file_x87
)
1970 emit_2ub(p
, 0xd8, 0xd8 + dst
.idx
);
1973 emit_modrm_noreg(p
, 3, dst
);
1978 void x87_fcomi( struct x86_function
*p
, struct x86_reg arg
)
1981 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1984 void x87_fcomip( struct x86_function
*p
, struct x86_reg arg
)
1987 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1992 void x87_fnstsw( struct x86_function
*p
, struct x86_reg dst
)
1995 assert(dst
.file
== file_REG32
);
1997 if (dst
.idx
== reg_AX
&&
1999 emit_2ub(p
, 0xdf, 0xe0);
2002 emit_modrm_noreg(p
, 7, dst
);
2007 void x87_fnstcw( struct x86_function
*p
, struct x86_reg dst
)
2010 assert(dst
.file
== file_REG32
);
2012 emit_1ub(p
, 0x9b); /* WAIT -- needed? */
2014 emit_modrm_noreg(p
, 7, dst
);
/***********************************************************************
 * MMX instructions
 */
2024 void mmx_emms( struct x86_function
*p
)
2027 assert(p
->need_emms
);
2028 emit_2ub(p
, 0x0f, 0x77);
2032 void mmx_packssdw( struct x86_function
*p
,
2034 struct x86_reg src
)
2036 DUMP_RR( dst
, src
);
2037 assert(dst
.file
== file_MMX
&&
2038 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
2042 emit_2ub(p
, X86_TWOB
, 0x6b);
2043 emit_modrm( p
, dst
, src
);
2046 void mmx_packuswb( struct x86_function
*p
,
2048 struct x86_reg src
)
2050 DUMP_RR( dst
, src
);
2051 assert(dst
.file
== file_MMX
&&
2052 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
2056 emit_2ub(p
, X86_TWOB
, 0x67);
2057 emit_modrm( p
, dst
, src
);
2060 void mmx_movd( struct x86_function
*p
,
2062 struct x86_reg src
)
2064 DUMP_RR( dst
, src
);
2066 emit_1ub(p
, X86_TWOB
);
2067 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
2070 void mmx_movq( struct x86_function
*p
,
2072 struct x86_reg src
)
2074 DUMP_RR( dst
, src
);
2076 emit_1ub(p
, X86_TWOB
);
2077 emit_op_modrm( p
, 0x6f, 0x7f, dst
, src
);
/***********************************************************************
 * Helper functions
 */
2086 void x86_cdecl_caller_push_regs( struct x86_function
*p
)
2088 x86_push(p
, x86_make_reg(file_REG32
, reg_AX
));
2089 x86_push(p
, x86_make_reg(file_REG32
, reg_CX
));
2090 x86_push(p
, x86_make_reg(file_REG32
, reg_DX
));
2093 void x86_cdecl_caller_pop_regs( struct x86_function
*p
)
2095 x86_pop(p
, x86_make_reg(file_REG32
, reg_DX
));
2096 x86_pop(p
, x86_make_reg(file_REG32
, reg_CX
));
2097 x86_pop(p
, x86_make_reg(file_REG32
, reg_AX
));
2101 struct x86_reg
x86_fn_arg( struct x86_function
*p
,
2104 switch(x86_target(p
))
2106 case X86_64_WIN64_ABI
:
2107 /* Microsoft uses a different calling convention than the rest of the world */
2111 return x86_make_reg(file_REG32
, reg_CX
);
2113 return x86_make_reg(file_REG32
, reg_DX
);
2115 return x86_make_reg(file_REG32
, reg_R8
);
2117 return x86_make_reg(file_REG32
, reg_R9
);
2119 /* Win64 allocates stack slots as if it pushed the first 4 arguments too */
2120 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2121 p
->stack_offset
+ arg
* 8);
2123 case X86_64_STD_ABI
:
2127 return x86_make_reg(file_REG32
, reg_DI
);
2129 return x86_make_reg(file_REG32
, reg_SI
);
2131 return x86_make_reg(file_REG32
, reg_DX
);
2133 return x86_make_reg(file_REG32
, reg_CX
);
2135 return x86_make_reg(file_REG32
, reg_R8
);
2137 return x86_make_reg(file_REG32
, reg_R9
);
2139 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2140 p
->stack_offset
+ (arg
- 6) * 8); /* ??? */
2143 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
2144 p
->stack_offset
+ arg
* 4); /* ??? */
2146 assert(0 && "Unexpected x86 target ABI in x86_fn_arg");
2147 return x86_make_reg(file_REG32
, reg_CX
); /* not used / silence warning */
2151 static void x86_init_func_common( struct x86_function
*p
)
2155 if(util_cpu_caps
.has_mmx
)
2157 if(util_cpu_caps
.has_mmx2
)
2158 p
->caps
|= X86_MMX2
;
2159 if(util_cpu_caps
.has_sse
)
2161 if(util_cpu_caps
.has_sse2
)
2162 p
->caps
|= X86_SSE2
;
2163 if(util_cpu_caps
.has_sse3
)
2164 p
->caps
|= X86_SSE3
;
2165 if(util_cpu_caps
.has_sse4_1
)
2166 p
->caps
|= X86_SSE4_1
;
2171 void x86_init_func( struct x86_function
*p
)
2175 x86_init_func_common(p
);
2178 void x86_init_func_size( struct x86_function
*p
, unsigned code_size
)
2180 p
->size
= code_size
;
2181 p
->store
= rtasm_exec_malloc(code_size
);
2182 if (p
->store
== NULL
) {
2183 p
->store
= p
->error_overflow
;
2185 x86_init_func_common(p
);
2188 void x86_release_func( struct x86_function
*p
)
2190 if (p
->store
&& p
->store
!= p
->error_overflow
)
2191 rtasm_exec_free(p
->store
);
2199 static INLINE x86_func
2200 voidptr_to_x86_func(void *v
)
2206 assert(sizeof(u
.v
) == sizeof(u
.f
));
2212 x86_func
x86_get_func( struct x86_function
*p
)
2215 if (DISASSEM
&& p
->store
)
2216 debug_printf("disassemble %p %p\n", p
->store
, p
->csr
);
2218 if (p
->store
== p
->error_overflow
)
2219 return voidptr_to_x86_func(NULL
);
2221 return voidptr_to_x86_func(p
->store
);
2226 void x86sse_dummy( void );
2228 void x86sse_dummy( void )