1 /**************************************************************************
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
19 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 **************************************************************************/
24 #if defined(__i386__) || defined(__386__) || defined(i386)
26 #include "pipe/p_compiler.h"
27 #include "pipe/p_debug.h"
28 #include "pipe/p_pointer.h"
30 #include "rtasm_execmem.h"
31 #include "rtasm_x86sse.h"
40 void x86_print_reg( struct x86_reg reg
)
42 if (reg
.mod
!= mod_REG
)
48 case reg_AX
: debug_printf( "EAX" ); break;
49 case reg_CX
: debug_printf( "ECX" ); break;
50 case reg_DX
: debug_printf( "EDX" ); break;
51 case reg_BX
: debug_printf( "EBX" ); break;
52 case reg_SP
: debug_printf( "ESP" ); break;
53 case reg_BP
: debug_printf( "EBP" ); break;
54 case reg_SI
: debug_printf( "ESI" ); break;
55 case reg_DI
: debug_printf( "EDI" ); break;
59 debug_printf( "MMX%u", reg
.idx
);
62 debug_printf( "XMM%u", reg
.idx
);
65 debug_printf( "fp%u", reg
.idx
);
69 if (reg
.mod
== mod_DISP8
||
70 reg
.mod
== mod_DISP32
)
71 debug_printf("+%d", reg
.disp
);
73 if (reg
.mod
!= mod_REG
)
/* DUMP* tracing macros: when enabled they print one line per emitted
 * instruction (mnemonic derived from __FUNCTION__, plus operands) through
 * debug_printf; the trailing group of empty definitions is the disabled
 * no-op variant.
 * NOTE(review): this span is extraction-garbled -- macro continuation
 * lines and the surrounding #if/#else/#endif are missing. Restore from
 * upstream before editing; do not assume the text below is complete.
 */
79 #define DUMP_START() debug_printf( "\n" )
80 #define DUMP_END() debug_printf( "\n" )
83 const char *foo = __FUNCTION__; \
84 while (*foo && *foo != '_') \
88 debug_printf( "\n% 4x% 15s ", p->csr - p->store, foo ); \
91 #define DUMP_I( I ) do { \
93 debug_printf( "%u", I ); \
96 #define DUMP_R( R0 ) do { \
98 x86_print_reg( R0 ); \
101 #define DUMP_RR( R0, R1 ) do { \
103 x86_print_reg( R0 ); \
104 debug_printf( ", " ); \
105 x86_print_reg( R1 ); \
108 #define DUMP_RI( R0, I ) do { \
110 x86_print_reg( R0 ); \
111 debug_printf( ", %u", I ); \
114 #define DUMP_RRI( R0, R1, I ) do { \
116 x86_print_reg( R0 ); \
117 debug_printf( ", " ); \
118 x86_print_reg( R1 ); \
119 debug_printf( ", %u", I ); \
129 #define DUMP_RR( R0, R1 )
130 #define DUMP_RI( R0, I )
131 #define DUMP_RRI( R0, R1, I )
136 static void do_realloc( struct x86_function
*p
)
138 if (p
->store
== p
->error_overflow
) {
141 else if (p
->size
== 0) {
143 p
->store
= rtasm_exec_malloc(p
->size
);
147 uintptr_t used
= pointer_to_uintptr( p
->csr
) - pointer_to_uintptr( p
->store
);
148 unsigned char *tmp
= p
->store
;
150 p
->store
= rtasm_exec_malloc(p
->size
);
153 memcpy(p
->store
, tmp
, used
);
154 p
->csr
= p
->store
+ used
;
160 rtasm_exec_free(tmp
);
163 if (p
->store
== NULL
) {
164 p
->store
= p
->csr
= p
->error_overflow
;
165 p
->size
= sizeof(p
->error_overflow
);
169 /* Emit bytes to the instruction stream:
171 static unsigned char *reserve( struct x86_function
*p
, int bytes
)
173 if (p
->csr
+ bytes
- p
->store
> (int) p
->size
)
177 unsigned char *csr
= p
->csr
;
/* Append one signed byte (e.g. a disp8) to the instruction stream. */
static void emit_1b( struct x86_function *p, char b0 )
{
   char *csr = (char *)reserve(p, 1);
   *csr = b0;
}
/* Append one native-endian 32-bit integer (disp32/imm32). */
static void emit_1i( struct x86_function *p, int i0 )
{
   int *icsr = (int *)reserve(p, sizeof(i0));
   *icsr = i0;
}
/* Append one opcode/operand byte. */
static void emit_1ub( struct x86_function *p, unsigned char b0 )
{
   unsigned char *csr = reserve(p, 1);
   *csr++ = b0;
}
/* Append two opcode/operand bytes. */
static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
{
   unsigned char *csr = reserve(p, 2);
   *csr++ = b0;
   *csr++ = b1;
}
/* Append three opcode/operand bytes. */
static void emit_3ub( struct x86_function *p, unsigned char b0, unsigned char b1, unsigned char b2 )
{
   unsigned char *csr = reserve(p, 3);
   *csr++ = b0;
   *csr++ = b1;
   *csr++ = b2;
}
219 /* Build a modRM byte + possible displacement. No treatment of SIB
220 * indexing. BZZT - no way to encode an absolute address.
222 * This is the "/r" field in the x86 manuals...
224 static void emit_modrm( struct x86_function
*p
,
226 struct x86_reg regmem
)
228 unsigned char val
= 0;
230 assert(reg
.mod
== mod_REG
);
232 val
|= regmem
.mod
<< 6; /* mod field */
233 val
|= reg
.idx
<< 3; /* reg field */
234 val
|= regmem
.idx
; /* r/m field */
238 /* Oh-oh we've stumbled into the SIB thing.
240 if (regmem
.file
== file_REG32
&&
241 regmem
.idx
== reg_SP
) {
242 emit_1ub(p
, 0x24); /* simplistic! */
245 switch (regmem
.mod
) {
250 emit_1b(p
, (char) regmem
.disp
);
253 emit_1i(p
, regmem
.disp
);
261 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
263 static void emit_modrm_noreg( struct x86_function
*p
,
265 struct x86_reg regmem
)
267 struct x86_reg dummy
= x86_make_reg(file_REG32
, op
);
268 emit_modrm(p
, dummy
, regmem
);
271 /* Many x86 instructions have two opcodes to cope with the situations
272 * where the destination is a register or memory reference
273 * respectively. This function selects the correct opcode based on
274 * the arguments presented.
276 static void emit_op_modrm( struct x86_function
*p
,
277 unsigned char op_dst_is_reg
,
278 unsigned char op_dst_is_mem
,
284 emit_1ub(p
, op_dst_is_reg
);
285 emit_modrm(p
, dst
, src
);
290 assert(src
.mod
== mod_REG
);
291 emit_1ub(p
, op_dst_is_mem
);
292 emit_modrm(p
, src
, dst
);
306 /* Create and manipulate registers and regmem values:
308 struct x86_reg
x86_make_reg( enum x86_reg_file file
,
309 enum x86_reg_name idx
)
321 struct x86_reg
x86_make_disp( struct x86_reg reg
,
324 assert(reg
.file
== file_REG32
);
326 if (reg
.mod
== mod_REG
)
331 if (reg
.disp
== 0 && reg
.idx
!= reg_BP
)
332 reg
.mod
= mod_INDIRECT
;
333 else if (reg
.disp
<= 127 && reg
.disp
>= -128)
336 reg
.mod
= mod_DISP32
;
341 struct x86_reg
x86_deref( struct x86_reg reg
)
343 return x86_make_disp(reg
, 0);
346 struct x86_reg
x86_get_base_reg( struct x86_reg reg
)
348 return x86_make_reg( reg
.file
, reg
.idx
);
351 int x86_get_label( struct x86_function
*p
)
353 return p
->csr
- p
->store
;
358 /***********************************************************************
363 void x86_jcc( struct x86_function
*p
,
367 int offset
= label
- (x86_get_label(p
) + 2);
371 assert(p
->csr
- p
->store
> -offset
);
374 if (offset
<= 127 && offset
>= -128) {
375 emit_1ub(p
, 0x70 + cc
);
376 emit_1b(p
, (char) offset
);
379 offset
= label
- (x86_get_label(p
) + 6);
380 emit_2ub(p
, 0x0f, 0x80 + cc
);
385 /* Always use a 32bit offset for forward jumps:
387 int x86_jcc_forward( struct x86_function
*p
,
391 emit_2ub(p
, 0x0f, 0x80 + cc
);
393 return x86_get_label(p
);
/* Unconditional forward jump with placeholder rel32 (JMP = E9);
 * fix up later via x86_fixup_fwd_jump().
 */
int x86_jmp_forward( struct x86_function *p)
{
   DUMP();
   emit_1ub(p, 0xe9);
   emit_1i(p, 0);
   return x86_get_label(p);
}
/* Forward call with placeholder rel32 (CALL = E8);
 * fix up later via x86_fixup_fwd_jump().
 */
int x86_call_forward( struct x86_function *p)
{
   DUMP();
   emit_1ub(p, 0xe8);
   emit_1i(p, 0);
   return x86_get_label(p);
}
413 /* Fixup offset from forward jump:
415 void x86_fixup_fwd_jump( struct x86_function
*p
,
418 *(int *)(p
->store
+ fixup
- 4) = x86_get_label(p
) - fixup
;
/* Unconditional near jump to a known (backward) label (JMP = E9 rel32). */
void x86_jmp( struct x86_function *p, int label)
{
   DUMP_I( label );
   emit_1ub(p, 0xe9);
   emit_1i(p, label - x86_get_label(p) - 4);
}
428 void x86_call( struct x86_function
*p
, struct x86_reg reg
)
432 emit_modrm_noreg(p
, 2, reg
);
437 * Temporary. As I need immediate operands, and dont want to mess with the codegen,
438 * I load the immediate into general purpose register and use it.
440 void x86_mov_reg_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
443 assert(dst
.mod
== mod_REG
);
444 emit_1ub(p
, 0xb8 + dst
.idx
);
448 void x86_add_reg_imm8( struct x86_function
*p
, struct x86_reg dst
, ubyte imm
)
451 assert(dst
.mod
== mod_REG
);
453 emit_modrm_noreg(p
, 0, dst
);
458 void x86_push( struct x86_function
*p
,
462 if (reg
.mod
== mod_REG
)
463 emit_1ub(p
, 0x50 + reg
.idx
);
467 emit_modrm_noreg(p
, 6, reg
);
471 p
->stack_offset
+= 4;
474 void x86_push_imm32( struct x86_function
*p
,
481 p
->stack_offset
+= 4;
485 void x86_pop( struct x86_function
*p
,
489 assert(reg
.mod
== mod_REG
);
490 emit_1ub(p
, 0x58 + reg
.idx
);
491 p
->stack_offset
-= 4;
494 void x86_inc( struct x86_function
*p
,
498 assert(reg
.mod
== mod_REG
);
499 emit_1ub(p
, 0x40 + reg
.idx
);
502 void x86_dec( struct x86_function
*p
,
506 assert(reg
.mod
== mod_REG
);
507 emit_1ub(p
, 0x48 + reg
.idx
);
510 void x86_ret( struct x86_function
*p
)
513 assert(p
->stack_offset
== 0);
/* RET imm16 (C2 iw): return and pop 'imm' bytes of arguments. */
void x86_retw( struct x86_function *p, unsigned short imm )
{
   DUMP();
   emit_3ub(p, 0xc2, imm & 0xff, (imm >> 8) & 0xff);
}
/* SAHF (9E): load flags from AH.
 * NOTE(review): body reconstructed -- only the signature was visible.
 */
void x86_sahf( struct x86_function *p )
{
   DUMP();
   emit_1ub(p, 0x9e);
}
529 void x86_mov( struct x86_function
*p
,
534 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
537 void x86_xor( struct x86_function
*p
,
542 emit_op_modrm( p
, 0x33, 0x31, dst
, src
);
545 void x86_cmp( struct x86_function
*p
,
550 emit_op_modrm( p
, 0x3b, 0x39, dst
, src
);
553 void x86_lea( struct x86_function
*p
,
559 emit_modrm( p
, dst
, src
);
562 void x86_test( struct x86_function
*p
,
568 emit_modrm( p
, dst
, src
);
571 void x86_add( struct x86_function
*p
,
576 emit_op_modrm(p
, 0x03, 0x01, dst
, src
);
579 /* Calculate EAX * src, results in EDX:EAX.
581 void x86_mul( struct x86_function
*p
,
586 emit_modrm_noreg(p
, 4, src
);
590 void x86_imul( struct x86_function
*p
,
595 emit_2ub(p
, X86_TWOB
, 0xAF);
596 emit_modrm(p
, dst
, src
);
600 void x86_sub( struct x86_function
*p
,
605 emit_op_modrm(p
, 0x2b, 0x29, dst
, src
);
608 void x86_or( struct x86_function
*p
,
613 emit_op_modrm( p
, 0x0b, 0x09, dst
, src
);
616 void x86_and( struct x86_function
*p
,
621 emit_op_modrm( p
, 0x23, 0x21, dst
, src
);
626 /***********************************************************************
631 void sse_movss( struct x86_function
*p
,
636 emit_2ub(p
, 0xF3, X86_TWOB
);
637 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
640 void sse_movaps( struct x86_function
*p
,
645 emit_1ub(p
, X86_TWOB
);
646 emit_op_modrm( p
, 0x28, 0x29, dst
, src
);
649 void sse_movups( struct x86_function
*p
,
654 emit_1ub(p
, X86_TWOB
);
655 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
658 void sse_movhps( struct x86_function
*p
,
663 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
664 emit_1ub(p
, X86_TWOB
);
665 emit_op_modrm( p
, 0x16, 0x17, dst
, src
); /* cf movlhps */
668 void sse_movlps( struct x86_function
*p
,
673 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
674 emit_1ub(p
, X86_TWOB
);
675 emit_op_modrm( p
, 0x12, 0x13, dst
, src
); /* cf movhlps */
678 void sse_maxps( struct x86_function
*p
,
683 emit_2ub(p
, X86_TWOB
, 0x5F);
684 emit_modrm( p
, dst
, src
);
687 void sse_maxss( struct x86_function
*p
,
692 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5F);
693 emit_modrm( p
, dst
, src
);
696 void sse_divss( struct x86_function
*p
,
701 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5E);
702 emit_modrm( p
, dst
, src
);
705 void sse_minps( struct x86_function
*p
,
710 emit_2ub(p
, X86_TWOB
, 0x5D);
711 emit_modrm( p
, dst
, src
);
714 void sse_subps( struct x86_function
*p
,
719 emit_2ub(p
, X86_TWOB
, 0x5C);
720 emit_modrm( p
, dst
, src
);
723 void sse_mulps( struct x86_function
*p
,
728 emit_2ub(p
, X86_TWOB
, 0x59);
729 emit_modrm( p
, dst
, src
);
732 void sse_mulss( struct x86_function
*p
,
737 emit_3ub(p
, 0xF3, X86_TWOB
, 0x59);
738 emit_modrm( p
, dst
, src
);
741 void sse_addps( struct x86_function
*p
,
746 emit_2ub(p
, X86_TWOB
, 0x58);
747 emit_modrm( p
, dst
, src
);
750 void sse_addss( struct x86_function
*p
,
755 emit_3ub(p
, 0xF3, X86_TWOB
, 0x58);
756 emit_modrm( p
, dst
, src
);
759 void sse_andnps( struct x86_function
*p
,
764 emit_2ub(p
, X86_TWOB
, 0x55);
765 emit_modrm( p
, dst
, src
);
768 void sse_andps( struct x86_function
*p
,
773 emit_2ub(p
, X86_TWOB
, 0x54);
774 emit_modrm( p
, dst
, src
);
777 void sse_rsqrtps( struct x86_function
*p
,
782 emit_2ub(p
, X86_TWOB
, 0x52);
783 emit_modrm( p
, dst
, src
);
786 void sse_rsqrtss( struct x86_function
*p
,
791 emit_3ub(p
, 0xF3, X86_TWOB
, 0x52);
792 emit_modrm( p
, dst
, src
);
796 void sse_movhlps( struct x86_function
*p
,
801 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
802 emit_2ub(p
, X86_TWOB
, 0x12);
803 emit_modrm( p
, dst
, src
);
806 void sse_movlhps( struct x86_function
*p
,
811 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
812 emit_2ub(p
, X86_TWOB
, 0x16);
813 emit_modrm( p
, dst
, src
);
816 void sse_orps( struct x86_function
*p
,
821 emit_2ub(p
, X86_TWOB
, 0x56);
822 emit_modrm( p
, dst
, src
);
825 void sse_xorps( struct x86_function
*p
,
830 emit_2ub(p
, X86_TWOB
, 0x57);
831 emit_modrm( p
, dst
, src
);
834 void sse_cvtps2pi( struct x86_function
*p
,
839 assert(dst
.file
== file_MMX
&&
840 (src
.file
== file_XMM
|| src
.mod
!= mod_REG
));
844 emit_2ub(p
, X86_TWOB
, 0x2d);
845 emit_modrm( p
, dst
, src
);
848 void sse2_cvtdq2ps( struct x86_function
*p
,
853 emit_2ub(p
, X86_TWOB
, 0x5b);
854 emit_modrm( p
, dst
, src
);
858 /* Shufps can also be used to implement a reduced swizzle when dest ==
861 void sse_shufps( struct x86_function
*p
,
866 DUMP_RRI( dst
, src
, shuf
);
867 emit_2ub(p
, X86_TWOB
, 0xC6);
868 emit_modrm(p
, dst
, src
);
872 void sse_unpckhps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
875 emit_2ub( p
, X86_TWOB
, 0x15 );
876 emit_modrm( p
, dst
, src
);
879 void sse_unpcklps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
882 emit_2ub( p
, X86_TWOB
, 0x14 );
883 emit_modrm( p
, dst
, src
);
886 void sse_cmpps( struct x86_function
*p
,
891 DUMP_RRI( dst
, src
, cc
);
892 emit_2ub(p
, X86_TWOB
, 0xC2);
893 emit_modrm(p
, dst
, src
);
897 void sse_pmovmskb( struct x86_function
*p
,
902 emit_3ub(p
, 0x66, X86_TWOB
, 0xD7);
903 emit_modrm(p
, dst
, src
);
906 /***********************************************************************
911 * Perform a reduced swizzle:
913 void sse2_pshufd( struct x86_function
*p
,
918 DUMP_RRI( dst
, src
, shuf
);
919 emit_3ub(p
, 0x66, X86_TWOB
, 0x70);
920 emit_modrm(p
, dst
, src
);
924 void sse2_cvttps2dq( struct x86_function
*p
,
929 emit_3ub( p
, 0xF3, X86_TWOB
, 0x5B );
930 emit_modrm( p
, dst
, src
);
933 void sse2_cvtps2dq( struct x86_function
*p
,
938 emit_3ub(p
, 0x66, X86_TWOB
, 0x5B);
939 emit_modrm( p
, dst
, src
);
942 void sse2_packssdw( struct x86_function
*p
,
947 emit_3ub(p
, 0x66, X86_TWOB
, 0x6B);
948 emit_modrm( p
, dst
, src
);
951 void sse2_packsswb( struct x86_function
*p
,
956 emit_3ub(p
, 0x66, X86_TWOB
, 0x63);
957 emit_modrm( p
, dst
, src
);
960 void sse2_packuswb( struct x86_function
*p
,
965 emit_3ub(p
, 0x66, X86_TWOB
, 0x67);
966 emit_modrm( p
, dst
, src
);
969 void sse2_punpcklbw( struct x86_function
*p
,
974 emit_3ub(p
, 0x66, X86_TWOB
, 0x60);
975 emit_modrm( p
, dst
, src
);
979 void sse2_rcpps( struct x86_function
*p
,
984 emit_2ub(p
, X86_TWOB
, 0x53);
985 emit_modrm( p
, dst
, src
);
988 void sse2_rcpss( struct x86_function
*p
,
993 emit_3ub(p
, 0xF3, X86_TWOB
, 0x53);
994 emit_modrm( p
, dst
, src
);
997 void sse2_movd( struct x86_function
*p
,
1001 DUMP_RR( dst
, src
);
1002 emit_2ub(p
, 0x66, X86_TWOB
);
1003 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
1009 /***********************************************************************
1012 static void note_x87_pop( struct x86_function
*p
)
1015 assert(p
->x87_stack
>= 0);
1018 static void note_x87_push( struct x86_function
*p
)
1021 assert(p
->x87_stack
<= 7);
1024 void x87_assert_stack_empty( struct x86_function
*p
)
1026 assert (p
->x87_stack
== 0);
1030 void x87_fist( struct x86_function
*p
, struct x86_reg dst
)
1034 emit_modrm_noreg(p
, 2, dst
);
1037 void x87_fistp( struct x86_function
*p
, struct x86_reg dst
)
1041 emit_modrm_noreg(p
, 3, dst
);
1045 void x87_fild( struct x86_function
*p
, struct x86_reg arg
)
1049 emit_modrm_noreg(p
, 0, arg
);
1053 void x87_fldz( struct x86_function
*p
)
1056 emit_2ub(p
, 0xd9, 0xee);
1061 void x87_fldcw( struct x86_function
*p
, struct x86_reg arg
)
1064 assert(arg
.file
== file_REG32
);
1065 assert(arg
.mod
!= mod_REG
);
1067 emit_modrm_noreg(p
, 5, arg
);
1070 void x87_fld1( struct x86_function
*p
)
1073 emit_2ub(p
, 0xd9, 0xe8);
1077 void x87_fldl2e( struct x86_function
*p
)
1080 emit_2ub(p
, 0xd9, 0xea);
1084 void x87_fldln2( struct x86_function
*p
)
1087 emit_2ub(p
, 0xd9, 0xed);
/* FWAIT (9B): wait for pending x87 exceptions. */
void x87_fwait( struct x86_function *p )
{
   DUMP();
   emit_1ub(p, 0x9b);
}

/* FNCLEX (DB E2): clear exceptions without waiting. */
void x87_fnclex( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xdb, 0xe2);
}

/* FCLEX: wait, then clear exceptions. */
void x87_fclex( struct x86_function *p )
{
   x87_fwait(p);
   x87_fnclex(p);
}
1109 void x87_fcmovb( struct x86_function
*p
, struct x86_reg arg
)
1112 assert(arg
.file
== file_x87
);
1113 emit_2ub(p
, 0xda, 0xc0+arg
.idx
);
1116 void x87_fcmove( struct x86_function
*p
, struct x86_reg arg
)
1119 assert(arg
.file
== file_x87
);
1120 emit_2ub(p
, 0xda, 0xc8+arg
.idx
);
1123 void x87_fcmovbe( struct x86_function
*p
, struct x86_reg arg
)
1126 assert(arg
.file
== file_x87
);
1127 emit_2ub(p
, 0xda, 0xd0+arg
.idx
);
1130 void x87_fcmovnb( struct x86_function
*p
, struct x86_reg arg
)
1133 assert(arg
.file
== file_x87
);
1134 emit_2ub(p
, 0xdb, 0xc0+arg
.idx
);
1137 void x87_fcmovne( struct x86_function
*p
, struct x86_reg arg
)
1140 assert(arg
.file
== file_x87
);
1141 emit_2ub(p
, 0xdb, 0xc8+arg
.idx
);
1144 void x87_fcmovnbe( struct x86_function
*p
, struct x86_reg arg
)
1147 assert(arg
.file
== file_x87
);
1148 emit_2ub(p
, 0xdb, 0xd0+arg
.idx
);
1153 static void x87_arith_op( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg arg
,
1154 unsigned char dst0ub0
,
1155 unsigned char dst0ub1
,
1156 unsigned char arg0ub0
,
1157 unsigned char arg0ub1
,
1158 unsigned char argmem_noreg
)
1160 assert(dst
.file
== file_x87
);
1162 if (arg
.file
== file_x87
) {
1164 emit_2ub(p
, dst0ub0
, dst0ub1
+arg
.idx
);
1165 else if (arg
.idx
== 0)
1166 emit_2ub(p
, arg0ub0
, arg0ub1
+arg
.idx
);
1170 else if (dst
.idx
== 0) {
1171 assert(arg
.file
== file_REG32
);
1173 emit_modrm_noreg(p
, argmem_noreg
, arg
);
1179 void x87_fmul( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1181 DUMP_RR( dst
, src
);
1182 x87_arith_op(p
, dst
, src
,
1188 void x87_fsub( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1190 DUMP_RR( dst
, src
);
1191 x87_arith_op(p
, dst
, src
,
1197 void x87_fsubr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1199 DUMP_RR( dst
, src
);
1200 x87_arith_op(p
, dst
, src
,
1206 void x87_fadd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1208 DUMP_RR( dst
, src
);
1209 x87_arith_op(p
, dst
, src
,
1215 void x87_fdiv( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1217 DUMP_RR( dst
, src
);
1218 x87_arith_op(p
, dst
, src
,
1224 void x87_fdivr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1226 DUMP_RR( dst
, src
);
1227 x87_arith_op(p
, dst
, src
,
1233 void x87_fmulp( struct x86_function
*p
, struct x86_reg dst
)
1236 assert(dst
.file
== file_x87
);
1237 assert(dst
.idx
>= 1);
1238 emit_2ub(p
, 0xde, 0xc8+dst
.idx
);
1242 void x87_fsubp( struct x86_function
*p
, struct x86_reg dst
)
1245 assert(dst
.file
== file_x87
);
1246 assert(dst
.idx
>= 1);
1247 emit_2ub(p
, 0xde, 0xe8+dst
.idx
);
1251 void x87_fsubrp( struct x86_function
*p
, struct x86_reg dst
)
1254 assert(dst
.file
== file_x87
);
1255 assert(dst
.idx
>= 1);
1256 emit_2ub(p
, 0xde, 0xe0+dst
.idx
);
1260 void x87_faddp( struct x86_function
*p
, struct x86_reg dst
)
1263 assert(dst
.file
== file_x87
);
1264 assert(dst
.idx
>= 1);
1265 emit_2ub(p
, 0xde, 0xc0+dst
.idx
);
1269 void x87_fdivp( struct x86_function
*p
, struct x86_reg dst
)
1272 assert(dst
.file
== file_x87
);
1273 assert(dst
.idx
>= 1);
1274 emit_2ub(p
, 0xde, 0xf8+dst
.idx
);
1278 void x87_fdivrp( struct x86_function
*p
, struct x86_reg dst
)
1281 assert(dst
.file
== file_x87
);
1282 assert(dst
.idx
>= 1);
1283 emit_2ub(p
, 0xde, 0xf0+dst
.idx
);
1287 void x87_ftst( struct x86_function
*p
)
1290 emit_2ub(p
, 0xd9, 0xe4);
1293 void x87_fucom( struct x86_function
*p
, struct x86_reg arg
)
1296 assert(arg
.file
== file_x87
);
1297 emit_2ub(p
, 0xdd, 0xe0+arg
.idx
);
1300 void x87_fucomp( struct x86_function
*p
, struct x86_reg arg
)
1303 assert(arg
.file
== file_x87
);
1304 emit_2ub(p
, 0xdd, 0xe8+arg
.idx
);
1308 void x87_fucompp( struct x86_function
*p
)
1311 emit_2ub(p
, 0xda, 0xe9);
1312 note_x87_pop(p
); /* pop twice */
1313 note_x87_pop(p
); /* pop twice */
1316 void x87_fxch( struct x86_function
*p
, struct x86_reg arg
)
1319 assert(arg
.file
== file_x87
);
1320 emit_2ub(p
, 0xd9, 0xc8+arg
.idx
);
/* Single-byte-extension x87 unary ops on ST0 (all D9 xx). Shadow stack
 * effects follow the ISA: FSINCOS and FXTRACT push one value; FYL2X and
 * FYL2XP1 consume ST0 (pop).
 */
void x87_fabs( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe1);
}

void x87_fchs( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xe0);
}

void x87_fcos( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xff);
}

void x87_fprndint( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfc);
}

void x87_fscale( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfd);
}

void x87_fsin( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfe);
}

void x87_fsincos( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfb);
   note_x87_push(p);
}

void x87_fsqrt( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xfa);
}

void x87_fxtract( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf4);
   note_x87_push(p);
}

/* 2^st0 - 1.0
 * Restrictions: -1.0 <= st0 <= 1.0
 */
void x87_f2xm1( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf0);
}

/* st1 = st1 * log2(st0);
 * pop stack
 */
void x87_fyl2x( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf1);
   note_x87_pop(p);
}

/* st1 = st1 * log2(st0 + 1.0);
 * pop stack
 *
 * A fast operation, with restrictions: -.29 < st0 < .29
 */
void x87_fyl2xp1( struct x86_function *p )
{
   DUMP();
   emit_2ub(p, 0xd9, 0xf9);
   note_x87_pop(p);
}
1411 void x87_fld( struct x86_function
*p
, struct x86_reg arg
)
1414 if (arg
.file
== file_x87
)
1415 emit_2ub(p
, 0xd9, 0xc0 + arg
.idx
);
1418 emit_modrm_noreg(p
, 0, arg
);
1423 void x87_fst( struct x86_function
*p
, struct x86_reg dst
)
1426 if (dst
.file
== file_x87
)
1427 emit_2ub(p
, 0xdd, 0xd0 + dst
.idx
);
1430 emit_modrm_noreg(p
, 2, dst
);
1434 void x87_fstp( struct x86_function
*p
, struct x86_reg dst
)
1437 if (dst
.file
== file_x87
)
1438 emit_2ub(p
, 0xdd, 0xd8 + dst
.idx
);
1441 emit_modrm_noreg(p
, 3, dst
);
1446 void x87_fpop( struct x86_function
*p
)
1448 x87_fstp( p
, x86_make_reg( file_x87
, 0 ));
1452 void x87_fcom( struct x86_function
*p
, struct x86_reg dst
)
1455 if (dst
.file
== file_x87
)
1456 emit_2ub(p
, 0xd8, 0xd0 + dst
.idx
);
1459 emit_modrm_noreg(p
, 2, dst
);
1464 void x87_fcomp( struct x86_function
*p
, struct x86_reg dst
)
1467 if (dst
.file
== file_x87
)
1468 emit_2ub(p
, 0xd8, 0xd8 + dst
.idx
);
1471 emit_modrm_noreg(p
, 3, dst
);
1476 void x87_fcomi( struct x86_function
*p
, struct x86_reg arg
)
1479 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1482 void x87_fcomip( struct x86_function
*p
, struct x86_reg arg
)
1485 emit_2ub(p
, 0xdb, 0xf0+arg
.idx
);
1490 void x87_fnstsw( struct x86_function
*p
, struct x86_reg dst
)
1493 assert(dst
.file
== file_REG32
);
1495 if (dst
.idx
== reg_AX
&&
1497 emit_2ub(p
, 0xdf, 0xe0);
1500 emit_modrm_noreg(p
, 7, dst
);
1505 void x87_fnstcw( struct x86_function
*p
, struct x86_reg dst
)
1508 assert(dst
.file
== file_REG32
);
1510 emit_1ub(p
, 0x9b); /* WAIT -- needed? */
1512 emit_modrm_noreg(p
, 7, dst
);
1518 /***********************************************************************
1522 void mmx_emms( struct x86_function
*p
)
1525 assert(p
->need_emms
);
1526 emit_2ub(p
, 0x0f, 0x77);
1530 void mmx_packssdw( struct x86_function
*p
,
1532 struct x86_reg src
)
1534 DUMP_RR( dst
, src
);
1535 assert(dst
.file
== file_MMX
&&
1536 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1540 emit_2ub(p
, X86_TWOB
, 0x6b);
1541 emit_modrm( p
, dst
, src
);
1544 void mmx_packuswb( struct x86_function
*p
,
1546 struct x86_reg src
)
1548 DUMP_RR( dst
, src
);
1549 assert(dst
.file
== file_MMX
&&
1550 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1554 emit_2ub(p
, X86_TWOB
, 0x67);
1555 emit_modrm( p
, dst
, src
);
1558 void mmx_movd( struct x86_function
*p
,
1560 struct x86_reg src
)
1562 DUMP_RR( dst
, src
);
1564 emit_1ub(p
, X86_TWOB
);
1565 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
1568 void mmx_movq( struct x86_function
*p
,
1570 struct x86_reg src
)
1572 DUMP_RR( dst
, src
);
1574 emit_1ub(p
, X86_TWOB
);
1575 emit_op_modrm( p
, 0x6f, 0x7f, dst
, src
);
1579 /***********************************************************************
1584 void x86_cdecl_caller_push_regs( struct x86_function
*p
)
1586 x86_push(p
, x86_make_reg(file_REG32
, reg_AX
));
1587 x86_push(p
, x86_make_reg(file_REG32
, reg_CX
));
1588 x86_push(p
, x86_make_reg(file_REG32
, reg_DX
));
1591 void x86_cdecl_caller_pop_regs( struct x86_function
*p
)
1593 x86_pop(p
, x86_make_reg(file_REG32
, reg_DX
));
1594 x86_pop(p
, x86_make_reg(file_REG32
, reg_CX
));
1595 x86_pop(p
, x86_make_reg(file_REG32
, reg_AX
));
1599 /* Retreive a reference to one of the function arguments, taking into
1600 * account any push/pop activity:
1602 struct x86_reg
x86_fn_arg( struct x86_function
*p
,
1605 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
1606 p
->stack_offset
+ arg
* 4); /* ??? */
1610 void x86_init_func( struct x86_function
*p
)
1618 void x86_init_func_size( struct x86_function
*p
, unsigned code_size
)
1620 p
->size
= code_size
;
1621 p
->store
= rtasm_exec_malloc(code_size
);
1622 if (p
->store
== NULL
) {
1623 p
->store
= p
->error_overflow
;
1629 void x86_release_func( struct x86_function
*p
)
1631 if (p
->store
&& p
->store
!= p
->error_overflow
)
1632 rtasm_exec_free(p
->store
);
1640 void (*x86_get_func( struct x86_function
*p
))(void)
1643 if (DISASSEM
&& p
->store
)
1644 debug_printf("disassemble %p %p\n", p
->store
, p
->csr
);
1646 if (p
->store
== p
->error_overflow
)
1647 return (void (*)(void)) NULL
;
1649 return (void (*)(void)) p
->store
;
1654 void x86sse_dummy( void )