1 /**************************************************************************
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
19 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 **************************************************************************/
24 #if defined(__i386__) || defined(__386__) || defined(i386)
26 #include "pipe/p_compiler.h"
27 #include "pipe/p_debug.h"
28 #include "pipe/p_pointer.h"
30 #include "rtasm_execmem.h"
31 #include "rtasm_x86sse.h"
45 if (reg
.mod
!= mod_REG
)
51 case reg_AX
: debug_printf( "EAX" ); break;
52 case reg_CX
: debug_printf( "ECX" ); break;
53 case reg_DX
: debug_printf( "EDX" ); break;
54 case reg_BX
: debug_printf( "EBX" ); break;
55 case reg_SP
: debug_printf( "ESP" ); break;
56 case reg_BP
: debug_printf( "EBP" ); break;
57 case reg_SI
: debug_printf( "ESI" ); break;
58 case reg_DI
: debug_printf( "EDI" ); break;
62 debug_printf( "MMX%u", reg
.idx
);
65 debug_printf( "XMM%u", reg
.idx
);
68 debug_printf( "fp%u", reg
.idx
);
72 if (reg
.mod
== mod_DISP8
||
73 reg
.mod
== mod_DISP32
)
74 debug_printf("+%d", reg
.disp
);
76 if (reg
.mod
!= mod_REG
)
81 #define DUMP_START() debug_printf( "\n" )
82 #define DUMP_END() debug_printf( "\n" )
85 const char *foo = __FUNCTION__; \
86 while (*foo && *foo != '_') \
90 debug_printf( "\n% 15s ", foo ); \
93 #define DUMP_I( I ) do { \
95 debug_printf( "%u", I ); \
98 #define DUMP_R( R0 ) do { \
103 #define DUMP_RR( R0, R1 ) do { \
106 debug_printf( ", " ); \
110 #define DUMP_RI( R0, I ) do { \
113 debug_printf( ", %u", I ); \
116 #define DUMP_RRI( R0, R1, I ) do { \
119 debug_printf( ", " ); \
121 debug_printf( ", %u", I ); \
131 #define DUMP_RR( R0, R1 )
132 #define DUMP_RI( R0, I )
133 #define DUMP_RRI( R0, R1, I )
138 static void do_realloc( struct x86_function
*p
)
140 if (p
->store
== p
->error_overflow
) {
143 else if (p
->size
== 0) {
145 p
->store
= rtasm_exec_malloc(p
->size
);
149 uintptr_t used
= pointer_to_uintptr( p
->csr
) - pointer_to_uintptr( p
->store
);
150 unsigned char *tmp
= p
->store
;
152 p
->store
= rtasm_exec_malloc(p
->size
);
155 memcpy(p
->store
, tmp
, used
);
156 p
->csr
= p
->store
+ used
;
162 rtasm_exec_free(tmp
);
165 if (p
->store
== NULL
) {
166 p
->store
= p
->csr
= p
->error_overflow
;
167 p
->size
= sizeof(p
->error_overflow
);
171 /* Emit bytes to the instruction stream:
173 static unsigned char *reserve( struct x86_function
*p
, int bytes
)
175 if (p
->csr
+ bytes
- p
->store
> (int) p
->size
)
179 unsigned char *csr
= p
->csr
;
/* Low-level emitters: append one signed byte, one 32-bit int, or one
 * to three unsigned opcode bytes to the instruction stream.
 */
static void emit_1b( struct x86_function *p, char b0 )
{
   char *csr = (char *)reserve(p, 1);
   *csr = b0;
}

static void emit_1i( struct x86_function *p, int i0 )
{
   /* Unaligned int store — fine on x86, the only target of this file. */
   int *icsr = (int *)reserve(p, sizeof(i0));
   *icsr = i0;
}

static void emit_1ub( struct x86_function *p, unsigned char b0 )
{
   unsigned char *csr = reserve(p, 1);
   *csr++ = b0;
}

static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
{
   unsigned char *csr = reserve(p, 2);
   *csr++ = b0;
   *csr++ = b1;
}

static void emit_3ub( struct x86_function *p, unsigned char b0, unsigned char b1, unsigned char b2 )
{
   unsigned char *csr = reserve(p, 3);
   *csr++ = b0;
   *csr++ = b1;
   *csr++ = b2;
}
221 /* Build a modRM byte + possible displacement. No treatment of SIB
222 * indexing. BZZT - no way to encode an absolute address.
224 static void emit_modrm( struct x86_function
*p
,
226 struct x86_reg regmem
)
228 unsigned char val
= 0;
230 assert(reg
.mod
== mod_REG
);
232 val
|= regmem
.mod
<< 6; /* mod field */
233 val
|= reg
.idx
<< 3; /* reg field */
234 val
|= regmem
.idx
; /* r/m field */
238 /* Oh-oh we've stumbled into the SIB thing.
240 if (regmem
.file
== file_REG32
&&
241 regmem
.idx
== reg_SP
) {
242 emit_1ub(p
, 0x24); /* simplistic! */
245 switch (regmem
.mod
) {
250 emit_1b(p
, (char) regmem
.disp
);
253 emit_1i(p
, regmem
.disp
);
262 static void emit_modrm_noreg( struct x86_function
*p
,
264 struct x86_reg regmem
)
266 struct x86_reg dummy
= x86_make_reg(file_REG32
, op
);
267 emit_modrm(p
, dummy
, regmem
);
270 /* Many x86 instructions have two opcodes to cope with the situations
271 * where the destination is a register or memory reference
272 * respectively. This function selects the correct opcode based on
273 * the arguments presented.
275 static void emit_op_modrm( struct x86_function
*p
,
276 unsigned char op_dst_is_reg
,
277 unsigned char op_dst_is_mem
,
283 emit_1ub(p
, op_dst_is_reg
);
284 emit_modrm(p
, dst
, src
);
289 assert(src
.mod
== mod_REG
);
290 emit_1ub(p
, op_dst_is_mem
);
291 emit_modrm(p
, src
, dst
);
305 /* Create and manipulate registers and regmem values:
307 struct x86_reg
x86_make_reg( enum x86_reg_file file
,
308 enum x86_reg_name idx
)
320 struct x86_reg
x86_make_disp( struct x86_reg reg
,
323 assert(reg
.file
== file_REG32
);
325 if (reg
.mod
== mod_REG
)
331 reg
.mod
= mod_INDIRECT
;
332 else if (reg
.disp
<= 127 && reg
.disp
>= -128)
335 reg
.mod
= mod_DISP32
;
340 struct x86_reg
x86_deref( struct x86_reg reg
)
342 return x86_make_disp(reg
, 0);
345 struct x86_reg
x86_get_base_reg( struct x86_reg reg
)
347 return x86_make_reg( reg
.file
, reg
.idx
);
350 int x86_get_label( struct x86_function
*p
)
352 return p
->csr
- p
->store
;
357 /***********************************************************************
362 void x86_jcc( struct x86_function
*p
,
366 int offset
= label
- (x86_get_label(p
) + 2);
370 int amt
= p
->csr
- p
->store
;
371 assert(amt
> -offset
);
374 if (offset
<= 127 && offset
>= -128) {
375 emit_1ub(p
, 0x70 + cc
);
376 emit_1b(p
, (char) offset
);
379 offset
= label
- (x86_get_label(p
) + 6);
380 emit_2ub(p
, 0x0f, 0x80 + cc
);
385 /* Always use a 32bit offset for forward jumps:
387 int x86_jcc_forward( struct x86_function
*p
,
391 emit_2ub(p
, 0x0f, 0x80 + cc
);
393 return x86_get_label(p
);
396 int x86_jmp_forward( struct x86_function
*p
)
401 return x86_get_label(p
);
404 int x86_call_forward( struct x86_function
*p
)
410 return x86_get_label(p
);
413 /* Fixup offset from forward jump:
415 void x86_fixup_fwd_jump( struct x86_function
*p
,
418 *(int *)(p
->store
+ fixup
- 4) = x86_get_label(p
) - fixup
;
421 void x86_jmp( struct x86_function
*p
, int label
)
425 emit_1i(p
, label
- x86_get_label(p
) - 4);
428 void x86_call( struct x86_function
*p
, struct x86_reg reg
)
432 emit_modrm_noreg(p
, 2, reg
);
437 * Temporary. As I need immediate operands, and dont want to mess with the codegen,
438 * I load the immediate into general purpose register and use it.
440 void x86_mov_reg_imm( struct x86_function
*p
, struct x86_reg dst
, int imm
)
443 assert(dst
.mod
== mod_REG
);
444 emit_1ub(p
, 0xb8 + dst
.idx
);
448 void x86_push( struct x86_function
*p
,
452 if (reg
.mod
== mod_REG
)
453 emit_1ub(p
, 0x50 + reg
.idx
);
457 emit_modrm_noreg(p
, 6, reg
);
461 p
->stack_offset
+= 4;
464 void x86_pop( struct x86_function
*p
,
468 assert(reg
.mod
== mod_REG
);
469 emit_1ub(p
, 0x58 + reg
.idx
);
470 p
->stack_offset
-= 4;
473 void x86_inc( struct x86_function
*p
,
477 assert(reg
.mod
== mod_REG
);
478 emit_1ub(p
, 0x40 + reg
.idx
);
481 void x86_dec( struct x86_function
*p
,
485 assert(reg
.mod
== mod_REG
);
486 emit_1ub(p
, 0x48 + reg
.idx
);
489 void x86_ret( struct x86_function
*p
)
492 assert(p
->stack_offset
== 0);
496 void x86_retw( struct x86_function
*p
, unsigned short imm
)
499 emit_3ub(p
, 0xc2, imm
& 0xff, (imm
>> 8) & 0xff);
502 void x86_sahf( struct x86_function
*p
)
508 void x86_mov( struct x86_function
*p
,
513 emit_op_modrm( p
, 0x8b, 0x89, dst
, src
);
516 void x86_xor( struct x86_function
*p
,
521 emit_op_modrm( p
, 0x33, 0x31, dst
, src
);
524 void x86_cmp( struct x86_function
*p
,
529 emit_op_modrm( p
, 0x3b, 0x39, dst
, src
);
532 void x86_lea( struct x86_function
*p
,
538 emit_modrm( p
, dst
, src
);
541 void x86_test( struct x86_function
*p
,
547 emit_modrm( p
, dst
, src
);
550 void x86_add( struct x86_function
*p
,
555 emit_op_modrm(p
, 0x03, 0x01, dst
, src
);
558 /* Calculate EAX * src, results in EDX:EAX.
560 void x86_mul( struct x86_function
*p
,
565 emit_modrm_noreg(p
, 4, src
);
569 void x86_imul( struct x86_function
*p
,
574 emit_2ub(p
, X86_TWOB
, 0xAF);
575 emit_modrm(p
, dst
, src
);
579 void x86_sub( struct x86_function
*p
,
584 emit_op_modrm(p
, 0x2b, 0x29, dst
, src
);
587 void x86_or( struct x86_function
*p
,
592 emit_op_modrm( p
, 0x0b, 0x09, dst
, src
);
595 void x86_and( struct x86_function
*p
,
600 emit_op_modrm( p
, 0x23, 0x21, dst
, src
);
605 /***********************************************************************
610 void sse_movss( struct x86_function
*p
,
615 emit_2ub(p
, 0xF3, X86_TWOB
);
616 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
619 void sse_movaps( struct x86_function
*p
,
624 emit_1ub(p
, X86_TWOB
);
625 emit_op_modrm( p
, 0x28, 0x29, dst
, src
);
628 void sse_movups( struct x86_function
*p
,
633 emit_1ub(p
, X86_TWOB
);
634 emit_op_modrm( p
, 0x10, 0x11, dst
, src
);
637 void sse_movhps( struct x86_function
*p
,
642 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
643 emit_1ub(p
, X86_TWOB
);
644 emit_op_modrm( p
, 0x16, 0x17, dst
, src
); /* cf movlhps */
647 void sse_movlps( struct x86_function
*p
,
652 assert(dst
.mod
!= mod_REG
|| src
.mod
!= mod_REG
);
653 emit_1ub(p
, X86_TWOB
);
654 emit_op_modrm( p
, 0x12, 0x13, dst
, src
); /* cf movhlps */
657 void sse_maxps( struct x86_function
*p
,
662 emit_2ub(p
, X86_TWOB
, 0x5F);
663 emit_modrm( p
, dst
, src
);
666 void sse_maxss( struct x86_function
*p
,
671 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5F);
672 emit_modrm( p
, dst
, src
);
675 void sse_divss( struct x86_function
*p
,
680 emit_3ub(p
, 0xF3, X86_TWOB
, 0x5E);
681 emit_modrm( p
, dst
, src
);
684 void sse_minps( struct x86_function
*p
,
689 emit_2ub(p
, X86_TWOB
, 0x5D);
690 emit_modrm( p
, dst
, src
);
693 void sse_subps( struct x86_function
*p
,
698 emit_2ub(p
, X86_TWOB
, 0x5C);
699 emit_modrm( p
, dst
, src
);
702 void sse_mulps( struct x86_function
*p
,
707 emit_2ub(p
, X86_TWOB
, 0x59);
708 emit_modrm( p
, dst
, src
);
711 void sse_mulss( struct x86_function
*p
,
716 emit_3ub(p
, 0xF3, X86_TWOB
, 0x59);
717 emit_modrm( p
, dst
, src
);
720 void sse_addps( struct x86_function
*p
,
725 emit_2ub(p
, X86_TWOB
, 0x58);
726 emit_modrm( p
, dst
, src
);
729 void sse_addss( struct x86_function
*p
,
734 emit_3ub(p
, 0xF3, X86_TWOB
, 0x58);
735 emit_modrm( p
, dst
, src
);
738 void sse_andnps( struct x86_function
*p
,
743 emit_2ub(p
, X86_TWOB
, 0x55);
744 emit_modrm( p
, dst
, src
);
747 void sse_andps( struct x86_function
*p
,
752 emit_2ub(p
, X86_TWOB
, 0x54);
753 emit_modrm( p
, dst
, src
);
756 void sse_rsqrtps( struct x86_function
*p
,
761 emit_2ub(p
, X86_TWOB
, 0x52);
762 emit_modrm( p
, dst
, src
);
765 void sse_rsqrtss( struct x86_function
*p
,
770 emit_3ub(p
, 0xF3, X86_TWOB
, 0x52);
771 emit_modrm( p
, dst
, src
);
775 void sse_movhlps( struct x86_function
*p
,
780 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
781 emit_2ub(p
, X86_TWOB
, 0x12);
782 emit_modrm( p
, dst
, src
);
785 void sse_movlhps( struct x86_function
*p
,
790 assert(dst
.mod
== mod_REG
&& src
.mod
== mod_REG
);
791 emit_2ub(p
, X86_TWOB
, 0x16);
792 emit_modrm( p
, dst
, src
);
795 void sse_orps( struct x86_function
*p
,
800 emit_2ub(p
, X86_TWOB
, 0x56);
801 emit_modrm( p
, dst
, src
);
804 void sse_xorps( struct x86_function
*p
,
809 emit_2ub(p
, X86_TWOB
, 0x57);
810 emit_modrm( p
, dst
, src
);
813 void sse_cvtps2pi( struct x86_function
*p
,
818 assert(dst
.file
== file_MMX
&&
819 (src
.file
== file_XMM
|| src
.mod
!= mod_REG
));
823 emit_2ub(p
, X86_TWOB
, 0x2d);
824 emit_modrm( p
, dst
, src
);
827 void sse2_cvtdq2ps( struct x86_function
*p
,
832 emit_2ub(p
, X86_TWOB
, 0x5b);
833 emit_modrm( p
, dst
, src
);
837 /* Shufps can also be used to implement a reduced swizzle when dest ==
840 void sse_shufps( struct x86_function
*p
,
845 DUMP_RRI( dst
, src
, shuf
);
846 emit_2ub(p
, X86_TWOB
, 0xC6);
847 emit_modrm(p
, dst
, src
);
851 void sse_unpckhps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
854 emit_2ub( p
, X86_TWOB
, 0x15 );
855 emit_modrm( p
, dst
, src
);
858 void sse_unpcklps( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
861 emit_2ub( p
, X86_TWOB
, 0x14 );
862 emit_modrm( p
, dst
, src
);
865 void sse_cmpps( struct x86_function
*p
,
870 DUMP_RRI( dst
, src
, cc
);
871 emit_2ub(p
, X86_TWOB
, 0xC2);
872 emit_modrm(p
, dst
, src
);
876 void sse_pmovmskb( struct x86_function
*p
,
881 emit_3ub(p
, 0x66, X86_TWOB
, 0xD7);
882 emit_modrm(p
, dst
, src
);
885 /***********************************************************************
890 * Perform a reduced swizzle:
892 void sse2_pshufd( struct x86_function
*p
,
897 DUMP_RRI( dst
, src
, shuf
);
898 emit_3ub(p
, 0x66, X86_TWOB
, 0x70);
899 emit_modrm(p
, dst
, src
);
903 void sse2_cvttps2dq( struct x86_function
*p
,
908 emit_3ub( p
, 0xF3, X86_TWOB
, 0x5B );
909 emit_modrm( p
, dst
, src
);
912 void sse2_cvtps2dq( struct x86_function
*p
,
917 emit_3ub(p
, 0x66, X86_TWOB
, 0x5B);
918 emit_modrm( p
, dst
, src
);
921 void sse2_packssdw( struct x86_function
*p
,
926 emit_3ub(p
, 0x66, X86_TWOB
, 0x6B);
927 emit_modrm( p
, dst
, src
);
930 void sse2_packsswb( struct x86_function
*p
,
935 emit_3ub(p
, 0x66, X86_TWOB
, 0x63);
936 emit_modrm( p
, dst
, src
);
939 void sse2_packuswb( struct x86_function
*p
,
944 emit_3ub(p
, 0x66, X86_TWOB
, 0x67);
945 emit_modrm( p
, dst
, src
);
948 void sse2_punpcklbw( struct x86_function
*p
,
953 emit_3ub(p
, 0x66, X86_TWOB
, 0x60);
954 emit_modrm( p
, dst
, src
);
958 void sse2_rcpps( struct x86_function
*p
,
963 emit_2ub(p
, X86_TWOB
, 0x53);
964 emit_modrm( p
, dst
, src
);
967 void sse2_rcpss( struct x86_function
*p
,
972 emit_3ub(p
, 0xF3, X86_TWOB
, 0x53);
973 emit_modrm( p
, dst
, src
);
976 void sse2_movd( struct x86_function
*p
,
981 emit_2ub(p
, 0x66, X86_TWOB
);
982 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
988 /***********************************************************************
991 void x87_fist( struct x86_function
*p
, struct x86_reg dst
)
995 emit_modrm_noreg(p
, 2, dst
);
998 void x87_fistp( struct x86_function
*p
, struct x86_reg dst
)
1002 emit_modrm_noreg(p
, 3, dst
);
1005 void x87_fild( struct x86_function
*p
, struct x86_reg arg
)
1009 emit_modrm_noreg(p
, 0, arg
);
1012 void x87_fldz( struct x86_function
*p
)
1015 emit_2ub(p
, 0xd9, 0xee);
1019 void x87_fldcw( struct x86_function
*p
, struct x86_reg arg
)
1022 assert(arg
.file
== file_REG32
);
1023 assert(arg
.mod
!= mod_REG
);
1025 emit_modrm_noreg(p
, 5, arg
);
1028 void x87_fld1( struct x86_function
*p
)
1031 emit_2ub(p
, 0xd9, 0xe8);
1034 void x87_fldl2e( struct x86_function
*p
)
1037 emit_2ub(p
, 0xd9, 0xea);
1040 void x87_fldln2( struct x86_function
*p
)
1043 emit_2ub(p
, 0xd9, 0xed);
1046 void x87_fwait( struct x86_function
*p
)
1052 void x87_fnclex( struct x86_function
*p
)
1055 emit_2ub(p
, 0xdb, 0xe2);
1058 void x87_fclex( struct x86_function
*p
)
1065 static void x87_arith_op( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg arg
,
1066 unsigned char dst0ub0
,
1067 unsigned char dst0ub1
,
1068 unsigned char arg0ub0
,
1069 unsigned char arg0ub1
,
1070 unsigned char argmem_noreg
)
1072 assert(dst
.file
== file_x87
);
1074 if (arg
.file
== file_x87
) {
1076 emit_2ub(p
, dst0ub0
, dst0ub1
+arg
.idx
);
1077 else if (arg
.idx
== 0)
1078 emit_2ub(p
, arg0ub0
, arg0ub1
+arg
.idx
);
1082 else if (dst
.idx
== 0) {
1083 assert(arg
.file
== file_REG32
);
1085 emit_modrm_noreg(p
, argmem_noreg
, arg
);
1091 void x87_fmul( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1093 DUMP_RR( dst
, src
);
1094 x87_arith_op(p
, dst
, src
,
1100 void x87_fsub( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1102 DUMP_RR( dst
, src
);
1103 x87_arith_op(p
, dst
, src
,
1109 void x87_fsubr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1111 DUMP_RR( dst
, src
);
1112 x87_arith_op(p
, dst
, src
,
1118 void x87_fadd( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1120 DUMP_RR( dst
, src
);
1121 x87_arith_op(p
, dst
, src
,
1127 void x87_fdiv( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1129 DUMP_RR( dst
, src
);
1130 x87_arith_op(p
, dst
, src
,
1136 void x87_fdivr( struct x86_function
*p
, struct x86_reg dst
, struct x86_reg src
)
1138 DUMP_RR( dst
, src
);
1139 x87_arith_op(p
, dst
, src
,
1145 void x87_fmulp( struct x86_function
*p
, struct x86_reg dst
)
1148 assert(dst
.file
== file_x87
);
1149 assert(dst
.idx
>= 1);
1150 emit_2ub(p
, 0xde, 0xc8+dst
.idx
);
1153 void x87_fsubp( struct x86_function
*p
, struct x86_reg dst
)
1156 assert(dst
.file
== file_x87
);
1157 assert(dst
.idx
>= 1);
1158 emit_2ub(p
, 0xde, 0xe8+dst
.idx
);
1161 void x87_fsubrp( struct x86_function
*p
, struct x86_reg dst
)
1164 assert(dst
.file
== file_x87
);
1165 assert(dst
.idx
>= 1);
1166 emit_2ub(p
, 0xde, 0xe0+dst
.idx
);
1169 void x87_faddp( struct x86_function
*p
, struct x86_reg dst
)
1172 assert(dst
.file
== file_x87
);
1173 assert(dst
.idx
>= 1);
1174 emit_2ub(p
, 0xde, 0xc0+dst
.idx
);
1177 void x87_fdivp( struct x86_function
*p
, struct x86_reg dst
)
1180 assert(dst
.file
== file_x87
);
1181 assert(dst
.idx
>= 1);
1182 emit_2ub(p
, 0xde, 0xf8+dst
.idx
);
1185 void x87_fdivrp( struct x86_function
*p
, struct x86_reg dst
)
1188 assert(dst
.file
== file_x87
);
1189 assert(dst
.idx
>= 1);
1190 emit_2ub(p
, 0xde, 0xf0+dst
.idx
);
1193 void x87_fucom( struct x86_function
*p
, struct x86_reg arg
)
1196 assert(arg
.file
== file_x87
);
1197 emit_2ub(p
, 0xdd, 0xe0+arg
.idx
);
1200 void x87_fucomp( struct x86_function
*p
, struct x86_reg arg
)
1203 assert(arg
.file
== file_x87
);
1204 emit_2ub(p
, 0xdd, 0xe8+arg
.idx
);
1207 void x87_fucompp( struct x86_function
*p
)
1210 emit_2ub(p
, 0xda, 0xe9);
1213 void x87_fxch( struct x86_function
*p
, struct x86_reg arg
)
1216 assert(arg
.file
== file_x87
);
1217 emit_2ub(p
, 0xd9, 0xc8+arg
.idx
);
1220 void x87_fabs( struct x86_function
*p
)
1223 emit_2ub(p
, 0xd9, 0xe1);
1226 void x87_fchs( struct x86_function
*p
)
1229 emit_2ub(p
, 0xd9, 0xe0);
1232 void x87_fcos( struct x86_function
*p
)
1235 emit_2ub(p
, 0xd9, 0xff);
1239 void x87_fprndint( struct x86_function
*p
)
1242 emit_2ub(p
, 0xd9, 0xfc);
1245 void x87_fscale( struct x86_function
*p
)
1248 emit_2ub(p
, 0xd9, 0xfd);
1251 void x87_fsin( struct x86_function
*p
)
1254 emit_2ub(p
, 0xd9, 0xfe);
1257 void x87_fsincos( struct x86_function
*p
)
1260 emit_2ub(p
, 0xd9, 0xfb);
1263 void x87_fsqrt( struct x86_function
*p
)
1266 emit_2ub(p
, 0xd9, 0xfa);
1269 void x87_fxtract( struct x86_function
*p
)
1272 emit_2ub(p
, 0xd9, 0xf4);
1277 * Restrictions: -1.0 <= st0 <= 1.0
1279 void x87_f2xm1( struct x86_function
*p
)
1282 emit_2ub(p
, 0xd9, 0xf0);
1285 /* st1 = st1 * log2(st0);
1288 void x87_fyl2x( struct x86_function
*p
)
1291 emit_2ub(p
, 0xd9, 0xf1);
1294 /* st1 = st1 * log2(st0 + 1.0);
1297 * A fast operation, with restrictions: -.29 < st0 < .29
1299 void x87_fyl2xp1( struct x86_function
*p
)
1302 emit_2ub(p
, 0xd9, 0xf9);
1306 void x87_fld( struct x86_function
*p
, struct x86_reg arg
)
1309 if (arg
.file
== file_x87
)
1310 emit_2ub(p
, 0xd9, 0xc0 + arg
.idx
);
1313 emit_modrm_noreg(p
, 0, arg
);
1317 void x87_fst( struct x86_function
*p
, struct x86_reg dst
)
1320 if (dst
.file
== file_x87
)
1321 emit_2ub(p
, 0xdd, 0xd0 + dst
.idx
);
1324 emit_modrm_noreg(p
, 2, dst
);
1328 void x87_fstp( struct x86_function
*p
, struct x86_reg dst
)
1331 if (dst
.file
== file_x87
)
1332 emit_2ub(p
, 0xdd, 0xd8 + dst
.idx
);
1335 emit_modrm_noreg(p
, 3, dst
);
1339 void x87_fcom( struct x86_function
*p
, struct x86_reg dst
)
1342 if (dst
.file
== file_x87
)
1343 emit_2ub(p
, 0xd8, 0xd0 + dst
.idx
);
1346 emit_modrm_noreg(p
, 2, dst
);
1350 void x87_fcomp( struct x86_function
*p
, struct x86_reg dst
)
1353 if (dst
.file
== file_x87
)
1354 emit_2ub(p
, 0xd8, 0xd8 + dst
.idx
);
1357 emit_modrm_noreg(p
, 3, dst
);
1362 void x87_fnstsw( struct x86_function
*p
, struct x86_reg dst
)
1365 assert(dst
.file
== file_REG32
);
1367 if (dst
.idx
== reg_AX
&&
1369 emit_2ub(p
, 0xdf, 0xe0);
1372 emit_modrm_noreg(p
, 7, dst
);
1379 /***********************************************************************
1383 void mmx_emms( struct x86_function
*p
)
1386 assert(p
->need_emms
);
1387 emit_2ub(p
, 0x0f, 0x77);
1391 void mmx_packssdw( struct x86_function
*p
,
1393 struct x86_reg src
)
1395 DUMP_RR( dst
, src
);
1396 assert(dst
.file
== file_MMX
&&
1397 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1401 emit_2ub(p
, X86_TWOB
, 0x6b);
1402 emit_modrm( p
, dst
, src
);
1405 void mmx_packuswb( struct x86_function
*p
,
1407 struct x86_reg src
)
1409 DUMP_RR( dst
, src
);
1410 assert(dst
.file
== file_MMX
&&
1411 (src
.file
== file_MMX
|| src
.mod
!= mod_REG
));
1415 emit_2ub(p
, X86_TWOB
, 0x67);
1416 emit_modrm( p
, dst
, src
);
1419 void mmx_movd( struct x86_function
*p
,
1421 struct x86_reg src
)
1423 DUMP_RR( dst
, src
);
1425 emit_1ub(p
, X86_TWOB
);
1426 emit_op_modrm( p
, 0x6e, 0x7e, dst
, src
);
1429 void mmx_movq( struct x86_function
*p
,
1431 struct x86_reg src
)
1433 DUMP_RR( dst
, src
);
1435 emit_1ub(p
, X86_TWOB
);
1436 emit_op_modrm( p
, 0x6f, 0x7f, dst
, src
);
1440 /***********************************************************************
1445 /* Retreive a reference to one of the function arguments, taking into
1446 * account any push/pop activity:
1448 struct x86_reg
x86_fn_arg( struct x86_function
*p
,
1451 return x86_make_disp(x86_make_reg(file_REG32
, reg_SP
),
1452 p
->stack_offset
+ arg
* 4); /* ??? */
1456 void x86_init_func( struct x86_function
*p
)
1464 void x86_init_func_size( struct x86_function
*p
, unsigned code_size
)
1466 p
->size
= code_size
;
1467 p
->store
= rtasm_exec_malloc(code_size
);
1468 if (p
->store
== NULL
) {
1469 p
->store
= p
->error_overflow
;
1475 void x86_release_func( struct x86_function
*p
)
1477 if (p
->store
&& p
->store
!= p
->error_overflow
)
1478 rtasm_exec_free(p
->store
);
1486 void (*x86_get_func( struct x86_function
*p
))(void)
1489 if (DISASSEM
&& p
->store
)
1490 debug_printf("disassemble %p %p\n", p
->store
, p
->csr
);
1492 if (p
->store
== p
->error_overflow
)
1493 return (void (*)(void)) NULL
;
1495 return (void (*)(void)) p
->store
;
1500 void x86sse_dummy( void )