[mesa.git] / src / gallium / auxiliary / rtasm / rtasm_x86sse.c
1 /**************************************************************************
2 *
3 * Copyright (C) 1999-2005 Brian Paul All Rights Reserved.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
19 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 *
22 **************************************************************************/
23
24 #include "pipe/p_config.h"
25 #include "util/u_cpu_detect.h"
26
27 #if defined(PIPE_ARCH_X86) || defined(PIPE_ARCH_X86_64)
28
29 #include "pipe/p_compiler.h"
30 #include "util/u_debug.h"
31 #include "util/u_pointer.h"
32
33 #include "rtasm_execmem.h"
34 #include "rtasm_x86sse.h"
35
36 #define DISASSEM 0
37 #define X86_TWOB 0x0f
38
39
40 #define DUMP_SSE 0
41
42
43 void x86_print_reg( struct x86_reg reg )
44 {
45 if (reg.mod != mod_REG)
46 debug_printf( "[" );
47
48 switch( reg.file ) {
49 case file_REG32:
50 switch( reg.idx ) {
51 case reg_AX: debug_printf( "EAX" ); break;
52 case reg_CX: debug_printf( "ECX" ); break;
53 case reg_DX: debug_printf( "EDX" ); break;
54 case reg_BX: debug_printf( "EBX" ); break;
55 case reg_SP: debug_printf( "ESP" ); break;
56 case reg_BP: debug_printf( "EBP" ); break;
57 case reg_SI: debug_printf( "ESI" ); break;
58 case reg_DI: debug_printf( "EDI" ); break;
59 }
60 break;
61 case file_MMX:
62 debug_printf( "MMX%u", reg.idx );
63 break;
64 case file_XMM:
65 debug_printf( "XMM%u", reg.idx );
66 break;
67 case file_x87:
68 debug_printf( "fp%u", reg.idx );
69 break;
70 }
71
72 if (reg.mod == mod_DISP8 ||
73 reg.mod == mod_DISP32)
74 debug_printf("+%d", reg.disp);
75
76 if (reg.mod != mod_REG)
77 debug_printf( "]" );
78 }
79
80 #if DUMP_SSE
81
82 #define DUMP_START() debug_printf( "\n" )
83 #define DUMP_END() debug_printf( "\n" )
84
85 #define DUMP() do { \
86 const char *foo = __FUNCTION__; \
87 while (*foo && *foo != '_') \
88 foo++; \
89 if (*foo) \
90 foo++; \
91 debug_printf( "\n%4x %14s ", p->csr - p->store, foo ); \
92 } while (0)
93
94 #define DUMP_I( I ) do { \
95 DUMP(); \
96 debug_printf( "%u", I ); \
97 } while( 0 )
98
99 #define DUMP_R( R0 ) do { \
100 DUMP(); \
101 x86_print_reg( R0 ); \
102 } while( 0 )
103
104 #define DUMP_RR( R0, R1 ) do { \
105 DUMP(); \
106 x86_print_reg( R0 ); \
107 debug_printf( ", " ); \
108 x86_print_reg( R1 ); \
109 } while( 0 )
110
111 #define DUMP_RI( R0, I ) do { \
112 DUMP(); \
113 x86_print_reg( R0 ); \
114 debug_printf( ", %u", I ); \
115 } while( 0 )
116
117 #define DUMP_RRI( R0, R1, I ) do { \
118 DUMP(); \
119 x86_print_reg( R0 ); \
120 debug_printf( ", " ); \
121 x86_print_reg( R1 ); \
122 debug_printf( ", %u", I ); \
123 } while( 0 )
124
125 #else
126
127 #define DUMP_START()
128 #define DUMP_END()
129 #define DUMP( )
130 #define DUMP_I( I )
131 #define DUMP_R( R0 )
132 #define DUMP_RR( R0, R1 )
133 #define DUMP_RI( R0, I )
134 #define DUMP_RRI( R0, R1, I )
135
136 #endif
137
138
139 static void do_realloc( struct x86_function *p )
140 {
141 if (p->store == p->error_overflow) {
142 p->csr = p->store;
143 }
144 else if (p->size == 0) {
145 p->size = 1024;
146 p->store = rtasm_exec_malloc(p->size);
147 p->csr = p->store;
148 }
149 else {
150 uintptr_t used = pointer_to_uintptr( p->csr ) - pointer_to_uintptr( p->store );
151 unsigned char *tmp = p->store;
152 p->size *= 2;
153 p->store = rtasm_exec_malloc(p->size);
154
155 if (p->store) {
156 memcpy(p->store, tmp, used);
157 p->csr = p->store + used;
158 }
159 else {
160 p->csr = p->store;
161 }
162
163 rtasm_exec_free(tmp);
164 }
165
166 if (p->store == NULL) {
167 p->store = p->csr = p->error_overflow;
168 p->size = sizeof(p->error_overflow);
169 }
170 }
171
172 /* Emit bytes to the instruction stream:
173 */
174 static unsigned char *reserve( struct x86_function *p, int bytes )
175 {
176 if (p->csr + bytes - p->store > (int) p->size)
177 do_realloc(p);
178
179 {
180 unsigned char *csr = p->csr;
181 p->csr += bytes;
182 return csr;
183 }
184 }
185
186
187
188 static void emit_1b( struct x86_function *p, char b0 )
189 {
190 char *csr = (char *)reserve(p, 1);
191 *csr = b0;
192 }
193
194 static void emit_1i( struct x86_function *p, int i0 )
195 {
196 int *icsr = (int *)reserve(p, sizeof(i0));
197 *icsr = i0;
198 }
199
200 static void emit_1ub( struct x86_function *p, unsigned char b0 )
201 {
202 unsigned char *csr = reserve(p, 1);
203 *csr++ = b0;
204 }
205
206 static void emit_2ub( struct x86_function *p, unsigned char b0, unsigned char b1 )
207 {
208 unsigned char *csr = reserve(p, 2);
209 *csr++ = b0;
210 *csr++ = b1;
211 }
212
213 static void emit_3ub( struct x86_function *p, unsigned char b0, unsigned char b1, unsigned char b2 )
214 {
215 unsigned char *csr = reserve(p, 3);
216 *csr++ = b0;
217 *csr++ = b1;
218 *csr++ = b2;
219 }
220
221
222 /* Build a modRM byte + possible displacement. No treatment of SIB
223 * indexing. BZZT - no way to encode an absolute address.
224 *
225 * This is the "/r" field in the x86 manuals...
226 */
227 static void emit_modrm( struct x86_function *p,
228 struct x86_reg reg,
229 struct x86_reg regmem )
230 {
231 unsigned char val = 0;
232
233 assert(reg.mod == mod_REG);
234
235 /* TODO: support extended x86-64 registers */
236 assert(reg.idx < 8);
237 assert(regmem.idx < 8);
238
239 val |= regmem.mod << 6; /* mod field */
240 val |= reg.idx << 3; /* reg field */
241 val |= regmem.idx; /* r/m field */
242
243 emit_1ub(p, val);
244
245 /* Oh-oh we've stumbled into the SIB thing.
246 */
247 if (regmem.file == file_REG32 &&
248 regmem.idx == reg_SP &&
249 regmem.mod != mod_REG) {
250 emit_1ub(p, 0x24); /* simplistic! */
251 }
252
253 switch (regmem.mod) {
254 case mod_REG:
255 case mod_INDIRECT:
256 break;
257 case mod_DISP8:
258 emit_1b(p, (char) regmem.disp);
259 break;
260 case mod_DISP32:
261 emit_1i(p, regmem.disp);
262 break;
263 default:
264 assert(0);
265 break;
266 }
267 }
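
/* Worked example (illustrative, not part of the original file):
 *
 *    x86_mov(p, x86_make_reg(file_REG32, reg_AX),
 *               x86_make_disp(x86_make_reg(file_REG32, reg_BX), 8));
 *
 * emits 8B 43 08, i.e. "mov eax, [ebx+8]": opcode 0x8b, then a modrm
 * byte with mod=01 (disp8), reg=000 (EAX), r/m=011 (EBX), followed by
 * the 8-bit displacement 0x08.
 */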
268
269 /* Emits the "/0".."/7" specialized versions of the modrm ("/r") bytes.
270 */
271 static void emit_modrm_noreg( struct x86_function *p,
272 unsigned op,
273 struct x86_reg regmem )
274 {
275 struct x86_reg dummy = x86_make_reg(file_REG32, op);
276 emit_modrm(p, dummy, regmem);
277 }
278
279 /* Many x86 instructions have two opcodes to cope with the situations
280 * where the destination is a register or memory reference
281 * respectively. This function selects the correct opcode based on
282 * the arguments presented.
283 */
284 static void emit_op_modrm( struct x86_function *p,
285 unsigned char op_dst_is_reg,
286 unsigned char op_dst_is_mem,
287 struct x86_reg dst,
288 struct x86_reg src )
289 {
290 switch (dst.mod) {
291 case mod_REG:
292 emit_1ub(p, op_dst_is_reg);
293 emit_modrm(p, dst, src);
294 break;
295 case mod_INDIRECT:
296 case mod_DISP32:
297 case mod_DISP8:
298 assert(src.mod == mod_REG);
299 emit_1ub(p, op_dst_is_mem);
300 emit_modrm(p, src, dst);
301 break;
302 default:
303 assert(0);
304 break;
305 }
306 }
307
308
309
310
311
312
313
314 /* Create and manipulate registers and regmem values:
315 */
316 struct x86_reg x86_make_reg( enum x86_reg_file file,
317 enum x86_reg_name idx )
318 {
319 struct x86_reg reg;
320
321 reg.file = file;
322 reg.idx = idx;
323 reg.mod = mod_REG;
324 reg.disp = 0;
325
326 return reg;
327 }
328
329 struct x86_reg x86_make_disp( struct x86_reg reg,
330 int disp )
331 {
332 assert(reg.file == file_REG32);
333
334 if (reg.mod == mod_REG)
335 reg.disp = disp;
336 else
337 reg.disp += disp;
338
339 if (reg.disp == 0 && reg.idx != reg_BP)
340 reg.mod = mod_INDIRECT;
341 else if (reg.disp <= 127 && reg.disp >= -128)
342 reg.mod = mod_DISP8;
343 else
344 reg.mod = mod_DISP32;
345
346 return reg;
347 }
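
/* Addressing-mode selection sketch (illustrative; ebx/ebp stand for
 * x86_make_reg(file_REG32, reg_BX/reg_BP)):
 *
 *    x86_make_disp(ebx,   0)  ->  mod_INDIRECT   [ebx]
 *    x86_make_disp(ebx,   8)  ->  mod_DISP8      [ebx+8]
 *    x86_make_disp(ebx, 512)  ->  mod_DISP32     [ebx+512]
 *    x86_make_disp(ebp,   0)  ->  mod_DISP8      [ebp+0]
 *
 * EBP gets the last case because mod=00 with r/m=101 means a bare
 * disp32, so a plain [ebp] has to be encoded as [ebp+0] with a disp8.
 */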
348
349 struct x86_reg x86_deref( struct x86_reg reg )
350 {
351 return x86_make_disp(reg, 0);
352 }
353
354 struct x86_reg x86_get_base_reg( struct x86_reg reg )
355 {
356 return x86_make_reg( reg.file, reg.idx );
357 }
358
359 int x86_get_label( struct x86_function *p )
360 {
361 return p->csr - p->store;
362 }
363
364
365
366 /***********************************************************************
367 * x86 instructions
368 */
369
370
371 void x64_rexw(struct x86_function *p)
372 {
373 if(x86_target(p) != X86_32)
374 emit_1ub(p, 0x48);
375 }
376
377 void x86_jcc( struct x86_function *p,
378 enum x86_cc cc,
379 int label )
380 {
381 int offset = label - (x86_get_label(p) + 2);
382 DUMP_I(cc);
383
384 if (offset < 0) {
385 /*assert(p->csr - p->store > -offset);*/
386 if (p->csr - p->store <= -offset) {
387 /* probably out of memory (using the error_overflow buffer) */
388 return;
389 }
390 }
391
392 if (offset <= 127 && offset >= -128) {
393 emit_1ub(p, 0x70 + cc);
394 emit_1b(p, (char) offset);
395 }
396 else {
397 offset = label - (x86_get_label(p) + 6);
398 emit_2ub(p, 0x0f, 0x80 + cc);
399 emit_1i(p, offset);
400 }
401 }
402
403 /* Always use a 32bit offset for forward jumps:
404 */
405 int x86_jcc_forward( struct x86_function *p,
406 enum x86_cc cc )
407 {
408 DUMP_I(cc);
409 emit_2ub(p, 0x0f, 0x80 + cc);
410 emit_1i(p, 0);
411 return x86_get_label(p);
412 }
413
414 int x86_jmp_forward( struct x86_function *p)
415 {
416 DUMP();
417 emit_1ub(p, 0xe9);
418 emit_1i(p, 0);
419 return x86_get_label(p);
420 }
421
422 int x86_call_forward( struct x86_function *p)
423 {
424 DUMP();
425
426 emit_1ub(p, 0xe8);
427 emit_1i(p, 0);
428 return x86_get_label(p);
429 }
430
431 /* Fixup offset from forward jump:
432 */
433 void x86_fixup_fwd_jump( struct x86_function *p,
434 int fixup )
435 {
436 *(int *)(p->store + fixup - 4) = x86_get_label(p) - fixup;
437 }
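
/* Typical forward-branch pattern with the helpers above (illustrative
 * sketch; "skip" is a hypothetical local):
 *
 *    int skip = x86_jcc_forward(p, cc_E);   -- branch with 0 placeholder
 *    ... emit the code that is jumped over ...
 *    x86_fixup_fwd_jump(p, skip);           -- patch the 32-bit offset
 *
 * x86_jcc_forward() returns the label just past the 4-byte offset, and
 * x86_fixup_fwd_jump() rewrites those 4 bytes so the branch reaches the
 * current position.
 */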
438
439 void x86_jmp( struct x86_function *p, int label)
440 {
441 DUMP_I( label );
442 emit_1ub(p, 0xe9);
443 emit_1i(p, label - x86_get_label(p) - 4);
444 }
445
446 void x86_call( struct x86_function *p, struct x86_reg reg)
447 {
448 DUMP_R( reg );
449 emit_1ub(p, 0xff);
450 emit_modrm_noreg(p, 2, reg);
451 }
452
453
454 void x86_mov_reg_imm( struct x86_function *p, struct x86_reg dst, int imm )
455 {
456 DUMP_RI( dst, imm );
457 assert(dst.file == file_REG32);
458 assert(dst.mod == mod_REG);
459 emit_1ub(p, 0xb8 + dst.idx);
460 emit_1i(p, imm);
461 }
462
463 void x86_mov_imm( struct x86_function *p, struct x86_reg dst, int imm )
464 {
465 DUMP_RI( dst, imm );
466 if(dst.mod == mod_REG)
467 x86_mov_reg_imm(p, dst, imm);
468 else
469 {
470 emit_1ub(p, 0xc7);
471 emit_modrm_noreg(p, 0, dst);
472 emit_1i(p, imm);
473 }
474 }
475
476 void x86_mov16_imm( struct x86_function *p, struct x86_reg dst, uint16_t imm )
477 {
478 DUMP_RI( dst, imm );
479 emit_1ub(p, 0x66);
480 if(dst.mod == mod_REG)
481 {
482 emit_1ub(p, 0xb8 + dst.idx);
483 emit_2ub(p, imm & 0xff, imm >> 8);
484 }
485 else
486 {
487 emit_1ub(p, 0xc7);
488 emit_modrm_noreg(p, 0, dst);
489 emit_2ub(p, imm & 0xff, imm >> 8);
490 }
491 }
492
493 void x86_mov8_imm( struct x86_function *p, struct x86_reg dst, uint8_t imm )
494 {
495 DUMP_RI( dst, imm );
496 if(dst.mod == mod_REG)
497 {
498 emit_1ub(p, 0xb0 + dst.idx);
499 emit_1ub(p, imm);
500 }
501 else
502 {
503 emit_1ub(p, 0xc6);
504 emit_modrm_noreg(p, 0, dst);
505 emit_1ub(p, imm);
506 }
507 }
508
509 /**
510 * Immediate group 1 instructions.
511 */
512 static INLINE void
513 x86_group1_imm( struct x86_function *p,
514 unsigned op, struct x86_reg dst, int imm )
515 {
516 assert(dst.file == file_REG32);
517 assert(dst.mod == mod_REG);
518 if(-0x80 <= imm && imm < 0x80) {
519 emit_1ub(p, 0x83);
520 emit_modrm_noreg(p, op, dst);
521 emit_1b(p, (char)imm);
522 }
523 else {
524 emit_1ub(p, 0x81);
525 emit_modrm_noreg(p, op, dst);
526 emit_1i(p, imm);
527 }
528 }
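
/* Encoding note (worked example, not from the original source): the
 * sign-extended imm8 form saves three bytes whenever it fits, e.g. with
 * eax = x86_make_reg(file_REG32, reg_AX):
 *
 *    x86_add_imm(p, eax,   4)  ->  83 C0 04           (0x83 /0 ib)
 *    x86_add_imm(p, eax, 300)  ->  81 C0 2C 01 00 00  (0x81 /0 id)
 */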
529
530 void x86_add_imm( struct x86_function *p, struct x86_reg dst, int imm )
531 {
532 DUMP_RI( dst, imm );
533 x86_group1_imm(p, 0, dst, imm);
534 }
535
536 void x86_or_imm( struct x86_function *p, struct x86_reg dst, int imm )
537 {
538 DUMP_RI( dst, imm );
539 x86_group1_imm(p, 1, dst, imm);
540 }
541
542 void x86_and_imm( struct x86_function *p, struct x86_reg dst, int imm )
543 {
544 DUMP_RI( dst, imm );
545 x86_group1_imm(p, 4, dst, imm);
546 }
547
548 void x86_sub_imm( struct x86_function *p, struct x86_reg dst, int imm )
549 {
550 DUMP_RI( dst, imm );
551 x86_group1_imm(p, 5, dst, imm);
552 }
553
554 void x86_xor_imm( struct x86_function *p, struct x86_reg dst, int imm )
555 {
556 DUMP_RI( dst, imm );
557 x86_group1_imm(p, 6, dst, imm);
558 }
559
560 void x86_cmp_imm( struct x86_function *p, struct x86_reg dst, int imm )
561 {
562 DUMP_RI( dst, imm );
563 x86_group1_imm(p, 7, dst, imm);
564 }
565
566
567 void x86_push( struct x86_function *p,
568 struct x86_reg reg )
569 {
570 DUMP_R( reg );
571 if (reg.mod == mod_REG)
572 emit_1ub(p, 0x50 + reg.idx);
573 else
574 {
575 emit_1ub(p, 0xff);
576 emit_modrm_noreg(p, 6, reg);
577 }
578
579
580 p->stack_offset += sizeof(void*);
581 }
582
583 void x86_push_imm32( struct x86_function *p,
584 int imm32 )
585 {
586 DUMP_I( imm32 );
587 emit_1ub(p, 0x68);
588 emit_1i(p, imm32);
589
590 p->stack_offset += sizeof(void*);
591 }
592
593
594 void x86_pop( struct x86_function *p,
595 struct x86_reg reg )
596 {
597 DUMP_R( reg );
598 assert(reg.mod == mod_REG);
599 emit_1ub(p, 0x58 + reg.idx);
600 p->stack_offset -= sizeof(void*);
601 }
602
603 void x86_inc( struct x86_function *p,
604 struct x86_reg reg )
605 {
606 DUMP_R( reg );
607 if(x86_target(p) == X86_32 && reg.mod == mod_REG)
608 {
609 emit_1ub(p, 0x40 + reg.idx);
610 return;
611 }
612 emit_1ub(p, 0xff);
613 emit_modrm_noreg(p, 0, reg);
614 }
615
616 void x86_dec( struct x86_function *p,
617 struct x86_reg reg )
618 {
619 DUMP_R( reg );
620 if(x86_target(p) == X86_32 && reg.mod == mod_REG)
621 {
622 emit_1ub(p, 0x48 + reg.idx);
623 return;
624 }
625 emit_1ub(p, 0xff);
626 emit_modrm_noreg(p, 1, reg);
627 }
628
629 void x86_ret( struct x86_function *p )
630 {
631 DUMP();
632 assert(p->stack_offset == 0);
633 emit_1ub(p, 0xc3);
634 }
635
636 void x86_retw( struct x86_function *p, unsigned short imm )
637 {
638 DUMP();
639 emit_3ub(p, 0xc2, imm & 0xff, (imm >> 8) & 0xff);
640 }
641
642 void x86_sahf( struct x86_function *p )
643 {
644 DUMP();
645 emit_1ub(p, 0x9e);
646 }
647
648 void x86_mov( struct x86_function *p,
649 struct x86_reg dst,
650 struct x86_reg src )
651 {
652 DUMP_RR( dst, src );
653 /* special hack for reading arguments until we support x86-64 registers everywhere */
654 if(src.mod == mod_REG && dst.mod == mod_REG && (src.idx >= 8 || dst.idx >= 8))
655 {
656 uint8_t rex = 0x40;
657 if(dst.idx >= 8)
658 {
659 rex |= 4;
660 dst.idx -= 8;
661 }
662 if(src.idx >= 8)
663 {
664 rex |= 1;
665 src.idx -= 8;
666 }
667 emit_1ub(p, rex);
668 }
669 emit_op_modrm( p, 0x8b, 0x89, dst, src );
670 }
671
672 void x86_mov16( struct x86_function *p,
673 struct x86_reg dst,
674 struct x86_reg src )
675 {
676 DUMP_RR( dst, src );
677 emit_1ub(p, 0x66);
678 emit_op_modrm( p, 0x8b, 0x89, dst, src );
679 }
680
681 void x86_mov8( struct x86_function *p,
682 struct x86_reg dst,
683 struct x86_reg src )
684 {
685 DUMP_RR( dst, src );
686 emit_op_modrm( p, 0x8a, 0x88, dst, src );
687 }
688
689 void x64_mov64( struct x86_function *p,
690 struct x86_reg dst,
691 struct x86_reg src )
692 {
693 uint8_t rex = 0x48;
694 DUMP_RR( dst, src );
695 assert(x86_target(p) != X86_32);
696
697 /* special hack for reading arguments until we support x86-64 registers everywhere */
698 if(src.mod == mod_REG && dst.mod == mod_REG && (src.idx >= 8 || dst.idx >= 8))
699 {
700 if(dst.idx >= 8)
701 {
702 rex |= 4;
703 dst.idx -= 8;
704 }
705 if(src.idx >= 8)
706 {
707 rex |= 1;
708 src.idx -= 8;
709 }
710 }
711 emit_1ub(p, rex);
712 emit_op_modrm( p, 0x8b, 0x89, dst, src );
713 }
714
715 void x86_movzx8(struct x86_function *p, struct x86_reg dst, struct x86_reg src )
716 {
717 DUMP_RR( dst, src );
718 emit_2ub(p, 0x0f, 0xb6);
719 emit_modrm(p, dst, src);
720 }
721
722 void x86_movzx16(struct x86_function *p, struct x86_reg dst, struct x86_reg src )
723 {
724 DUMP_RR( dst, src );
725 emit_2ub(p, 0x0f, 0xb7);
726 emit_modrm(p, dst, src);
727 }
728
729 void x86_xor( struct x86_function *p,
730 struct x86_reg dst,
731 struct x86_reg src )
732 {
733 DUMP_RR( dst, src );
734 emit_op_modrm( p, 0x33, 0x31, dst, src );
735 }
736
737 void x86_cmp( struct x86_function *p,
738 struct x86_reg dst,
739 struct x86_reg src )
740 {
741 DUMP_RR( dst, src );
742 emit_op_modrm( p, 0x3b, 0x39, dst, src );
743 }
744
745 void x86_lea( struct x86_function *p,
746 struct x86_reg dst,
747 struct x86_reg src )
748 {
749 DUMP_RR( dst, src );
750 emit_1ub(p, 0x8d);
751 emit_modrm( p, dst, src );
752 }
753
754 void x86_test( struct x86_function *p,
755 struct x86_reg dst,
756 struct x86_reg src )
757 {
758 DUMP_RR( dst, src );
759 emit_1ub(p, 0x85);
760 emit_modrm( p, dst, src );
761 }
762
763 void x86_add( struct x86_function *p,
764 struct x86_reg dst,
765 struct x86_reg src )
766 {
767 DUMP_RR( dst, src );
768 emit_op_modrm(p, 0x03, 0x01, dst, src );
769 }
770
771 /* Calculate EAX * src, results in EDX:EAX.
772 */
773 void x86_mul( struct x86_function *p,
774 struct x86_reg src )
775 {
776 DUMP_R( src );
777 emit_1ub(p, 0xf7);
778 emit_modrm_noreg(p, 4, src );
779 }
780
781
782 void x86_imul( struct x86_function *p,
783 struct x86_reg dst,
784 struct x86_reg src )
785 {
786 DUMP_RR( dst, src );
787 emit_2ub(p, X86_TWOB, 0xAF);
788 emit_modrm(p, dst, src);
789 }
790
791
792 void x86_sub( struct x86_function *p,
793 struct x86_reg dst,
794 struct x86_reg src )
795 {
796 DUMP_RR( dst, src );
797 emit_op_modrm(p, 0x2b, 0x29, dst, src );
798 }
799
800 void x86_or( struct x86_function *p,
801 struct x86_reg dst,
802 struct x86_reg src )
803 {
804 DUMP_RR( dst, src );
805 emit_op_modrm( p, 0x0b, 0x09, dst, src );
806 }
807
808 void x86_and( struct x86_function *p,
809 struct x86_reg dst,
810 struct x86_reg src )
811 {
812 DUMP_RR( dst, src );
813 emit_op_modrm( p, 0x23, 0x21, dst, src );
814 }
815
816 void x86_div( struct x86_function *p,
817 struct x86_reg src )
818 {
819 assert(src.file == file_REG32 && src.mod == mod_REG);
820 emit_op_modrm(p, 0xf7, 0, x86_make_reg(file_REG32, 6), src);
821 }
822
823 void x86_bswap( struct x86_function *p, struct x86_reg reg )
824 {
825 DUMP_R(reg);
826 assert(reg.file == file_REG32);
827 assert(reg.mod == mod_REG);
828 emit_2ub(p, 0x0f, 0xc8 + reg.idx);
829 }
830
831 void x86_shr_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
832 {
833 DUMP_RI(reg, imm);
834 if(imm == 1)
835 {
836 emit_1ub(p, 0xd1);
837 emit_modrm_noreg(p, 5, reg);
838 }
839 else
840 {
841 emit_1ub(p, 0xc1);
842 emit_modrm_noreg(p, 5, reg);
843 emit_1ub(p, imm);
844 }
845 }
846
847 void x86_sar_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
848 {
849 DUMP_RI(reg, imm);
850 if(imm == 1)
851 {
852 emit_1ub(p, 0xd1);
853 emit_modrm_noreg(p, 7, reg);
854 }
855 else
856 {
857 emit_1ub(p, 0xc1);
858 emit_modrm_noreg(p, 7, reg);
859 emit_1ub(p, imm);
860 }
861 }
862
863 void x86_shl_imm( struct x86_function *p, struct x86_reg reg, unsigned imm )
864 {
865 DUMP_RI(reg, imm);
866 if(imm == 1)
867 {
868 emit_1ub(p, 0xd1);
869 emit_modrm_noreg(p, 4, reg);
870 }
871 else
872 {
873 emit_1ub(p, 0xc1);
874 emit_modrm_noreg(p, 4, reg);
875 emit_1ub(p, imm);
876 }
877 }
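
/* Shift encoding example (illustrative): a count of 1 uses the short
 * 0xd1 form, any other count the 0xc1 form with an imm8, e.g. with
 * eax = x86_make_reg(file_REG32, reg_AX):
 *
 *    x86_shr_imm(p, eax, 1)  ->  D1 E8       (shr eax, 1)
 *    x86_shr_imm(p, eax, 4)  ->  C1 E8 04    (shr eax, 4)
 */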
878
879
880 /***********************************************************************
881 * SSE instructions
882 */
883
884 void sse_prefetchnta( struct x86_function *p, struct x86_reg ptr)
885 {
886 DUMP_R( ptr );
887 assert(ptr.mod != mod_REG);
888 emit_2ub(p, 0x0f, 0x18);
889 emit_modrm_noreg(p, 0, ptr);
890 }
891
892 void sse_prefetch0( struct x86_function *p, struct x86_reg ptr)
893 {
894 DUMP_R( ptr );
895 assert(ptr.mod != mod_REG);
896 emit_2ub(p, 0x0f, 0x18);
897 emit_modrm_noreg(p, 1, ptr);
898 }
899
900 void sse_prefetch1( struct x86_function *p, struct x86_reg ptr)
901 {
902 DUMP_R( ptr );
903 assert(ptr.mod != mod_REG);
904 emit_2ub(p, 0x0f, 0x18);
905 emit_modrm_noreg(p, 2, ptr);
906 }
907
908 void sse_movntps( struct x86_function *p,
909 struct x86_reg dst,
910 struct x86_reg src)
911 {
912 DUMP_RR( dst, src );
913
914 assert(dst.mod != mod_REG);
915 assert(src.mod == mod_REG);
916 emit_2ub(p, 0x0f, 0x2b);
917 emit_modrm(p, src, dst);
918 }
919
920
921
922
923 void sse_movss( struct x86_function *p,
924 struct x86_reg dst,
925 struct x86_reg src )
926 {
927 DUMP_RR( dst, src );
928 emit_2ub(p, 0xF3, X86_TWOB);
929 emit_op_modrm( p, 0x10, 0x11, dst, src );
930 }
931
932 void sse_movaps( struct x86_function *p,
933 struct x86_reg dst,
934 struct x86_reg src )
935 {
936 DUMP_RR( dst, src );
937 emit_1ub(p, X86_TWOB);
938 emit_op_modrm( p, 0x28, 0x29, dst, src );
939 }
940
941 void sse_movups( struct x86_function *p,
942 struct x86_reg dst,
943 struct x86_reg src )
944 {
945 DUMP_RR( dst, src );
946 emit_1ub(p, X86_TWOB);
947 emit_op_modrm( p, 0x10, 0x11, dst, src );
948 }
949
950 void sse_movhps( struct x86_function *p,
951 struct x86_reg dst,
952 struct x86_reg src )
953 {
954 DUMP_RR( dst, src );
955 assert(dst.mod != mod_REG || src.mod != mod_REG);
956 emit_1ub(p, X86_TWOB);
957 emit_op_modrm( p, 0x16, 0x17, dst, src ); /* cf movlhps */
958 }
959
960 void sse_movlps( struct x86_function *p,
961 struct x86_reg dst,
962 struct x86_reg src )
963 {
964 DUMP_RR( dst, src );
965 assert(dst.mod != mod_REG || src.mod != mod_REG);
966 emit_1ub(p, X86_TWOB);
967 emit_op_modrm( p, 0x12, 0x13, dst, src ); /* cf movhlps */
968 }
969
970 void sse_maxps( struct x86_function *p,
971 struct x86_reg dst,
972 struct x86_reg src )
973 {
974 DUMP_RR( dst, src );
975 emit_2ub(p, X86_TWOB, 0x5F);
976 emit_modrm( p, dst, src );
977 }
978
979 void sse_maxss( struct x86_function *p,
980 struct x86_reg dst,
981 struct x86_reg src )
982 {
983 DUMP_RR( dst, src );
984 emit_3ub(p, 0xF3, X86_TWOB, 0x5F);
985 emit_modrm( p, dst, src );
986 }
987
988 void sse_divss( struct x86_function *p,
989 struct x86_reg dst,
990 struct x86_reg src )
991 {
992 DUMP_RR( dst, src );
993 emit_3ub(p, 0xF3, X86_TWOB, 0x5E);
994 emit_modrm( p, dst, src );
995 }
996
997 void sse_minps( struct x86_function *p,
998 struct x86_reg dst,
999 struct x86_reg src )
1000 {
1001 DUMP_RR( dst, src );
1002 emit_2ub(p, X86_TWOB, 0x5D);
1003 emit_modrm( p, dst, src );
1004 }
1005
1006 void sse_subps( struct x86_function *p,
1007 struct x86_reg dst,
1008 struct x86_reg src )
1009 {
1010 DUMP_RR( dst, src );
1011 emit_2ub(p, X86_TWOB, 0x5C);
1012 emit_modrm( p, dst, src );
1013 }
1014
1015 void sse_mulps( struct x86_function *p,
1016 struct x86_reg dst,
1017 struct x86_reg src )
1018 {
1019 DUMP_RR( dst, src );
1020 emit_2ub(p, X86_TWOB, 0x59);
1021 emit_modrm( p, dst, src );
1022 }
1023
1024 void sse_mulss( struct x86_function *p,
1025 struct x86_reg dst,
1026 struct x86_reg src )
1027 {
1028 DUMP_RR( dst, src );
1029 emit_3ub(p, 0xF3, X86_TWOB, 0x59);
1030 emit_modrm( p, dst, src );
1031 }
1032
1033 void sse_addps( struct x86_function *p,
1034 struct x86_reg dst,
1035 struct x86_reg src )
1036 {
1037 DUMP_RR( dst, src );
1038 emit_2ub(p, X86_TWOB, 0x58);
1039 emit_modrm( p, dst, src );
1040 }
1041
1042 void sse_addss( struct x86_function *p,
1043 struct x86_reg dst,
1044 struct x86_reg src )
1045 {
1046 DUMP_RR( dst, src );
1047 emit_3ub(p, 0xF3, X86_TWOB, 0x58);
1048 emit_modrm( p, dst, src );
1049 }
1050
1051 void sse_andnps( struct x86_function *p,
1052 struct x86_reg dst,
1053 struct x86_reg src )
1054 {
1055 DUMP_RR( dst, src );
1056 emit_2ub(p, X86_TWOB, 0x55);
1057 emit_modrm( p, dst, src );
1058 }
1059
1060 void sse_andps( struct x86_function *p,
1061 struct x86_reg dst,
1062 struct x86_reg src )
1063 {
1064 DUMP_RR( dst, src );
1065 emit_2ub(p, X86_TWOB, 0x54);
1066 emit_modrm( p, dst, src );
1067 }
1068
1069 void sse_rsqrtps( struct x86_function *p,
1070 struct x86_reg dst,
1071 struct x86_reg src )
1072 {
1073 DUMP_RR( dst, src );
1074 emit_2ub(p, X86_TWOB, 0x52);
1075 emit_modrm( p, dst, src );
1076 }
1077
1078 void sse_rsqrtss( struct x86_function *p,
1079 struct x86_reg dst,
1080 struct x86_reg src )
1081 {
1082 DUMP_RR( dst, src );
1083 emit_3ub(p, 0xF3, X86_TWOB, 0x52);
1084 emit_modrm( p, dst, src );
1085
1086 }
1087
1088 void sse_movhlps( struct x86_function *p,
1089 struct x86_reg dst,
1090 struct x86_reg src )
1091 {
1092 DUMP_RR( dst, src );
1093 assert(dst.mod == mod_REG && src.mod == mod_REG);
1094 emit_2ub(p, X86_TWOB, 0x12);
1095 emit_modrm( p, dst, src );
1096 }
1097
1098 void sse_movlhps( struct x86_function *p,
1099 struct x86_reg dst,
1100 struct x86_reg src )
1101 {
1102 DUMP_RR( dst, src );
1103 assert(dst.mod == mod_REG && src.mod == mod_REG);
1104 emit_2ub(p, X86_TWOB, 0x16);
1105 emit_modrm( p, dst, src );
1106 }
1107
1108 void sse_orps( struct x86_function *p,
1109 struct x86_reg dst,
1110 struct x86_reg src )
1111 {
1112 DUMP_RR( dst, src );
1113 emit_2ub(p, X86_TWOB, 0x56);
1114 emit_modrm( p, dst, src );
1115 }
1116
1117 void sse_xorps( struct x86_function *p,
1118 struct x86_reg dst,
1119 struct x86_reg src )
1120 {
1121 DUMP_RR( dst, src );
1122 emit_2ub(p, X86_TWOB, 0x57);
1123 emit_modrm( p, dst, src );
1124 }
1125
1126 void sse_cvtps2pi( struct x86_function *p,
1127 struct x86_reg dst,
1128 struct x86_reg src )
1129 {
1130 DUMP_RR( dst, src );
1131 assert(dst.file == file_MMX &&
1132 (src.file == file_XMM || src.mod != mod_REG));
1133
1134 p->need_emms = 1;
1135
1136 emit_2ub(p, X86_TWOB, 0x2d);
1137 emit_modrm( p, dst, src );
1138 }
1139
1140 void sse2_cvtdq2ps( struct x86_function *p,
1141 struct x86_reg dst,
1142 struct x86_reg src )
1143 {
1144 DUMP_RR( dst, src );
1145 emit_2ub(p, X86_TWOB, 0x5b);
1146 emit_modrm( p, dst, src );
1147 }
1148
1149
1150 /* Shufps can also be used to implement a reduced swizzle when dest ==
1151 * arg0.
1152 */
1153 void sse_shufps( struct x86_function *p,
1154 struct x86_reg dst,
1155 struct x86_reg src,
1156 unsigned char shuf)
1157 {
1158 DUMP_RRI( dst, src, shuf );
1159 emit_2ub(p, X86_TWOB, 0xC6);
1160 emit_modrm(p, dst, src);
1161 emit_1ub(p, shuf);
1162 }
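
/* Shuffle immediate sketch: bits [1:0] and [3:2] of "shuf" select the
 * two low result elements from dst, bits [5:4] and [7:6] select the two
 * high elements from src.  With dst == src, shuf == 0x00 therefore
 * broadcasts element 0 to all four lanes (illustrative, xmm0 assumed
 * already loaded, xmm0 = x86_make_reg(file_XMM, 0)):
 *
 *    sse_shufps(p, xmm0, xmm0, 0x00);    -- xmm0 = { x, x, x, x }
 */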
1163
1164 void sse_unpckhps( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1165 {
1166 DUMP_RR( dst, src );
1167 emit_2ub( p, X86_TWOB, 0x15 );
1168 emit_modrm( p, dst, src );
1169 }
1170
1171 void sse_unpcklps( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1172 {
1173 DUMP_RR( dst, src );
1174 emit_2ub( p, X86_TWOB, 0x14 );
1175 emit_modrm( p, dst, src );
1176 }
1177
1178 void sse_cmpps( struct x86_function *p,
1179 struct x86_reg dst,
1180 struct x86_reg src,
1181 enum sse_cc cc)
1182 {
1183 DUMP_RRI( dst, src, cc );
1184 emit_2ub(p, X86_TWOB, 0xC2);
1185 emit_modrm(p, dst, src);
1186 emit_1ub(p, cc);
1187 }
1188
1189 void sse_pmovmskb( struct x86_function *p,
1190 struct x86_reg dst,
1191 struct x86_reg src)
1192 {
1193 DUMP_RR( dst, src );
1194 emit_3ub(p, 0x66, X86_TWOB, 0xD7);
1195 emit_modrm(p, dst, src);
1196 }
1197
1198 void sse_movmskps( struct x86_function *p,
1199 struct x86_reg dst,
1200 struct x86_reg src)
1201 {
1202 DUMP_RR( dst, src );
1203 emit_2ub(p, X86_TWOB, 0x50);
1204 emit_modrm(p, dst, src);
1205 }
1206
1207 /***********************************************************************
1208 * SSE2 instructions
1209 */
1210
1211 void sse2_movd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1212 {
1213 DUMP_RR(dst, src);
1214 emit_2ub(p, 0x66, 0x0f);
1215 if(dst.mod == mod_REG && dst.file == file_REG32)
1216 {
1217 emit_1ub(p, 0x7e);
1218 emit_modrm(p, src, dst);
1219 }
1220 else
1221 {
1222 emit_op_modrm(p, 0x6e, 0x7e, dst, src);
1223 }
1224 }
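
/* Example (illustrative): moving a GPR into an XMM register,
 *
 *    sse2_movd(p, x86_make_reg(file_XMM, 0),
 *                 x86_make_reg(file_REG32, reg_AX));
 *
 * takes the else-branch above and emits 66 0F 6E C0 ("movd xmm0, eax");
 * a REG32 destination instead selects the 0x7e store form.
 */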
1225
1226 void sse2_movq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1227 {
1228 DUMP_RR(dst, src);
1229 switch (dst.mod) {
1230 case mod_REG:
1231 emit_3ub(p, 0xf3, 0x0f, 0x7e);
1232 emit_modrm(p, dst, src);
1233 break;
1234 case mod_INDIRECT:
1235 case mod_DISP32:
1236 case mod_DISP8:
1237 assert(src.mod == mod_REG);
1238 emit_3ub(p, 0x66, 0x0f, 0xd6);
1239 emit_modrm(p, src, dst);
1240 break;
1241 default:
1242 assert(0);
1243 break;
1244 }
1245 }
1246
1247 void sse2_movdqu( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1248 {
1249 DUMP_RR(dst, src);
1250 emit_2ub(p, 0xf3, 0x0f);
1251 emit_op_modrm(p, 0x6f, 0x7f, dst, src);
1252 }
1253
1254 void sse2_movdqa( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1255 {
1256 DUMP_RR(dst, src);
1257 emit_2ub(p, 0x66, 0x0f);
1258 emit_op_modrm(p, 0x6f, 0x7f, dst, src);
1259 }
1260
1261 void sse2_movsd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1262 {
1263 DUMP_RR(dst, src);
1264 emit_2ub(p, 0xf2, 0x0f);
1265 emit_op_modrm(p, 0x10, 0x11, dst, src);
1266 }
1267
1268 void sse2_movupd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1269 {
1270 DUMP_RR(dst, src);
1271 emit_2ub(p, 0x66, 0x0f);
1272 emit_op_modrm(p, 0x10, 0x11, dst, src);
1273 }
1274
1275 void sse2_movapd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1276 {
1277 DUMP_RR(dst, src);
1278 emit_2ub(p, 0x66, 0x0f);
1279 emit_op_modrm(p, 0x28, 0x29, dst, src);
1280 }
1281
1282 /**
1283 * Perform a reduced swizzle:
1284 */
1285 void sse2_pshufd( struct x86_function *p,
1286 struct x86_reg dst,
1287 struct x86_reg src,
1288 unsigned char shuf)
1289 {
1290 DUMP_RRI( dst, src, shuf );
1291 emit_3ub(p, 0x66, X86_TWOB, 0x70);
1292 emit_modrm(p, dst, src);
1293 emit_1ub(p, shuf);
1294 }
1295
1296 void sse2_pshuflw( struct x86_function *p,
1297 struct x86_reg dst,
1298 struct x86_reg src,
1299 unsigned char shuf)
1300 {
1301 DUMP_RRI( dst, src, shuf );
1302 emit_3ub(p, 0xf2, X86_TWOB, 0x70);
1303 emit_modrm(p, dst, src);
1304 emit_1ub(p, shuf);
1305 }
1306
1307 void sse2_pshufhw( struct x86_function *p,
1308 struct x86_reg dst,
1309 struct x86_reg src,
1310 unsigned char shuf)
1311 {
1312 DUMP_RRI( dst, src, shuf );
1313 emit_3ub(p, 0xf3, X86_TWOB, 0x70);
1314 emit_modrm(p, dst, src);
1315 emit_1ub(p, shuf);
1316 }
1317
1318 void sse2_cvttps2dq( struct x86_function *p,
1319 struct x86_reg dst,
1320 struct x86_reg src )
1321 {
1322 DUMP_RR( dst, src );
1323 emit_3ub( p, 0xF3, X86_TWOB, 0x5B );
1324 emit_modrm( p, dst, src );
1325 }
1326
1327 void sse2_cvtps2dq( struct x86_function *p,
1328 struct x86_reg dst,
1329 struct x86_reg src )
1330 {
1331 DUMP_RR( dst, src );
1332 emit_3ub(p, 0x66, X86_TWOB, 0x5B);
1333 emit_modrm( p, dst, src );
1334 }
1335
1336 void sse2_cvtsd2ss( struct x86_function *p,
1337 struct x86_reg dst,
1338 struct x86_reg src )
1339 {
1340 DUMP_RR( dst, src );
1341 emit_3ub(p, 0xf2, 0x0f, 0x5a);
1342 emit_modrm( p, dst, src );
1343 }
1344
1345 void sse2_cvtpd2ps( struct x86_function *p,
1346 struct x86_reg dst,
1347 struct x86_reg src )
1348 {
1349 DUMP_RR( dst, src );
1350 emit_3ub(p, 0x66, 0x0f, 0x5a);
1351 emit_modrm( p, dst, src );
1352 }
1353
1354 void sse2_packssdw( struct x86_function *p,
1355 struct x86_reg dst,
1356 struct x86_reg src )
1357 {
1358 DUMP_RR( dst, src );
1359 emit_3ub(p, 0x66, X86_TWOB, 0x6B);
1360 emit_modrm( p, dst, src );
1361 }
1362
1363 void sse2_packsswb( struct x86_function *p,
1364 struct x86_reg dst,
1365 struct x86_reg src )
1366 {
1367 DUMP_RR( dst, src );
1368 emit_3ub(p, 0x66, X86_TWOB, 0x63);
1369 emit_modrm( p, dst, src );
1370 }
1371
1372 void sse2_packuswb( struct x86_function *p,
1373 struct x86_reg dst,
1374 struct x86_reg src )
1375 {
1376 DUMP_RR( dst, src );
1377 emit_3ub(p, 0x66, X86_TWOB, 0x67);
1378 emit_modrm( p, dst, src );
1379 }
1380
1381 void sse2_punpcklbw( struct x86_function *p,
1382 struct x86_reg dst,
1383 struct x86_reg src )
1384 {
1385 DUMP_RR( dst, src );
1386 emit_3ub(p, 0x66, X86_TWOB, 0x60);
1387 emit_modrm( p, dst, src );
1388 }
1389
1390 void sse2_punpcklwd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1391 {
1392 DUMP_RR( dst, src );
1393 emit_3ub(p, 0x66, 0x0f, 0x61);
1394 emit_modrm( p, dst, src );
1395 }
1396
1397 void sse2_punpckldq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1398 {
1399 DUMP_RR( dst, src );
1400 emit_3ub(p, 0x66, 0x0f, 0x62);
1401 emit_modrm( p, dst, src );
1402 }
1403
1404 void sse2_punpcklqdq( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1405 {
1406 DUMP_RR( dst, src );
1407 emit_3ub(p, 0x66, 0x0f, 0x6c);
1408 emit_modrm( p, dst, src );
1409 }
1410
1411 void sse2_psllw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1412 {
1413 DUMP_RI(dst, imm);
1414 emit_3ub(p, 0x66, 0x0f, 0x71);
1415 emit_modrm_noreg(p, 6, dst);
1416 emit_1ub(p, imm);
1417 }
1418
1419 void sse2_pslld_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1420 {
1421 DUMP_RI(dst, imm);
1422 emit_3ub(p, 0x66, 0x0f, 0x72);
1423 emit_modrm_noreg(p, 6, dst);
1424 emit_1ub(p, imm);
1425 }
1426
1427 void sse2_psllq_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1428 {
1429 DUMP_RI(dst, imm);
1430 emit_3ub(p, 0x66, 0x0f, 0x73);
1431 emit_modrm_noreg(p, 6, dst);
1432 emit_1ub(p, imm);
1433 }
1434
1435 void sse2_psrlw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1436 {
1437 DUMP_RI(dst, imm);
1438 emit_3ub(p, 0x66, 0x0f, 0x71);
1439 emit_modrm_noreg(p, 2, dst);
1440 emit_1ub(p, imm);
1441 }
1442
1443 void sse2_psrld_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1444 {
1445 DUMP_RI(dst, imm);
1446 emit_3ub(p, 0x66, 0x0f, 0x72);
1447 emit_modrm_noreg(p, 2, dst);
1448 emit_1ub(p, imm);
1449 }
1450
1451 void sse2_psrlq_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1452 {
1453 DUMP_RI(dst, imm);
1454 emit_3ub(p, 0x66, 0x0f, 0x73);
1455 emit_modrm_noreg(p, 2, dst);
1456 emit_1ub(p, imm);
1457 }
1458
1459 void sse2_psraw_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1460 {
1461 DUMP_RI(dst, imm);
1462 emit_3ub(p, 0x66, 0x0f, 0x71);
1463 emit_modrm_noreg(p, 4, dst);
1464 emit_1ub(p, imm);
1465 }
1466
1467 void sse2_psrad_imm( struct x86_function *p, struct x86_reg dst, unsigned imm )
1468 {
1469 DUMP_RI(dst, imm);
1470 emit_3ub(p, 0x66, 0x0f, 0x72);
1471 emit_modrm_noreg(p, 4, dst);
1472 emit_1ub(p, imm);
1473 }
1474
1475 void sse2_por( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1476 {
1477 DUMP_RR(dst, src);
1478 emit_3ub(p, 0x66, 0x0f, 0xeb);
1479 emit_modrm(p, dst, src);
1480 }
1481
1482 void sse2_rcpps( struct x86_function *p,
1483 struct x86_reg dst,
1484 struct x86_reg src )
1485 {
1486 DUMP_RR( dst, src );
1487 emit_2ub(p, X86_TWOB, 0x53);
1488 emit_modrm( p, dst, src );
1489 }
1490
1491 void sse2_rcpss( struct x86_function *p,
1492 struct x86_reg dst,
1493 struct x86_reg src )
1494 {
1495 DUMP_RR( dst, src );
1496 emit_3ub(p, 0xF3, X86_TWOB, 0x53);
1497 emit_modrm( p, dst, src );
1498 }
1499
1500 /***********************************************************************
1501 * x87 instructions
1502 */
1503 static void note_x87_pop( struct x86_function *p )
1504 {
1505 p->x87_stack--;
1506 assert(p->x87_stack >= 0);
1507 }
1508
1509 static void note_x87_push( struct x86_function *p )
1510 {
1511 p->x87_stack++;
1512 assert(p->x87_stack <= 7);
1513 }
1514
1515 void x87_assert_stack_empty( struct x86_function *p )
1516 {
1517 assert (p->x87_stack == 0);
1518 }
1519
1520
1521 void x87_fist( struct x86_function *p, struct x86_reg dst )
1522 {
1523 DUMP_R( dst );
1524 emit_1ub(p, 0xdb);
1525 emit_modrm_noreg(p, 2, dst);
1526 }
1527
1528 void x87_fistp( struct x86_function *p, struct x86_reg dst )
1529 {
1530 DUMP_R( dst );
1531 emit_1ub(p, 0xdb);
1532 emit_modrm_noreg(p, 3, dst);
1533 note_x87_pop(p);
1534 }
1535
1536 void x87_fild( struct x86_function *p, struct x86_reg arg )
1537 {
1538 DUMP_R( arg );
1539 emit_1ub(p, 0xdf);
1540 emit_modrm_noreg(p, 0, arg);
1541 note_x87_push(p);
1542 }
1543
1544 void x87_fldz( struct x86_function *p )
1545 {
1546 DUMP();
1547 emit_2ub(p, 0xd9, 0xee);
1548 note_x87_push(p);
1549 }
1550
1551
1552 void x87_fldcw( struct x86_function *p, struct x86_reg arg )
1553 {
1554 DUMP_R( arg );
1555 assert(arg.file == file_REG32);
1556 assert(arg.mod != mod_REG);
1557 emit_1ub(p, 0xd9);
1558 emit_modrm_noreg(p, 5, arg);
1559 }
1560
1561 void x87_fld1( struct x86_function *p )
1562 {
1563 DUMP();
1564 emit_2ub(p, 0xd9, 0xe8);
1565 note_x87_push(p);
1566 }
1567
1568 void x87_fldl2e( struct x86_function *p )
1569 {
1570 DUMP();
1571 emit_2ub(p, 0xd9, 0xea);
1572 note_x87_push(p);
1573 }
1574
1575 void x87_fldln2( struct x86_function *p )
1576 {
1577 DUMP();
1578 emit_2ub(p, 0xd9, 0xed);
1579 note_x87_push(p);
1580 }
1581
1582 void x87_fwait( struct x86_function *p )
1583 {
1584 DUMP();
1585 emit_1ub(p, 0x9b);
1586 }
1587
1588 void x87_fnclex( struct x86_function *p )
1589 {
1590 DUMP();
1591 emit_2ub(p, 0xdb, 0xe2);
1592 }
1593
1594 void x87_fclex( struct x86_function *p )
1595 {
1596 x87_fwait(p);
1597 x87_fnclex(p);
1598 }
1599
1600 void x87_fcmovb( struct x86_function *p, struct x86_reg arg )
1601 {
1602 DUMP_R( arg );
1603 assert(arg.file == file_x87);
1604 emit_2ub(p, 0xda, 0xc0+arg.idx);
1605 }
1606
1607 void x87_fcmove( struct x86_function *p, struct x86_reg arg )
1608 {
1609 DUMP_R( arg );
1610 assert(arg.file == file_x87);
1611 emit_2ub(p, 0xda, 0xc8+arg.idx);
1612 }
1613
1614 void x87_fcmovbe( struct x86_function *p, struct x86_reg arg )
1615 {
1616 DUMP_R( arg );
1617 assert(arg.file == file_x87);
1618 emit_2ub(p, 0xda, 0xd0+arg.idx);
1619 }
1620
1621 void x87_fcmovnb( struct x86_function *p, struct x86_reg arg )
1622 {
1623 DUMP_R( arg );
1624 assert(arg.file == file_x87);
1625 emit_2ub(p, 0xdb, 0xc0+arg.idx);
1626 }
1627
1628 void x87_fcmovne( struct x86_function *p, struct x86_reg arg )
1629 {
1630 DUMP_R( arg );
1631 assert(arg.file == file_x87);
1632 emit_2ub(p, 0xdb, 0xc8+arg.idx);
1633 }
1634
1635 void x87_fcmovnbe( struct x86_function *p, struct x86_reg arg )
1636 {
1637 DUMP_R( arg );
1638 assert(arg.file == file_x87);
1639 emit_2ub(p, 0xdb, 0xd0+arg.idx);
1640 }
1641
1642
1643
1644 static void x87_arith_op( struct x86_function *p, struct x86_reg dst, struct x86_reg arg,
1645 unsigned char dst0ub0,
1646 unsigned char dst0ub1,
1647 unsigned char arg0ub0,
1648 unsigned char arg0ub1,
1649 unsigned char argmem_noreg)
1650 {
1651 assert(dst.file == file_x87);
1652
1653 if (arg.file == file_x87) {
1654 if (dst.idx == 0)
1655 emit_2ub(p, dst0ub0, dst0ub1+arg.idx);
1656 else if (arg.idx == 0)
1657            emit_2ub(p, arg0ub0, arg0ub1+dst.idx);
1658 else
1659 assert(0);
1660 }
1661 else if (dst.idx == 0) {
1662 assert(arg.file == file_REG32);
1663 emit_1ub(p, 0xd8);
1664 emit_modrm_noreg(p, argmem_noreg, arg);
1665 }
1666 else
1667 assert(0);
1668 }
1669
1670 void x87_fmul( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1671 {
1672 DUMP_RR( dst, src );
1673 x87_arith_op(p, dst, src,
1674 0xd8, 0xc8,
1675 0xdc, 0xc8,
1676                      1);      /* FMUL m32fp is D8 /1 */
1677 }
1678
1679 void x87_fsub( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1680 {
1681 DUMP_RR( dst, src );
1682 x87_arith_op(p, dst, src,
1683 0xd8, 0xe0,
1684 0xdc, 0xe8,
1685 4);
1686 }
1687
1688 void x87_fsubr( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1689 {
1690 DUMP_RR( dst, src );
1691 x87_arith_op(p, dst, src,
1692 0xd8, 0xe8,
1693 0xdc, 0xe0,
1694 5);
1695 }
1696
1697 void x87_fadd( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1698 {
1699 DUMP_RR( dst, src );
1700 x87_arith_op(p, dst, src,
1701 0xd8, 0xc0,
1702 0xdc, 0xc0,
1703 0);
1704 }
1705
1706 void x87_fdiv( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1707 {
1708 DUMP_RR( dst, src );
1709 x87_arith_op(p, dst, src,
1710 0xd8, 0xf0,
1711 0xdc, 0xf8,
1712 6);
1713 }
1714
1715 void x87_fdivr( struct x86_function *p, struct x86_reg dst, struct x86_reg src )
1716 {
1717 DUMP_RR( dst, src );
1718 x87_arith_op(p, dst, src,
1719 0xd8, 0xf8,
1720 0xdc, 0xf0,
1721 7);
1722 }
1723
1724 void x87_fmulp( struct x86_function *p, struct x86_reg dst )
1725 {
1726 DUMP_R( dst );
1727 assert(dst.file == file_x87);
1728 assert(dst.idx >= 1);
1729 emit_2ub(p, 0xde, 0xc8+dst.idx);
1730 note_x87_pop(p);
1731 }
1732
1733 void x87_fsubp( struct x86_function *p, struct x86_reg dst )
1734 {
1735 DUMP_R( dst );
1736 assert(dst.file == file_x87);
1737 assert(dst.idx >= 1);
1738 emit_2ub(p, 0xde, 0xe8+dst.idx);
1739 note_x87_pop(p);
1740 }
1741
1742 void x87_fsubrp( struct x86_function *p, struct x86_reg dst )
1743 {
1744 DUMP_R( dst );
1745 assert(dst.file == file_x87);
1746 assert(dst.idx >= 1);
1747 emit_2ub(p, 0xde, 0xe0+dst.idx);
1748 note_x87_pop(p);
1749 }
1750
1751 void x87_faddp( struct x86_function *p, struct x86_reg dst )
1752 {
1753 DUMP_R( dst );
1754 assert(dst.file == file_x87);
1755 assert(dst.idx >= 1);
1756 emit_2ub(p, 0xde, 0xc0+dst.idx);
1757 note_x87_pop(p);
1758 }
1759
1760 void x87_fdivp( struct x86_function *p, struct x86_reg dst )
1761 {
1762 DUMP_R( dst );
1763 assert(dst.file == file_x87);
1764 assert(dst.idx >= 1);
1765 emit_2ub(p, 0xde, 0xf8+dst.idx);
1766 note_x87_pop(p);
1767 }
1768
1769 void x87_fdivrp( struct x86_function *p, struct x86_reg dst )
1770 {
1771 DUMP_R( dst );
1772 assert(dst.file == file_x87);
1773 assert(dst.idx >= 1);
1774 emit_2ub(p, 0xde, 0xf0+dst.idx);
1775 note_x87_pop(p);
1776 }
1777
1778 void x87_ftst( struct x86_function *p )
1779 {
1780 DUMP();
1781 emit_2ub(p, 0xd9, 0xe4);
1782 }
1783
1784 void x87_fucom( struct x86_function *p, struct x86_reg arg )
1785 {
1786 DUMP_R( arg );
1787 assert(arg.file == file_x87);
1788 emit_2ub(p, 0xdd, 0xe0+arg.idx);
1789 }
1790
1791 void x87_fucomp( struct x86_function *p, struct x86_reg arg )
1792 {
1793 DUMP_R( arg );
1794 assert(arg.file == file_x87);
1795 emit_2ub(p, 0xdd, 0xe8+arg.idx);
1796 note_x87_pop(p);
1797 }
1798
1799 void x87_fucompp( struct x86_function *p )
1800 {
1801 DUMP();
1802 emit_2ub(p, 0xda, 0xe9);
1803 note_x87_pop(p); /* pop twice */
1804 note_x87_pop(p); /* pop twice */
1805 }
1806
1807 void x87_fxch( struct x86_function *p, struct x86_reg arg )
1808 {
1809 DUMP_R( arg );
1810 assert(arg.file == file_x87);
1811 emit_2ub(p, 0xd9, 0xc8+arg.idx);
1812 }
1813
1814 void x87_fabs( struct x86_function *p )
1815 {
1816 DUMP();
1817 emit_2ub(p, 0xd9, 0xe1);
1818 }
1819
1820 void x87_fchs( struct x86_function *p )
1821 {
1822 DUMP();
1823 emit_2ub(p, 0xd9, 0xe0);
1824 }
1825
1826 void x87_fcos( struct x86_function *p )
1827 {
1828 DUMP();
1829 emit_2ub(p, 0xd9, 0xff);
1830 }
1831
1832
1833 void x87_fprndint( struct x86_function *p )
1834 {
1835 DUMP();
1836 emit_2ub(p, 0xd9, 0xfc);
1837 }
1838
1839 void x87_fscale( struct x86_function *p )
1840 {
1841 DUMP();
1842 emit_2ub(p, 0xd9, 0xfd);
1843 }
1844
1845 void x87_fsin( struct x86_function *p )
1846 {
1847 DUMP();
1848 emit_2ub(p, 0xd9, 0xfe);
1849 }
1850
1851 void x87_fsincos( struct x86_function *p )
1852 {
1853 DUMP();
1854 emit_2ub(p, 0xd9, 0xfb);
1855 }
1856
1857 void x87_fsqrt( struct x86_function *p )
1858 {
1859 DUMP();
1860 emit_2ub(p, 0xd9, 0xfa);
1861 }
1862
1863 void x87_fxtract( struct x86_function *p )
1864 {
1865 DUMP();
1866 emit_2ub(p, 0xd9, 0xf4);
1867 }
1868
1869 /* st0 = (2^st0)-1
1870 *
1871 * Restrictions: -1.0 <= st0 <= 1.0
1872 */
1873 void x87_f2xm1( struct x86_function *p )
1874 {
1875 DUMP();
1876 emit_2ub(p, 0xd9, 0xf0);
1877 }
1878
1879 /* st1 = st1 * log2(st0);
1880 * pop_stack;
1881 */
1882 void x87_fyl2x( struct x86_function *p )
1883 {
1884 DUMP();
1885 emit_2ub(p, 0xd9, 0xf1);
1886 note_x87_pop(p);
1887 }
1888
1889 /* st1 = st1 * log2(st0 + 1.0);
1890 * pop_stack;
1891 *
1892 * A fast operation, with restrictions: -.29 < st0 < .29
1893 */
1894 void x87_fyl2xp1( struct x86_function *p )
1895 {
1896 DUMP();
1897 emit_2ub(p, 0xd9, 0xf9);
1898 note_x87_pop(p);
1899 }
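
/* Usage sketch (illustrative): a plain log2(x) can be built from
 * x87_fld1()/x87_fyl2x() together with x87_fld() below, by loading an
 * explicit 1.0 first:
 *
 *    x87_fld1(p);          -- st0 = 1.0
 *    x87_fld(p, src);      -- st0 = x, st1 = 1.0
 *    x87_fyl2x(p);         -- st0 = 1.0 * log2(x), one value popped
 *
 * where "src" is a caller-provided memory operand (hypothetical here).
 */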
1900
1901
1902 void x87_fld( struct x86_function *p, struct x86_reg arg )
1903 {
1904 DUMP_R( arg );
1905 if (arg.file == file_x87)
1906 emit_2ub(p, 0xd9, 0xc0 + arg.idx);
1907 else {
1908 emit_1ub(p, 0xd9);
1909 emit_modrm_noreg(p, 0, arg);
1910 }
1911 note_x87_push(p);
1912 }
1913
1914 void x87_fst( struct x86_function *p, struct x86_reg dst )
1915 {
1916 DUMP_R( dst );
1917 if (dst.file == file_x87)
1918 emit_2ub(p, 0xdd, 0xd0 + dst.idx);
1919 else {
1920 emit_1ub(p, 0xd9);
1921 emit_modrm_noreg(p, 2, dst);
1922 }
1923 }
1924
1925 void x87_fstp( struct x86_function *p, struct x86_reg dst )
1926 {
1927 DUMP_R( dst );
1928 if (dst.file == file_x87)
1929 emit_2ub(p, 0xdd, 0xd8 + dst.idx);
1930 else {
1931 emit_1ub(p, 0xd9);
1932 emit_modrm_noreg(p, 3, dst);
1933 }
1934 note_x87_pop(p);
1935 }
1936
1937 void x87_fpop( struct x86_function *p )
1938 {
1939 x87_fstp( p, x86_make_reg( file_x87, 0 ));
1940 }
1941
1942
1943 void x87_fcom( struct x86_function *p, struct x86_reg dst )
1944 {
1945 DUMP_R( dst );
1946 if (dst.file == file_x87)
1947 emit_2ub(p, 0xd8, 0xd0 + dst.idx);
1948 else {
1949 emit_1ub(p, 0xd8);
1950 emit_modrm_noreg(p, 2, dst);
1951 }
1952 }
1953
1954
1955 void x87_fcomp( struct x86_function *p, struct x86_reg dst )
1956 {
1957 DUMP_R( dst );
1958 if (dst.file == file_x87)
1959 emit_2ub(p, 0xd8, 0xd8 + dst.idx);
1960 else {
1961 emit_1ub(p, 0xd8);
1962 emit_modrm_noreg(p, 3, dst);
1963 }
1964 note_x87_pop(p);
1965 }
1966
1967 void x87_fcomi( struct x86_function *p, struct x86_reg arg )
1968 {
1969 DUMP_R( arg );
1970 emit_2ub(p, 0xdb, 0xf0+arg.idx);
1971 }
1972
1973 void x87_fcomip( struct x86_function *p, struct x86_reg arg )
1974 {
1975 DUMP_R( arg );
1976         emit_2ub(p, 0xdf, 0xf0+arg.idx);   /* FCOMIP ST,ST(i) = DF F0+i */
1977 note_x87_pop(p);
1978 }
1979
1980
1981 void x87_fnstsw( struct x86_function *p, struct x86_reg dst )
1982 {
1983 DUMP_R( dst );
1984 assert(dst.file == file_REG32);
1985
1986 if (dst.idx == reg_AX &&
1987 dst.mod == mod_REG)
1988 emit_2ub(p, 0xdf, 0xe0);
1989 else {
1990 emit_1ub(p, 0xdd);
1991 emit_modrm_noreg(p, 7, dst);
1992 }
1993 }
1994
1995
1996 void x87_fnstcw( struct x86_function *p, struct x86_reg dst )
1997 {
1998 DUMP_R( dst );
1999 assert(dst.file == file_REG32);
2000
2001 emit_1ub(p, 0x9b); /* WAIT -- needed? */
2002 emit_1ub(p, 0xd9);
2003 emit_modrm_noreg(p, 7, dst);
2004 }
2005
2006
2007
2008
2009 /***********************************************************************
2010 * MMX instructions
2011 */
2012
2013 void mmx_emms( struct x86_function *p )
2014 {
2015 DUMP();
2016 assert(p->need_emms);
2017 emit_2ub(p, 0x0f, 0x77);
2018 p->need_emms = 0;
2019 }
2020
2021 void mmx_packssdw( struct x86_function *p,
2022 struct x86_reg dst,
2023 struct x86_reg src )
2024 {
2025 DUMP_RR( dst, src );
2026 assert(dst.file == file_MMX &&
2027 (src.file == file_MMX || src.mod != mod_REG));
2028
2029 p->need_emms = 1;
2030
2031 emit_2ub(p, X86_TWOB, 0x6b);
2032 emit_modrm( p, dst, src );
2033 }
2034
2035 void mmx_packuswb( struct x86_function *p,
2036 struct x86_reg dst,
2037 struct x86_reg src )
2038 {
2039 DUMP_RR( dst, src );
2040 assert(dst.file == file_MMX &&
2041 (src.file == file_MMX || src.mod != mod_REG));
2042
2043 p->need_emms = 1;
2044
2045 emit_2ub(p, X86_TWOB, 0x67);
2046 emit_modrm( p, dst, src );
2047 }
2048
2049 void mmx_movd( struct x86_function *p,
2050 struct x86_reg dst,
2051 struct x86_reg src )
2052 {
2053 DUMP_RR( dst, src );
2054 p->need_emms = 1;
2055 emit_1ub(p, X86_TWOB);
2056 emit_op_modrm( p, 0x6e, 0x7e, dst, src );
2057 }
2058
2059 void mmx_movq( struct x86_function *p,
2060 struct x86_reg dst,
2061 struct x86_reg src )
2062 {
2063 DUMP_RR( dst, src );
2064 p->need_emms = 1;
2065 emit_1ub(p, X86_TWOB);
2066 emit_op_modrm( p, 0x6f, 0x7f, dst, src );
2067 }
2068
2069
2070 /***********************************************************************
2071 * Helper functions
2072 */
2073
2074
2075 void x86_cdecl_caller_push_regs( struct x86_function *p )
2076 {
2077 x86_push(p, x86_make_reg(file_REG32, reg_AX));
2078 x86_push(p, x86_make_reg(file_REG32, reg_CX));
2079 x86_push(p, x86_make_reg(file_REG32, reg_DX));
2080 }
2081
2082 void x86_cdecl_caller_pop_regs( struct x86_function *p )
2083 {
2084 x86_pop(p, x86_make_reg(file_REG32, reg_DX));
2085 x86_pop(p, x86_make_reg(file_REG32, reg_CX));
2086 x86_pop(p, x86_make_reg(file_REG32, reg_AX));
2087 }
2088
2089
2090 struct x86_reg x86_fn_arg( struct x86_function *p,
2091 unsigned arg )
2092 {
2093 switch(x86_target(p))
2094 {
2095 case X86_64_WIN64_ABI:
2096 /* Microsoft uses a different calling convention than the rest of the world */
2097 switch(arg)
2098 {
2099 case 1:
2100 return x86_make_reg(file_REG32, reg_CX);
2101 case 2:
2102 return x86_make_reg(file_REG32, reg_DX);
2103 case 3:
2104 return x86_make_reg(file_REG32, reg_R8);
2105 case 4:
2106 return x86_make_reg(file_REG32, reg_R9);
2107 default:
2108 /* Win64 allocates stack slots as if it pushed the first 4 arguments too */
2109 return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
2110 p->stack_offset + arg * 8);
2111 }
2112 case X86_64_STD_ABI:
2113 switch(arg)
2114 {
2115 case 1:
2116 return x86_make_reg(file_REG32, reg_DI);
2117 case 2:
2118 return x86_make_reg(file_REG32, reg_SI);
2119 case 3:
2120 return x86_make_reg(file_REG32, reg_DX);
2121 case 4:
2122 return x86_make_reg(file_REG32, reg_CX);
2123 case 5:
2124 return x86_make_reg(file_REG32, reg_R8);
2125 case 6:
2126 return x86_make_reg(file_REG32, reg_R9);
2127 default:
2128 return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
2129 p->stack_offset + (arg - 6) * 8); /* ??? */
2130 }
2131 case X86_32:
2132 return x86_make_disp(x86_make_reg(file_REG32, reg_SP),
2133 p->stack_offset + arg * 4); /* ??? */
2134 default:
2135 abort();
2136 }
2137 }
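
/* Usage sketch (illustrative): fetching the first function argument in
 * an ABI-neutral way.  On X86_32 this loads from the stack, on the
 * 64-bit ABIs it copies from the first argument register:
 *
 *    x86_mov(p, x86_make_reg(file_REG32, reg_AX), x86_fn_arg(p, 1));
 *
 * Note that on 64-bit targets a pointer-sized argument needs the
 * full-width x64_mov64() rather than the 32-bit x86_mov().
 */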
2138
2139 static void x86_init_func_common( struct x86_function *p )
2140 {
2141 util_cpu_detect();
2142 p->caps = 0;
2143 if(util_cpu_caps.has_mmx)
2144 p->caps |= X86_MMX;
2145 if(util_cpu_caps.has_mmx2)
2146 p->caps |= X86_MMX2;
2147 if(util_cpu_caps.has_sse)
2148 p->caps |= X86_SSE;
2149 if(util_cpu_caps.has_sse2)
2150 p->caps |= X86_SSE2;
2151 if(util_cpu_caps.has_sse3)
2152 p->caps |= X86_SSE3;
2153 if(util_cpu_caps.has_sse4_1)
2154 p->caps |= X86_SSE4_1;
2155 p->csr = p->store;
2156 DUMP_START();
2157 }
2158
2159 void x86_init_func( struct x86_function *p )
2160 {
2161 p->size = 0;
2162 p->store = NULL;
2163 x86_init_func_common(p);
2164 }
2165
2166 void x86_init_func_size( struct x86_function *p, unsigned code_size )
2167 {
2168 p->size = code_size;
2169 p->store = rtasm_exec_malloc(code_size);
2170 if (p->store == NULL) {
2171 p->store = p->error_overflow;
2172 }
2173 x86_init_func_common(p);
2174 }
2175
2176 void x86_release_func( struct x86_function *p )
2177 {
2178 if (p->store && p->store != p->error_overflow)
2179 rtasm_exec_free(p->store);
2180
2181 p->store = NULL;
2182 p->csr = NULL;
2183 p->size = 0;
2184 }
2185
2186
2187 static INLINE x86_func
2188 voidptr_to_x86_func(void *v)
2189 {
2190 union {
2191 void *v;
2192 x86_func f;
2193 } u;
2194 assert(sizeof(u.v) == sizeof(u.f));
2195 u.v = v;
2196 return u.f;
2197 }
2198
2199
2200 x86_func x86_get_func( struct x86_function *p )
2201 {
2202 DUMP_END();
2203 if (DISASSEM && p->store)
2204 debug_printf("disassemble %p %p\n", p->store, p->csr);
2205
2206 if (p->store == p->error_overflow)
2207 return voidptr_to_x86_func(NULL);
2208 else
2209 return voidptr_to_x86_func(p->store);
2210 }
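
/* End-to-end usage sketch (illustrative; "func" and "f" are hypothetical
 * locals).  Generate a function that returns its first argument:
 *
 *    struct x86_function func;
 *    x86_init_func(&func);
 *    x86_mov(&func, x86_make_reg(file_REG32, reg_AX), x86_fn_arg(&func, 1));
 *    x86_ret(&func);
 *
 *    x86_func f = x86_get_func(&func);
 *    if (f) {
 *       ... call through an appropriately cast function pointer ...
 *    }
 *    x86_release_func(&func);
 *
 * x86_get_func() returns NULL if code emission overflowed into the
 * error buffer.
 */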
2211
2212 #else
2213
2214 void x86sse_dummy( void );
2215
2216 void x86sse_dummy( void )
2217 {
2218 }
2219
2220 #endif