1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_info.h"
45 #include "tgsi/tgsi_parse.h"
46 #include "tgsi/tgsi_util.h"
47 #include "tgsi/tgsi_exec.h"
48 #include "lp_bld_type.h"
49 #include "lp_bld_const.h"
50 #include "lp_bld_arit.h"
51 #include "lp_bld_logic.h"
52 #include "lp_bld_swizzle.h"
53 #include "lp_bld_flow.h"
54 #include "lp_bld_tgsi.h"
55 #include "lp_bld_debug.h"
/* Upper bounds on the TGSI register files we mirror as LLVM allocas. */
#define LP_MAX_TEMPS 256
#define LP_MAX_IMMEDIATES 256

/* Iterate CHAN over all NUM_CHANNELS (x,y,z,w) components. */
#define FOR_EACH_CHANNEL( CHAN )\
   for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)

/* Test bit CHAN of the first destination register's writemask. */
#define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))

/* Guard a statement so it only runs for writemask-enabled channels. */
#define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))

/* Loop over only the channels enabled in dst[0]'s writemask. */
#define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
   FOR_EACH_CHANNEL( CHAN )\
      IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )

/* Lane indices of the four pixels within a 2x2 fragment quad. */
#define QUAD_TOP_LEFT     0
#define QUAD_TOP_RIGHT    1
#define QUAD_BOTTOM_LEFT  2
#define QUAD_BOTTOM_RIGHT 3

/* Maximum nesting depth of IF/LOOP constructs we track. */
#define LP_TGSI_MAX_NESTING 16
88 struct lp_build_context
*bld
;
92 LLVMTypeRef int_vec_type
;
94 LLVMValueRef cond_stack
[LP_TGSI_MAX_NESTING
];
96 LLVMValueRef cond_mask
;
98 LLVMValueRef break_stack
[LP_TGSI_MAX_NESTING
];
100 LLVMValueRef break_mask
;
102 LLVMValueRef cont_stack
[LP_TGSI_MAX_NESTING
];
104 LLVMValueRef cont_mask
;
106 LLVMBasicBlockRef loop_stack
[LP_TGSI_MAX_NESTING
];
108 LLVMBasicBlockRef loop_block
;
111 LLVMValueRef exec_mask
;
114 struct lp_build_tgsi_soa_context
116 struct lp_build_context base
;
118 LLVMValueRef consts_ptr
;
119 const LLVMValueRef
*pos
;
120 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
121 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
123 struct lp_build_sampler_soa
*sampler
;
125 LLVMValueRef immediates
[LP_MAX_IMMEDIATES
][NUM_CHANNELS
];
126 LLVMValueRef temps
[LP_MAX_TEMPS
][NUM_CHANNELS
];
128 struct lp_build_mask_context
*mask
;
129 struct lp_exec_mask exec_mask
;
132 static const unsigned char
134 QUAD_TOP_LEFT
, QUAD_TOP_LEFT
,
135 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_LEFT
138 static const unsigned char
140 QUAD_TOP_RIGHT
, QUAD_TOP_RIGHT
,
141 QUAD_BOTTOM_RIGHT
, QUAD_BOTTOM_RIGHT
144 static const unsigned char
146 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
,
147 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
150 static const unsigned char
151 swizzle_bottom
[4] = {
152 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
,
153 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
156 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
159 mask
->has_mask
= FALSE
;
160 mask
->cond_stack_size
= 0;
161 mask
->loop_stack_size
= 0;
162 mask
->break_stack_size
= 0;
163 mask
->cont_stack_size
= 0;
165 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
168 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
170 if (mask
->loop_stack_size
) {
171 /*for loops we need to update the entire mask at
174 tmp
= LLVMBuildAnd(mask
->bld
->builder
,
178 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
183 mask
->exec_mask
= mask
->cond_mask
;
186 mask
->has_mask
= (mask
->cond_stack_size
> 0 ||
187 mask
->loop_stack_size
> 0);
190 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
193 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
194 mask
->cond_mask
= LLVMBuildBitCast(mask
->bld
->builder
, val
,
195 mask
->int_vec_type
, "");
197 lp_exec_mask_update(mask
);
200 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
202 LLVMValueRef prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
203 LLVMValueRef inv_mask
= LLVMBuildNot(mask
->bld
->builder
,
204 mask
->cond_mask
, "");
206 /* means that we didn't have any mask before and that
207 * we were fully enabled */
208 if (mask
->cond_stack_size
<= 1) {
209 prev_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
212 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
215 lp_exec_mask_update(mask
);
218 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
220 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
221 lp_exec_mask_update(mask
);
224 static void lp_exec_bgnloop(struct lp_exec_mask
*mask
)
227 if (mask
->cont_stack_size
== 0)
228 mask
->cont_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
229 if (mask
->cont_stack_size
== 0)
230 mask
->break_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
231 if (mask
->cond_stack_size
== 0)
232 mask
->cond_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
233 mask
->loop_stack
[mask
->loop_stack_size
++] = mask
->loop_block
;
234 mask
->loop_block
= lp_build_insert_new_block(mask
->bld
->builder
, "bgnloop");
235 LLVMBuildBr(mask
->bld
->builder
, mask
->loop_block
);
236 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, mask
->loop_block
);
238 lp_exec_mask_update(mask
);
241 static void lp_exec_break(struct lp_exec_mask
*mask
)
243 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
247 mask
->break_stack
[mask
->break_stack_size
++] = mask
->break_mask
;
248 if (mask
->break_stack_size
> 1) {
249 mask
->break_mask
= LLVMBuildAnd(mask
->bld
->builder
,
251 exec_mask
, "break_full");
253 mask
->break_mask
= exec_mask
;
255 lp_exec_mask_update(mask
);
258 static void lp_exec_continue(struct lp_exec_mask
*mask
)
260 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
264 mask
->cont_stack
[mask
->cont_stack_size
++] = mask
->cont_mask
;
265 if (mask
->cont_stack_size
> 1) {
266 mask
->cont_mask
= LLVMBuildAnd(mask
->bld
->builder
,
270 mask
->cont_mask
= exec_mask
;
272 lp_exec_mask_update(mask
);
276 static void lp_exec_endloop(struct lp_exec_mask
*mask
)
278 LLVMBasicBlockRef endloop
;
281 { /* convert our soa vector into i1 */
283 LLVMValueRef packed
= 0;
284 for (i
= 0; i
< mask
->bld
->type
.length
; ++i
) {
285 LLVMValueRef component
= LLVMBuildExtractElement(
288 LLVMConstInt(LLVMInt32Type(), i
, 0), "");
290 packed
= LLVMBuildOr(mask
->bld
->builder
,
291 packed
, component
, "");
295 i1cond
= LLVMBuildICmp(mask
->bld
->builder
, LLVMIntNE
,
297 LLVMConstNull(LLVMTypeOf(packed
)),
301 endloop
= lp_build_insert_new_block(mask
->bld
->builder
, "endloop");
303 LLVMBuildCondBr(mask
->bld
->builder
,
304 i1cond
, endloop
, mask
->loop_block
);
306 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, endloop
);
308 mask
->loop_block
= mask
->loop_stack
[--mask
->loop_stack_size
];
309 /* pop the break mask */
310 if (mask
->cont_stack_size
) {
311 mask
->cont_mask
= mask
->cont_stack
[--mask
->cont_stack_size
];
313 if (mask
->break_stack_size
) {
314 mask
->break_mask
= mask
->cont_stack
[--mask
->break_stack_size
];
317 lp_exec_mask_update(mask
);
320 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
324 if (mask
->has_mask
) {
325 LLVMValueRef real_val
, dst_val
;
327 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
328 real_val
= lp_build_select(mask
->bld
,
332 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
334 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
339 emit_ddx(struct lp_build_tgsi_soa_context
*bld
,
342 LLVMValueRef src_left
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_left
);
343 LLVMValueRef src_right
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_right
);
344 return lp_build_sub(&bld
->base
, src_right
, src_left
);
349 emit_ddy(struct lp_build_tgsi_soa_context
*bld
,
352 LLVMValueRef src_top
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_top
);
353 LLVMValueRef src_bottom
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_bottom
);
354 return lp_build_sub(&bld
->base
, src_top
, src_bottom
);
363 struct lp_build_tgsi_soa_context
*bld
,
364 const struct tgsi_full_instruction
*inst
,
366 const unsigned chan_index
)
368 const struct tgsi_full_src_register
*reg
= &inst
->Src
[index
];
369 unsigned swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
378 switch (reg
->Register
.File
) {
379 case TGSI_FILE_CONSTANT
: {
380 LLVMValueRef index
= LLVMConstInt(LLVMInt32Type(), reg
->Register
.Index
*4 + swizzle
, 0);
381 LLVMValueRef scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
, &index
, 1, "");
382 LLVMValueRef scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
383 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
387 case TGSI_FILE_IMMEDIATE
:
388 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
392 case TGSI_FILE_INPUT
:
393 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
397 case TGSI_FILE_TEMPORARY
:
398 res
= LLVMBuildLoad(bld
->base
.builder
, bld
->temps
[reg
->Register
.Index
][swizzle
], "");
400 return bld
->base
.undef
;
405 return bld
->base
.undef
;
411 return bld
->base
.undef
;
414 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
415 case TGSI_UTIL_SIGN_CLEAR
:
416 res
= lp_build_abs( &bld
->base
, res
);
419 case TGSI_UTIL_SIGN_SET
:
420 /* TODO: Use bitwese OR for floating point */
421 res
= lp_build_abs( &bld
->base
, res
);
422 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
425 case TGSI_UTIL_SIGN_TOGGLE
:
426 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
429 case TGSI_UTIL_SIGN_KEEP
:
438 * Register fetch with derivatives.
442 struct lp_build_tgsi_soa_context
*bld
,
443 const struct tgsi_full_instruction
*inst
,
445 const unsigned chan_index
,
452 src
= emit_fetch(bld
, inst
, index
, chan_index
);
457 /* TODO: use interpolation coeffs for inputs */
460 *ddx
= emit_ddx(bld
, src
);
463 *ddy
= emit_ddy(bld
, src
);
472 struct lp_build_tgsi_soa_context
*bld
,
473 const struct tgsi_full_instruction
*inst
,
478 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
480 switch( inst
->Instruction
.Saturate
) {
484 case TGSI_SAT_ZERO_ONE
:
485 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
486 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
489 case TGSI_SAT_MINUS_PLUS_ONE
:
490 value
= lp_build_max(&bld
->base
, value
, lp_build_const_scalar(bld
->base
.type
, -1.0));
491 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
498 switch( reg
->Register
.File
) {
499 case TGSI_FILE_OUTPUT
:
500 lp_exec_mask_store(&bld
->exec_mask
, value
,
501 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
504 case TGSI_FILE_TEMPORARY
:
505 lp_exec_mask_store(&bld
->exec_mask
, value
,
506 bld
->temps
[reg
->Register
.Index
][chan_index
]);
509 case TGSI_FILE_ADDRESS
:
514 case TGSI_FILE_PREDICATE
:
526 * High-level instruction translators.
531 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
532 const struct tgsi_full_instruction
*inst
,
533 boolean apply_lodbias
,
537 const uint unit
= inst
->Src
[1].Register
.Index
;
538 LLVMValueRef lodbias
;
539 LLVMValueRef oow
= NULL
;
540 LLVMValueRef coords
[3];
544 switch (inst
->Texture
.Texture
) {
545 case TGSI_TEXTURE_1D
:
548 case TGSI_TEXTURE_2D
:
549 case TGSI_TEXTURE_RECT
:
552 case TGSI_TEXTURE_SHADOW1D
:
553 case TGSI_TEXTURE_SHADOW2D
:
554 case TGSI_TEXTURE_SHADOWRECT
:
555 case TGSI_TEXTURE_3D
:
556 case TGSI_TEXTURE_CUBE
:
565 lodbias
= emit_fetch( bld
, inst
, 0, 3 );
567 lodbias
= bld
->base
.zero
;
570 oow
= emit_fetch( bld
, inst
, 0, 3 );
571 oow
= lp_build_rcp(&bld
->base
, oow
);
574 for (i
= 0; i
< num_coords
; i
++) {
575 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
577 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
579 for (i
= num_coords
; i
< 3; i
++) {
580 coords
[i
] = bld
->base
.undef
;
583 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
586 unit
, num_coords
, coords
, lodbias
,
593 struct lp_build_tgsi_soa_context
*bld
,
594 const struct tgsi_full_instruction
*inst
)
596 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
597 LLVMValueRef terms
[NUM_CHANNELS
];
601 memset(&terms
, 0, sizeof terms
);
603 FOR_EACH_CHANNEL( chan_index
) {
606 /* Unswizzle channel */
607 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
609 /* Check if the component has not been already tested. */
610 assert(swizzle
< NUM_CHANNELS
);
611 if( !terms
[swizzle
] )
612 /* TODO: change the comparison operator instead of setting the sign */
613 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
617 FOR_EACH_CHANNEL( chan_index
) {
618 if(terms
[chan_index
]) {
619 LLVMValueRef chan_mask
;
621 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
624 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
631 lp_build_mask_update(bld
->mask
, mask
);
636 * Check if inst src/dest regs use indirect addressing into temporary
640 indirect_temp_reference(const struct tgsi_full_instruction
*inst
)
643 for (i
= 0; i
< inst
->Instruction
.NumSrcRegs
; i
++) {
644 const struct tgsi_full_src_register
*reg
= &inst
->Src
[i
];
645 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
646 reg
->Register
.Indirect
)
649 for (i
= 0; i
< inst
->Instruction
.NumDstRegs
; i
++) {
650 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[i
];
651 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
652 reg
->Register
.Indirect
)
660 struct lp_build_tgsi_soa_context
*bld
,
661 const struct tgsi_full_declaration
*decl
)
663 unsigned first
= decl
->Range
.First
;
664 unsigned last
= decl
->Range
.Last
;
667 for (idx
= first
; idx
<= last
; ++idx
) {
670 switch (decl
->Declaration
.File
) {
671 case TGSI_FILE_TEMPORARY
:
672 for (i
= 0; i
< NUM_CHANNELS
; i
++)
673 bld
->temps
[idx
][i
] = lp_build_alloca(&bld
->base
);
677 case TGSI_FILE_OUTPUT
:
678 for (i
= 0; i
< NUM_CHANNELS
; i
++)
679 bld
->outputs
[idx
][i
] = lp_build_alloca(&bld
->base
);
684 /* don't need to declare other vars */
697 struct lp_build_tgsi_soa_context
*bld
,
698 const struct tgsi_full_instruction
*inst
,
699 const struct tgsi_opcode_info
*info
)
702 LLVMValueRef src0
, src1
, src2
;
703 LLVMValueRef tmp0
, tmp1
, tmp2
;
704 LLVMValueRef tmp3
= NULL
;
705 LLVMValueRef tmp4
= NULL
;
706 LLVMValueRef tmp5
= NULL
;
707 LLVMValueRef tmp6
= NULL
;
708 LLVMValueRef tmp7
= NULL
;
710 LLVMValueRef dst0
[NUM_CHANNELS
];
712 /* we can't handle indirect addressing into temp register file yet */
713 if (indirect_temp_reference(inst
))
717 * Stores and write masks are handled in a general fashion after the long
718 * instruction opcode switch statement.
720 * Although not stricitly necessary, we avoid generating instructions for
721 * channels which won't be stored, in cases where's that easy. For some
722 * complex instructions, like texture sampling, it is more convenient to
723 * assume a full writemask and then let LLVM optimization passes eliminate
727 assert(info
->num_dst
<= 1);
729 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
730 dst0
[chan_index
] = bld
->base
.undef
;
734 switch (inst
->Instruction
.Opcode
) {
736 case TGSI_OPCODE_ARL
:
738 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
739 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
742 dst0
[chan_index
] = tmp0
;
747 case TGSI_OPCODE_MOV
:
748 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
749 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
753 case TGSI_OPCODE_LIT
:
754 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
755 dst0
[CHAN_X
] = bld
->base
.one
;
757 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
758 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
759 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
761 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
762 /* XMM[1] = SrcReg[0].yyyy */
763 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
764 /* XMM[1] = max(XMM[1], 0) */
765 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
766 /* XMM[2] = SrcReg[0].wwww */
767 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
768 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
769 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
770 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
771 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
773 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
774 dst0
[CHAN_W
] = bld
->base
.one
;
778 case TGSI_OPCODE_RCP
:
779 /* TGSI_OPCODE_RECIP */
780 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
781 res
= lp_build_rcp(&bld
->base
, src0
);
782 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
783 dst0
[chan_index
] = res
;
787 case TGSI_OPCODE_RSQ
:
788 /* TGSI_OPCODE_RECIPSQRT */
789 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
790 src0
= lp_build_abs(&bld
->base
, src0
);
791 res
= lp_build_rsqrt(&bld
->base
, src0
);
792 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
793 dst0
[chan_index
] = res
;
797 case TGSI_OPCODE_EXP
:
798 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
799 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
800 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
801 LLVMValueRef
*p_exp2_int_part
= NULL
;
802 LLVMValueRef
*p_frac_part
= NULL
;
803 LLVMValueRef
*p_exp2
= NULL
;
805 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
807 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
808 p_exp2_int_part
= &tmp0
;
809 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
811 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
814 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
816 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
818 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
820 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
824 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
825 dst0
[CHAN_W
] = bld
->base
.one
;
829 case TGSI_OPCODE_LOG
:
830 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
831 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
832 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
833 LLVMValueRef
*p_floor_log2
= NULL
;
834 LLVMValueRef
*p_exp
= NULL
;
835 LLVMValueRef
*p_log2
= NULL
;
837 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
838 src0
= lp_build_abs( &bld
->base
, src0
);
840 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
841 p_floor_log2
= &tmp0
;
842 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
844 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
847 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
849 /* dst.x = floor(lg2(abs(src.x))) */
850 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
852 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
853 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
854 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
856 /* dst.z = lg2(abs(src.x)) */
857 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
861 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
862 dst0
[CHAN_W
] = bld
->base
.one
;
866 case TGSI_OPCODE_MUL
:
867 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
868 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
869 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
870 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
874 case TGSI_OPCODE_ADD
:
875 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
876 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
877 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
878 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
882 case TGSI_OPCODE_DP3
:
883 /* TGSI_OPCODE_DOT3 */
884 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
885 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
886 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
887 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
888 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
889 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
890 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
891 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
892 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
893 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
894 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
895 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
896 dst0
[chan_index
] = tmp0
;
900 case TGSI_OPCODE_DP4
:
901 /* TGSI_OPCODE_DOT4 */
902 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
903 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
904 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
905 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
906 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
907 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
908 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
909 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
910 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
911 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
912 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
913 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
914 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
915 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
916 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
917 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
918 dst0
[chan_index
] = tmp0
;
922 case TGSI_OPCODE_DST
:
923 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
924 dst0
[CHAN_X
] = bld
->base
.one
;
926 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
927 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
928 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
929 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
931 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
932 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
934 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
935 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
939 case TGSI_OPCODE_MIN
:
940 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
941 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
942 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
943 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
947 case TGSI_OPCODE_MAX
:
948 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
949 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
950 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
951 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
955 case TGSI_OPCODE_SLT
:
956 /* TGSI_OPCODE_SETLT */
957 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
958 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
959 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
960 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
961 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
965 case TGSI_OPCODE_SGE
:
966 /* TGSI_OPCODE_SETGE */
967 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
968 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
969 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
970 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
971 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
975 case TGSI_OPCODE_MAD
:
976 /* TGSI_OPCODE_MADD */
977 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
978 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
979 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
980 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
981 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
982 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
983 dst0
[chan_index
] = tmp0
;
987 case TGSI_OPCODE_SUB
:
988 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
989 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
990 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
991 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
995 case TGSI_OPCODE_LRP
:
996 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
997 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
998 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
999 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1000 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
1001 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
1002 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
1006 case TGSI_OPCODE_CND
:
1007 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1008 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1009 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1010 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1011 tmp1
= lp_build_const_scalar(bld
->base
.type
, 0.5);
1012 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
1013 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
1017 case TGSI_OPCODE_DP2A
:
1018 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1019 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1020 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1021 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1022 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1023 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1024 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1025 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
1026 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1027 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1028 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1032 case TGSI_OPCODE_FRC
:
1033 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1034 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1035 tmp0
= lp_build_floor(&bld
->base
, src0
);
1036 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
1037 dst0
[chan_index
] = tmp0
;
1041 case TGSI_OPCODE_CLAMP
:
1042 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1043 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1044 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1045 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1046 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
1047 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
1048 dst0
[chan_index
] = tmp0
;
1052 case TGSI_OPCODE_FLR
:
1053 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1054 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1055 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
1059 case TGSI_OPCODE_ROUND
:
1060 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1061 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1062 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
1066 case TGSI_OPCODE_EX2
: {
1067 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1068 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
1069 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1070 dst0
[chan_index
] = tmp0
;
1075 case TGSI_OPCODE_LG2
:
1076 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1077 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
1078 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1079 dst0
[chan_index
] = tmp0
;
1083 case TGSI_OPCODE_POW
:
1084 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1085 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1086 res
= lp_build_pow( &bld
->base
, src0
, src1
);
1087 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1088 dst0
[chan_index
] = res
;
1092 case TGSI_OPCODE_XPD
:
1093 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1094 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1095 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1096 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1098 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1099 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1100 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1101 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1103 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1105 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
1107 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1108 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
1109 dst0
[CHAN_X
] = tmp2
;
1111 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1112 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1113 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1114 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1116 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1117 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
1118 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
1119 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
1120 dst0
[CHAN_Y
] = tmp3
;
1122 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1123 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1124 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
1125 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1126 dst0
[CHAN_Z
] = tmp5
;
1128 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1129 dst0
[CHAN_W
] = bld
->base
.one
;
1133 case TGSI_OPCODE_ABS
:
1134 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1135 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1136 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1140 case TGSI_OPCODE_RCC
:
1145 case TGSI_OPCODE_DPH
:
1146 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1147 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1148 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1149 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1150 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1151 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1152 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1153 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1154 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1155 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1156 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1157 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1158 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1159 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1160 dst0
[chan_index
] = tmp0
;
1164 case TGSI_OPCODE_COS
:
1165 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1166 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1167 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1168 dst0
[chan_index
] = tmp0
;
1172 case TGSI_OPCODE_DDX
:
1173 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1174 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1178 case TGSI_OPCODE_DDY
:
1179 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1180 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1184 case TGSI_OPCODE_KILP
:
1185 /* predicated kill */
1190 case TGSI_OPCODE_KIL
:
1191 /* conditional kill */
1192 emit_kil( bld
, inst
);
1195 case TGSI_OPCODE_PK2H
:
1199 case TGSI_OPCODE_PK2US
:
1203 case TGSI_OPCODE_PK4B
:
1207 case TGSI_OPCODE_PK4UB
:
1211 case TGSI_OPCODE_RFL
:
1215 case TGSI_OPCODE_SEQ
:
1216 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1217 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1218 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1219 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1220 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1224 case TGSI_OPCODE_SFL
:
1225 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1226 dst0
[chan_index
] = bld
->base
.zero
;
1230 case TGSI_OPCODE_SGT
:
1231 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1232 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1233 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1234 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1235 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1239 case TGSI_OPCODE_SIN
:
1240 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1241 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1242 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1243 dst0
[chan_index
] = tmp0
;
1247 case TGSI_OPCODE_SLE
:
1248 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1249 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1250 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1251 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1252 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1256 case TGSI_OPCODE_SNE
:
1257 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1258 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1259 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1260 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1261 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1265 case TGSI_OPCODE_STR
:
1266 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1267 dst0
[chan_index
] = bld
->base
.one
;
1271 case TGSI_OPCODE_TEX
:
1272 emit_tex( bld
, inst
, FALSE
, FALSE
, dst0
);
1275 case TGSI_OPCODE_TXD
:
1280 case TGSI_OPCODE_UP2H
:
1286 case TGSI_OPCODE_UP2US
:
1292 case TGSI_OPCODE_UP4B
:
1298 case TGSI_OPCODE_UP4UB
:
1304 case TGSI_OPCODE_X2D
:
1310 case TGSI_OPCODE_ARA
:
1317 case TGSI_OPCODE_ARR
:
1319 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1320 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1321 emit_rnd( bld
, 0, 0 );
1322 emit_f2it( bld
, 0 );
1323 dst0
[chan_index
] = tmp0
;
1328 case TGSI_OPCODE_BRA
:
1334 case TGSI_OPCODE_CAL
:
1339 case TGSI_OPCODE_RET
:
1344 case TGSI_OPCODE_END
:
1347 case TGSI_OPCODE_SSG
:
1348 /* TGSI_OPCODE_SGN */
1349 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1350 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1351 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1355 case TGSI_OPCODE_CMP
:
1356 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1357 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1358 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1359 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1360 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1361 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1365 case TGSI_OPCODE_SCS
:
1366 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1367 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1368 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1370 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1371 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1372 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1374 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1375 dst0
[CHAN_Z
] = bld
->base
.zero
;
1377 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1378 dst0
[CHAN_W
] = bld
->base
.one
;
1382 case TGSI_OPCODE_TXB
:
1383 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1386 case TGSI_OPCODE_NRM
:
1388 case TGSI_OPCODE_NRM4
:
1389 /* 3 or 4-component normalization */
1391 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1393 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1394 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1395 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1396 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1398 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1401 /* xmm0 = src.x * src.x */
1402 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1403 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1406 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1409 /* xmm0 = xmm0 + src.y * src.y */
1410 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1411 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1414 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1415 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1418 /* xmm0 = xmm0 + src.z * src.z */
1419 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1420 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1423 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1424 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1428 /* xmm0 = xmm0 + src.w * src.w */
1429 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1430 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1433 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1434 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1437 /* xmm1 = 1 / sqrt(xmm0) */
1438 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1440 /* dst.x = xmm1 * src.x */
1441 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1442 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1445 /* dst.y = xmm1 * src.y */
1446 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1447 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1450 /* dst.z = xmm1 * src.z */
1451 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1452 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1455 /* dst.w = xmm1 * src.w */
1456 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1457 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1462 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1463 dst0
[CHAN_W
] = bld
->base
.one
;
1468 case TGSI_OPCODE_DIV
:
1474 case TGSI_OPCODE_DP2
:
1475 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1476 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1477 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1478 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1479 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1480 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1481 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1482 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1483 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1487 case TGSI_OPCODE_TXL
:
1488 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1491 case TGSI_OPCODE_TXP
:
1492 emit_tex( bld
, inst
, FALSE
, TRUE
, dst0
);
1495 case TGSI_OPCODE_BRK
:
1496 lp_exec_break(&bld
->exec_mask
);
1499 case TGSI_OPCODE_IF
:
1500 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1501 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1504 case TGSI_OPCODE_BGNFOR
:
1510 case TGSI_OPCODE_BGNLOOP
:
1511 lp_exec_bgnloop(&bld
->exec_mask
);
1514 case TGSI_OPCODE_REP
:
1520 case TGSI_OPCODE_ELSE
:
1521 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1524 case TGSI_OPCODE_ENDIF
:
1525 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1528 case TGSI_OPCODE_ENDFOR
:
1534 case TGSI_OPCODE_ENDLOOP
:
1535 lp_exec_endloop(&bld
->exec_mask
);
1538 case TGSI_OPCODE_ENDREP
:
1544 case TGSI_OPCODE_PUSHA
:
1550 case TGSI_OPCODE_POPA
:
1556 case TGSI_OPCODE_CEIL
:
1557 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1558 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1559 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1563 case TGSI_OPCODE_I2F
:
1569 case TGSI_OPCODE_NOT
:
1575 case TGSI_OPCODE_TRUNC
:
1576 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1577 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1578 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
1582 case TGSI_OPCODE_SHL
:
1588 case TGSI_OPCODE_ISHR
:
1594 case TGSI_OPCODE_AND
:
1600 case TGSI_OPCODE_OR
:
1606 case TGSI_OPCODE_MOD
:
1612 case TGSI_OPCODE_XOR
:
1618 case TGSI_OPCODE_SAD
:
1624 case TGSI_OPCODE_TXF
:
1630 case TGSI_OPCODE_TXQ
:
1636 case TGSI_OPCODE_CONT
:
1637 lp_exec_continue(&bld
->exec_mask
);
1640 case TGSI_OPCODE_EMIT
:
1644 case TGSI_OPCODE_ENDPRIM
:
1648 case TGSI_OPCODE_NOP
:
1656 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1657 emit_store( bld
, inst
, 0, chan_index
, dst0
[chan_index
]);
1666 lp_build_tgsi_soa(LLVMBuilderRef builder
,
1667 const struct tgsi_token
*tokens
,
1668 struct lp_type type
,
1669 struct lp_build_mask_context
*mask
,
1670 LLVMValueRef consts_ptr
,
1671 const LLVMValueRef
*pos
,
1672 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
1673 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
1674 struct lp_build_sampler_soa
*sampler
)
1676 struct lp_build_tgsi_soa_context bld
;
1677 struct tgsi_parse_context parse
;
1678 uint num_immediates
= 0;
1681 /* Setup build context */
1682 memset(&bld
, 0, sizeof bld
);
1683 lp_build_context_init(&bld
.base
, builder
, type
);
1686 bld
.inputs
= inputs
;
1687 bld
.outputs
= outputs
;
1688 bld
.consts_ptr
= consts_ptr
;
1689 bld
.sampler
= sampler
;
1691 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
1693 tgsi_parse_init( &parse
, tokens
);
1695 while( !tgsi_parse_end_of_tokens( &parse
) ) {
1696 tgsi_parse_token( &parse
);
1698 switch( parse
.FullToken
.Token
.Type
) {
1699 case TGSI_TOKEN_TYPE_DECLARATION
:
1700 /* Inputs already interpolated */
1702 if (!emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
))
1703 _debug_printf("warning: failed to define LLVM variable\n");
1707 case TGSI_TOKEN_TYPE_INSTRUCTION
:
1709 unsigned opcode
= parse
.FullToken
.FullInstruction
.Instruction
.Opcode
;
1710 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info(opcode
);
1711 if (!emit_instruction( &bld
, &parse
.FullToken
.FullInstruction
, info
))
1712 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
1713 info
? info
->mnemonic
: "<invalid>");
1718 case TGSI_TOKEN_TYPE_IMMEDIATE
:
1719 /* simply copy the immediate values into the next immediates[] slot */
1721 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
1723 assert(num_immediates
< LP_MAX_IMMEDIATES
);
1724 for( i
= 0; i
< size
; ++i
)
1725 bld
.immediates
[num_immediates
][i
] =
1726 lp_build_const_scalar(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
1727 for( i
= size
; i
< 4; ++i
)
1728 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
1733 case TGSI_TOKEN_TYPE_PROPERTY
:
1741 LLVMBasicBlockRef block
= LLVMGetInsertBlock(builder
);
1742 LLVMValueRef function
= LLVMGetBasicBlockParent(block
);
1743 debug_printf("11111111111111111111111111111 \n");
1744 tgsi_dump(tokens
, 0);
1745 LLVMDumpValue(function
);
1746 debug_printf("2222222222222222222222222222 \n");
1748 tgsi_parse_free( &parse
);