1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_info.h"
45 #include "tgsi/tgsi_parse.h"
46 #include "tgsi/tgsi_util.h"
47 #include "tgsi/tgsi_exec.h"
48 #include "lp_bld_type.h"
49 #include "lp_bld_const.h"
50 #include "lp_bld_arit.h"
51 #include "lp_bld_logic.h"
52 #include "lp_bld_swizzle.h"
53 #include "lp_bld_flow.h"
54 #include "lp_bld_tgsi.h"
55 #include "lp_bld_debug.h"
/* Upper bounds on the TGSI register files this translator supports.
 * NOTE(review): this extraction is garbled (stray line-number prefixes,
 * elided lines) — recover the exact text from the upstream file before use. */
58 #define LP_MAX_TEMPS 256
59 #define LP_MAX_IMMEDIATES 256
/* Iterate CHAN over all vector channels (x,y,z,w). */
62 #define FOR_EACH_CHANNEL( CHAN )\
63 for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)
/* Test whether channel CHAN is set in dst register 0's write mask. */
65 #define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
66 ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))
/* Guard the following statement on dst0's write mask for CHAN. */
68 #define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
69 if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))
/* Iterate only over the channels enabled in dst0's write mask. */
71 #define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
72 FOR_EACH_CHANNEL( CHAN )\
73 IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )
/* Indices of the four pixels within a 2x2 quad, used by the
 * derivative (DDX/DDY) swizzle tables below. */
80 #define QUAD_TOP_LEFT 0
81 #define QUAD_TOP_RIGHT 1
82 #define QUAD_BOTTOM_LEFT 2
83 #define QUAD_BOTTOM_RIGHT 3
/* Maximum depth of nested conditionals tracked by lp_exec_mask. */
85 #define LP_TGSI_MAX_NESTING 16
/* Members of the execution-mask state (the struct's opening line is elided
 * in this extraction — TODO confirm against the upstream file).
 * Tracks per-channel execution masks for nested conditionals. */
88 struct lp_build_context
*bld
;
/* LLVM integer vector type matching bld->type, used for mask bit ops. */
92 LLVMTypeRef int_vec_type
;
/* Stack of saved condition masks, one per nesting level. */
94 LLVMValueRef cond_stack
[LP_TGSI_MAX_NESTING
];
/* Current combined condition mask. */
96 LLVMValueRef cond_mask
;
/* Effective execution mask applied to stores. */
98 LLVMValueRef exec_mask
;
/* Per-shader translation state for the TGSI->LLVM SoA code generator.
 * NOTE(review): braces/lines elided in this extraction. */
101 struct lp_build_tgsi_soa_context
103 struct lp_build_context base
;
/* Pointer to the constant buffer (indexed per scalar, see emit_fetch). */
105 LLVMValueRef consts_ptr
;
106 const LLVMValueRef
*pos
;
/* Input values, one LLVMValueRef per register per channel. */
107 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
/* Output storage (alloca'd pointers — see emit_declaration). */
108 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
/* Texture sampling code generator supplied by the caller. */
110 struct lp_build_sampler_soa
*sampler
;
/* Immediate values, broadcast at declaration time. */
112 LLVMValueRef immediates
[LP_MAX_IMMEDIATES
][NUM_CHANNELS
];
/* Temporary registers: alloca'd pointers, loaded/stored on access. */
113 LLVMValueRef temps
[LP_MAX_TEMPS
][NUM_CHANNELS
];
/* Fragment kill mask (updated by emit_kil). */
115 struct lp_build_mask_context
*mask
;
/* Conditional execution mask state. */
116 struct lp_exec_mask exec_mask
;
/* Quad swizzle tables for computing screen-space derivatives: each table
 * replicates one side of the 2x2 quad across all four pixel slots.
 * NOTE(review): the array-name lines for the first three tables are elided
 * in this extraction; by element pattern they are presumably swizzle_left,
 * swizzle_right and swizzle_top (used by emit_ddx/emit_ddy) — TODO confirm. */
119 static const unsigned char
121 QUAD_TOP_LEFT
, QUAD_TOP_LEFT
,
122 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_LEFT
125 static const unsigned char
127 QUAD_TOP_RIGHT
, QUAD_TOP_RIGHT
,
128 QUAD_BOTTOM_RIGHT
, QUAD_BOTTOM_RIGHT
131 static const unsigned char
133 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
,
134 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
137 static const unsigned char
138 swizzle_bottom
[4] = {
139 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
,
140 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
/* Initialize the execution-mask state: no active mask, empty condition
 * stack, and an integer vector type derived from the build context's type.
 * NOTE(review): at least one line (original 144-145, including the
 * assignment that binds mask->bld) is elided in this extraction. */
143 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
146 mask
->has_mask
= FALSE
;
147 mask
->cond_stack_size
= 0;
149 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
/* Recompute the effective execution mask from the current condition mask.
 * has_mask becomes TRUE once any conditional nesting is active, which makes
 * lp_exec_mask_store switch to masked (select-based) stores. */
152 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
154 mask
->exec_mask
= mask
->cond_mask
;
155 if (mask
->cond_stack_size
> 0)
156 mask
->has_mask
= TRUE
;
/* Enter a conditional: save the current condition mask on the stack and
 * adopt the new condition value (bitcast to the integer mask type).
 * NOTE(review): the parameter line carrying the condition value (used below
 * as 'val') is elided in this extraction. No overflow check against
 * LP_TGSI_MAX_NESTING is visible here. */
159 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
162 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
163 mask
->cond_mask
= LLVMBuildBitCast(mask
->bld
->builder
, val
,
164 mask
->int_vec_type
, "");
166 lp_exec_mask_update(mask
);
/* Invert the current condition (the ELSE of an IF): the new mask is the
 * previous enclosing mask AND NOT the current condition mask. */
169 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
171 LLVMValueRef prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
172 LLVMValueRef inv_mask
= LLVMBuildNot(mask
->bld
->builder
,
173 mask
->cond_mask
, "");
175 /* means that we didn't have any mask before and that
176 * we were fully enabled */
177 if (mask
->cond_stack_size
<= 1) {
178 prev_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
/* NOTE(review): the second operand line of this AND (presumably combining
 * prev_mask with inv_mask) is elided in this extraction — TODO confirm. */
181 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
184 lp_exec_mask_update(mask
);
/* Leave a conditional: restore the saved condition mask from the stack
 * and refresh the effective execution mask. */
187 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
189 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
190 lp_exec_mask_update(mask
);
/* Store 'val' to the pointer 'dst', honoring the execution mask:
 * when a mask is active, load the previous value and select per channel
 * so that disabled channels keep their old contents; otherwise store
 * unconditionally. NOTE(review): parameter lines (val, dst) and the
 * select-operand lines are elided in this extraction. */
193 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
197 if (mask
->has_mask
) {
198 LLVMValueRef real_val
, dst_val
;
200 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
201 real_val
= lp_build_select(mask
->bld
,
205 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
/* Unmasked path: plain store. */
207 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
/* Approximate d/dx within a 2x2 quad: broadcast the left and right pixel
 * columns across the quad and return right - left. */
212 emit_ddx(struct lp_build_tgsi_soa_context
*bld
,
215 LLVMValueRef src_left
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_left
);
216 LLVMValueRef src_right
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_right
);
217 return lp_build_sub(&bld
->base
, src_right
, src_left
);
/* Approximate d/dy within a 2x2 quad: broadcast the top and bottom pixel
 * rows across the quad and return top - bottom (note the sign convention
 * here is top minus bottom, the opposite operand order from emit_ddx). */
222 emit_ddy(struct lp_build_tgsi_soa_context
*bld
,
225 LLVMValueRef src_top
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_top
);
226 LLVMValueRef src_bottom
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_bottom
);
227 return lp_build_sub(&bld
->base
, src_top
, src_bottom
);
/* Register fetch: read one channel of source operand 'index' of 'inst',
 * applying the register's swizzle and sign mode. Returns an SoA vector.
 * NOTE(review): the function-name line and several statements (breaks,
 * default cases, closing braces, final return) are elided in this
 * extraction; name inferred from the call sites below — TODO confirm. */
236 struct lp_build_tgsi_soa_context
*bld
,
237 const struct tgsi_full_instruction
*inst
,
239 const unsigned chan_index
)
241 const struct tgsi_full_src_register
*reg
= &inst
->Src
[index
];
242 unsigned swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
251 switch (reg
->Register
.File
) {
/* Constants live in memory as scalars: GEP to reg*4+swizzle, load,
 * then broadcast the scalar across the SoA vector. */
252 case TGSI_FILE_CONSTANT
: {
253 LLVMValueRef index
= LLVMConstInt(LLVMInt32Type(), reg
->Register
.Index
*4 + swizzle
, 0);
254 LLVMValueRef scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
, &index
, 1, "");
255 LLVMValueRef scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
256 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
/* Immediates and inputs are already SoA vectors: plain array lookup. */
260 case TGSI_FILE_IMMEDIATE
:
261 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
265 case TGSI_FILE_INPUT
:
266 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
/* Temporaries are alloca'd; load the current value. */
270 case TGSI_FILE_TEMPORARY
:
271 res
= LLVMBuildLoad(bld
->base
.builder
, bld
->temps
[reg
->Register
.Index
][swizzle
], "");
/* Error paths (unsupported file / bad result) return undef. */
273 return bld
->base
.undef
;
278 return bld
->base
.undef
;
284 return bld
->base
.undef
;
/* Post-process according to the operand's sign mode. */
287 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
288 case TGSI_UTIL_SIGN_CLEAR
:
289 res
= lp_build_abs( &bld
->base
, res
);
292 case TGSI_UTIL_SIGN_SET
:
293 /* TODO: Use bitwise OR for floating point */
294 res
= lp_build_abs( &bld
->base
, res
);
295 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
296 case TGSI_UTIL_SIGN_TOGGLE
:
299 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
/* SIGN_KEEP: value passes through unchanged. */
302 case TGSI_UTIL_SIGN_KEEP
:
311 * Register fetch with derivatives.
/* Fetch a source channel and optionally its screen-space derivatives.
 * NOTE(review): function-name line and the NULL-guards around the ddx/ddy
 * writes are elided in this extraction — the TODO below suggests inputs
 * could use interpolation coefficients instead of quad differencing. */
315 struct lp_build_tgsi_soa_context
*bld
,
316 const struct tgsi_full_instruction
*inst
,
318 const unsigned chan_index
,
325 src
= emit_fetch(bld
, inst
, index
, chan_index
);
330 /* TODO: use interpolation coeffs for inputs */
333 *ddx
= emit_ddx(bld
, src
);
336 *ddy
= emit_ddy(bld
, src
);
/* Register store: write 'value' to one channel of dst operand 'index',
 * applying the instruction's saturate mode first, then routing through
 * lp_exec_mask_store so conditional masking is honored.
 * NOTE(review): function-name line, break statements and default cases are
 * elided in this extraction — name inferred from context, TODO confirm. */
345 struct lp_build_tgsi_soa_context
*bld
,
346 const struct tgsi_full_instruction
*inst
,
351 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
/* Apply saturation before storing. */
353 switch( inst
->Instruction
.Saturate
) {
/* Clamp to [0, 1]. */
357 case TGSI_SAT_ZERO_ONE
:
358 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
359 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
/* Clamp to [-1, 1]. */
362 case TGSI_SAT_MINUS_PLUS_ONE
:
363 value
= lp_build_max(&bld
->base
, value
, lp_build_const_scalar(bld
->base
.type
, -1.0));
364 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
/* Route to the destination register file; stores go through the
 * execution mask so disabled channels are preserved. */
371 switch( reg
->Register
.File
) {
372 case TGSI_FILE_OUTPUT
:
373 lp_exec_mask_store(&bld
->exec_mask
, value
,
374 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
377 case TGSI_FILE_TEMPORARY
:
378 lp_exec_mask_store(&bld
->exec_mask
, value
,
379 bld
->temps
[reg
->Register
.Index
][chan_index
]);
/* ADDRESS/PREDICATE files: handling elided in this extraction. */
382 case TGSI_FILE_ADDRESS
:
387 case TGSI_FILE_PREDICATE
:
399 * High-level instruction translators.
/* Texture sampling: fetch coordinates (and optional LOD bias / projective
 * divide), then delegate actual texel fetching to the caller-supplied SoA
 * sampler. NOTE(review): the num_coords assignments per texture target,
 * the projection condition, and the result-argument lines of the
 * emit_fetch_texel call are elided in this extraction. */
404 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
405 const struct tgsi_full_instruction
*inst
,
406 boolean apply_lodbias
,
/* Sampler unit comes from src operand 1. */
410 const uint unit
= inst
->Src
[1].Register
.Index
;
411 LLVMValueRef lodbias
;
412 LLVMValueRef oow
= NULL
;
413 LLVMValueRef coords
[3];
/* Coordinate count depends on the texture target (bodies elided). */
417 switch (inst
->Texture
.Texture
) {
418 case TGSI_TEXTURE_1D
:
421 case TGSI_TEXTURE_2D
:
422 case TGSI_TEXTURE_RECT
:
425 case TGSI_TEXTURE_SHADOW1D
:
426 case TGSI_TEXTURE_SHADOW2D
:
427 case TGSI_TEXTURE_SHADOWRECT
:
428 case TGSI_TEXTURE_3D
:
429 case TGSI_TEXTURE_CUBE
:
/* LOD bias comes from src0.w when requested, else zero. */
438 lodbias
= emit_fetch( bld
, inst
, 0, 3 );
440 lodbias
= bld
->base
.zero
;
/* Projective texturing: 1/w from src0.w (guard condition elided). */
443 oow
= emit_fetch( bld
, inst
, 0, 3 );
444 oow
= lp_build_rcp(&bld
->base
, oow
);
/* Fetch each coordinate; scale by 1/w when projecting. */
447 for (i
= 0; i
< num_coords
; i
++) {
448 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
450 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
/* Unused coordinate slots are left undefined. */
452 for (i
= num_coords
; i
< 3; i
++) {
453 coords
[i
] = bld
->base
.undef
;
/* Delegate to the sampler implementation. */
456 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
459 unit
, num_coords
, coords
, lodbias
,
/* Conditional fragment kill (TGSI KIL): fetch each distinct source
 * component once, build a >= 0 survival mask per tested component, AND
 * them together, and fold the result into the shader's kill mask.
 * NOTE(review): function-name line, the 'swizzle'/'mask' declarations and
 * the mask initialization are elided in this extraction. */
466 struct lp_build_tgsi_soa_context
*bld
,
467 const struct tgsi_full_instruction
*inst
)
469 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
470 LLVMValueRef terms
[NUM_CHANNELS
];
474 memset(&terms
, 0, sizeof terms
);
476 FOR_EACH_CHANNEL( chan_index
) {
479 /* Unswizzle channel */
480 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
482 /* Check if the component has not been already tested. */
483 assert(swizzle
< NUM_CHANNELS
);
484 if( !terms
[swizzle
] )
485 /* TODO: change the comparison operator instead of setting the sign */
486 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
/* Combine per-component survival masks: pixel survives if all tested
 * components are >= 0. */
490 FOR_EACH_CHANNEL( chan_index
) {
491 if(terms
[chan_index
]) {
492 LLVMValueRef chan_mask
;
494 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
497 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
/* Fold into the shader-wide kill mask. */
504 lp_build_mask_update(bld
->mask
, mask
);
509 * Check if inst src/dest regs use indirect addressing into temporary
/* Returns whether any src or dst operand of 'inst' addresses the temporary
 * register file indirectly (unsupported — the caller bails out early).
 * NOTE(review): the TRUE/FALSE return statements and the 'i' declaration
 * are elided in this extraction. */
513 indirect_temp_reference(const struct tgsi_full_instruction
*inst
)
/* Scan all source operands. */
516 for (i
= 0; i
< inst
->Instruction
.NumSrcRegs
; i
++) {
517 const struct tgsi_full_src_register
*reg
= &inst
->Src
[i
];
518 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
519 reg
->Register
.Indirect
)
/* Scan all destination operands. */
522 for (i
= 0; i
< inst
->Instruction
.NumDstRegs
; i
++) {
523 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[i
];
524 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
525 reg
->Register
.Indirect
)
/* Process a TGSI declaration: allocate stack storage (alloca) for every
 * channel of each declared temporary and output register in the range
 * [Range.First, Range.Last]. Other register files need no allocation.
 * NOTE(review): function-name line, 'i'/'idx' declarations and break
 * statements are elided in this extraction — name inferred, TODO confirm. */
533 struct lp_build_tgsi_soa_context
*bld
,
534 const struct tgsi_full_declaration
*decl
)
536 unsigned first
= decl
->Range
.First
;
537 unsigned last
= decl
->Range
.Last
;
/* Range is inclusive on both ends. */
540 for (idx
= first
; idx
<= last
; ++idx
) {
543 switch (decl
->Declaration
.File
) {
544 case TGSI_FILE_TEMPORARY
:
545 for (i
= 0; i
< NUM_CHANNELS
; i
++)
546 bld
->temps
[idx
][i
] = lp_build_alloca(&bld
->base
);
550 case TGSI_FILE_OUTPUT
:
551 for (i
= 0; i
< NUM_CHANNELS
; i
++)
552 bld
->outputs
[idx
][i
] = lp_build_alloca(&bld
->base
);
557 /* don't need to declare other vars */
570 struct lp_build_tgsi_soa_context
*bld
,
571 const struct tgsi_full_instruction
*inst
,
572 const struct tgsi_opcode_info
*info
)
575 LLVMValueRef src0
, src1
, src2
;
576 LLVMValueRef tmp0
, tmp1
, tmp2
;
577 LLVMValueRef tmp3
= NULL
;
578 LLVMValueRef tmp4
= NULL
;
579 LLVMValueRef tmp5
= NULL
;
580 LLVMValueRef tmp6
= NULL
;
581 LLVMValueRef tmp7
= NULL
;
583 LLVMValueRef dst0
[NUM_CHANNELS
];
585 /* we can't handle indirect addressing into temp register file yet */
586 if (indirect_temp_reference(inst
))
590 * Stores and write masks are handled in a general fashion after the long
591 * instruction opcode switch statement.
593 * Although not stricitly necessary, we avoid generating instructions for
594 * channels which won't be stored, in cases where's that easy. For some
595 * complex instructions, like texture sampling, it is more convenient to
596 * assume a full writemask and then let LLVM optimization passes eliminate
600 assert(info
->num_dst
<= 1);
602 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
603 dst0
[chan_index
] = bld
->base
.undef
;
607 switch (inst
->Instruction
.Opcode
) {
609 case TGSI_OPCODE_ARL
:
611 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
612 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
615 dst0
[chan_index
] = tmp0
;
620 case TGSI_OPCODE_MOV
:
621 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
622 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
626 case TGSI_OPCODE_LIT
:
627 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
628 dst0
[CHAN_X
] = bld
->base
.one
;
630 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
631 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
632 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
634 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
635 /* XMM[1] = SrcReg[0].yyyy */
636 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
637 /* XMM[1] = max(XMM[1], 0) */
638 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
639 /* XMM[2] = SrcReg[0].wwww */
640 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
641 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
642 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
643 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
644 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
646 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
647 dst0
[CHAN_W
] = bld
->base
.one
;
651 case TGSI_OPCODE_RCP
:
652 /* TGSI_OPCODE_RECIP */
653 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
654 res
= lp_build_rcp(&bld
->base
, src0
);
655 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
656 dst0
[chan_index
] = res
;
660 case TGSI_OPCODE_RSQ
:
661 /* TGSI_OPCODE_RECIPSQRT */
662 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
663 src0
= lp_build_abs(&bld
->base
, src0
);
664 res
= lp_build_rsqrt(&bld
->base
, src0
);
665 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
666 dst0
[chan_index
] = res
;
670 case TGSI_OPCODE_EXP
:
671 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
672 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
673 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
674 LLVMValueRef
*p_exp2_int_part
= NULL
;
675 LLVMValueRef
*p_frac_part
= NULL
;
676 LLVMValueRef
*p_exp2
= NULL
;
678 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
680 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
681 p_exp2_int_part
= &tmp0
;
682 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
684 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
687 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
689 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
691 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
693 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
697 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
698 dst0
[CHAN_W
] = bld
->base
.one
;
702 case TGSI_OPCODE_LOG
:
703 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
704 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
705 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
706 LLVMValueRef
*p_floor_log2
= NULL
;
707 LLVMValueRef
*p_exp
= NULL
;
708 LLVMValueRef
*p_log2
= NULL
;
710 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
711 src0
= lp_build_abs( &bld
->base
, src0
);
713 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
714 p_floor_log2
= &tmp0
;
715 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
717 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
720 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
722 /* dst.x = floor(lg2(abs(src.x))) */
723 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
725 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
726 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
727 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
729 /* dst.z = lg2(abs(src.x)) */
730 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
734 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
735 dst0
[CHAN_W
] = bld
->base
.one
;
739 case TGSI_OPCODE_MUL
:
740 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
741 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
742 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
743 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
747 case TGSI_OPCODE_ADD
:
748 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
749 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
750 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
751 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
755 case TGSI_OPCODE_DP3
:
756 /* TGSI_OPCODE_DOT3 */
757 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
758 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
759 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
760 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
761 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
762 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
763 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
764 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
765 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
766 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
767 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
768 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
769 dst0
[chan_index
] = tmp0
;
773 case TGSI_OPCODE_DP4
:
774 /* TGSI_OPCODE_DOT4 */
775 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
776 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
777 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
778 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
779 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
780 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
781 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
782 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
783 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
784 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
785 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
786 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
787 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
788 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
789 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
790 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
791 dst0
[chan_index
] = tmp0
;
795 case TGSI_OPCODE_DST
:
796 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
797 dst0
[CHAN_X
] = bld
->base
.one
;
799 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
800 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
801 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
802 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
804 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
805 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
807 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
808 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
812 case TGSI_OPCODE_MIN
:
813 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
814 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
815 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
816 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
820 case TGSI_OPCODE_MAX
:
821 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
822 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
823 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
824 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
828 case TGSI_OPCODE_SLT
:
829 /* TGSI_OPCODE_SETLT */
830 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
831 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
832 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
833 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
834 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
838 case TGSI_OPCODE_SGE
:
839 /* TGSI_OPCODE_SETGE */
840 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
841 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
842 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
843 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
844 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
848 case TGSI_OPCODE_MAD
:
849 /* TGSI_OPCODE_MADD */
850 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
851 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
852 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
853 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
854 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
855 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
856 dst0
[chan_index
] = tmp0
;
860 case TGSI_OPCODE_SUB
:
861 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
862 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
863 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
864 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
868 case TGSI_OPCODE_LRP
:
869 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
870 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
871 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
872 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
873 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
874 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
875 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
879 case TGSI_OPCODE_CND
:
880 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
881 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
882 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
883 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
884 tmp1
= lp_build_const_scalar(bld
->base
.type
, 0.5);
885 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
886 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
890 case TGSI_OPCODE_DP2A
:
891 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
892 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
893 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
894 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
895 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
896 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
897 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
898 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
899 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
900 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
901 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
905 case TGSI_OPCODE_FRC
:
906 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
907 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
908 tmp0
= lp_build_floor(&bld
->base
, src0
);
909 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
910 dst0
[chan_index
] = tmp0
;
914 case TGSI_OPCODE_CLAMP
:
915 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
916 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
917 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
918 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
919 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
920 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
921 dst0
[chan_index
] = tmp0
;
925 case TGSI_OPCODE_FLR
:
926 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
927 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
928 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
932 case TGSI_OPCODE_ROUND
:
933 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
934 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
935 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
939 case TGSI_OPCODE_EX2
: {
940 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
941 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
942 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
943 dst0
[chan_index
] = tmp0
;
948 case TGSI_OPCODE_LG2
:
949 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
950 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
951 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
952 dst0
[chan_index
] = tmp0
;
956 case TGSI_OPCODE_POW
:
957 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
958 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
959 res
= lp_build_pow( &bld
->base
, src0
, src1
);
960 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
961 dst0
[chan_index
] = res
;
965 case TGSI_OPCODE_XPD
:
966 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
967 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
968 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
969 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
971 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
972 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
973 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
974 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
976 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
978 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
980 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
981 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
984 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
985 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
986 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
987 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
989 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
990 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
991 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
992 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
995 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
996 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
997 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
998 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1001 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1002 dst0
[CHAN_W
] = bld
->base
.one
;
1006 case TGSI_OPCODE_ABS
:
1007 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1008 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1009 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1013 case TGSI_OPCODE_RCC
:
1018 case TGSI_OPCODE_DPH
:
1019 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1020 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1021 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1022 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1023 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1024 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1025 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1026 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1027 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1028 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1029 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1030 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1031 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1032 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1033 dst0
[chan_index
] = tmp0
;
1037 case TGSI_OPCODE_COS
:
1038 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1039 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1040 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1041 dst0
[chan_index
] = tmp0
;
1045 case TGSI_OPCODE_DDX
:
1046 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1047 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1051 case TGSI_OPCODE_DDY
:
1052 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1053 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1057 case TGSI_OPCODE_KILP
:
1058 /* predicated kill */
1063 case TGSI_OPCODE_KIL
:
1064 /* conditional kill */
1065 emit_kil( bld
, inst
);
1068 case TGSI_OPCODE_PK2H
:
1072 case TGSI_OPCODE_PK2US
:
1076 case TGSI_OPCODE_PK4B
:
1080 case TGSI_OPCODE_PK4UB
:
1084 case TGSI_OPCODE_RFL
:
1088 case TGSI_OPCODE_SEQ
:
1089 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1090 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1091 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1092 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1093 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1097 case TGSI_OPCODE_SFL
:
1098 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1099 dst0
[chan_index
] = bld
->base
.zero
;
1103 case TGSI_OPCODE_SGT
:
1104 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1105 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1106 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1107 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1108 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1112 case TGSI_OPCODE_SIN
:
1113 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1114 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1115 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1116 dst0
[chan_index
] = tmp0
;
1120 case TGSI_OPCODE_SLE
:
1121 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1122 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1123 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1124 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1125 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1129 case TGSI_OPCODE_SNE
:
1130 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1131 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1132 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1133 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1134 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1138 case TGSI_OPCODE_STR
:
1139 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1140 dst0
[chan_index
] = bld
->base
.one
;
1144 case TGSI_OPCODE_TEX
:
1145 emit_tex( bld
, inst
, FALSE
, FALSE
, dst0
);
1148 case TGSI_OPCODE_TXD
:
1153 case TGSI_OPCODE_UP2H
:
1159 case TGSI_OPCODE_UP2US
:
1165 case TGSI_OPCODE_UP4B
:
1171 case TGSI_OPCODE_UP4UB
:
1177 case TGSI_OPCODE_X2D
:
1183 case TGSI_OPCODE_ARA
:
1190 case TGSI_OPCODE_ARR
:
1192 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1193 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1194 emit_rnd( bld
, 0, 0 );
1195 emit_f2it( bld
, 0 );
1196 dst0
[chan_index
] = tmp0
;
1201 case TGSI_OPCODE_BRA
:
1207 case TGSI_OPCODE_CAL
:
1212 case TGSI_OPCODE_RET
:
1217 case TGSI_OPCODE_END
:
1220 case TGSI_OPCODE_SSG
:
1221 /* TGSI_OPCODE_SGN */
1222 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1223 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1224 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1228 case TGSI_OPCODE_CMP
:
1229 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1230 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1231 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1232 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1233 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1234 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1238 case TGSI_OPCODE_SCS
:
1239 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1240 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1241 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1243 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1244 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1245 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1247 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1248 dst0
[CHAN_Z
] = bld
->base
.zero
;
1250 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1251 dst0
[CHAN_W
] = bld
->base
.one
;
1255 case TGSI_OPCODE_TXB
:
1256 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1259 case TGSI_OPCODE_NRM
:
1261 case TGSI_OPCODE_NRM4
:
1262 /* 3 or 4-component normalization */
1264 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1266 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1267 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1268 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1269 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1271 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1274 /* xmm0 = src.x * src.x */
1275 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1276 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1279 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1282 /* xmm0 = xmm0 + src.y * src.y */
1283 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1284 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1287 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1288 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1291 /* xmm0 = xmm0 + src.z * src.z */
1292 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1293 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1296 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1297 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1301 /* xmm0 = xmm0 + src.w * src.w */
1302 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1303 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1306 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1307 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1310 /* xmm1 = 1 / sqrt(xmm0) */
1311 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1313 /* dst.x = xmm1 * src.x */
1314 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1315 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1318 /* dst.y = xmm1 * src.y */
1319 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1320 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1323 /* dst.z = xmm1 * src.z */
1324 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1325 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1328 /* dst.w = xmm1 * src.w */
1329 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1330 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1335 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1336 dst0
[CHAN_W
] = bld
->base
.one
;
1341 case TGSI_OPCODE_DIV
:
1347 case TGSI_OPCODE_DP2
:
1348 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1349 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1350 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1351 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1352 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1353 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1354 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1355 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1356 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1360 case TGSI_OPCODE_TXL
:
1361 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1364 case TGSI_OPCODE_TXP
:
1365 emit_tex( bld
, inst
, FALSE
, TRUE
, dst0
);
1368 case TGSI_OPCODE_BRK
:
1373 case TGSI_OPCODE_IF
:
1374 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1375 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1378 case TGSI_OPCODE_BGNFOR
:
1384 case TGSI_OPCODE_REP
:
1390 case TGSI_OPCODE_ELSE
:
1391 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1394 case TGSI_OPCODE_ENDIF
:
1395 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1398 case TGSI_OPCODE_ENDFOR
:
1404 case TGSI_OPCODE_ENDREP
:
1410 case TGSI_OPCODE_PUSHA
:
1416 case TGSI_OPCODE_POPA
:
1422 case TGSI_OPCODE_CEIL
:
1423 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1424 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1425 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1429 case TGSI_OPCODE_I2F
:
1435 case TGSI_OPCODE_NOT
:
1441 case TGSI_OPCODE_TRUNC
:
1442 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1443 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1444 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
1448 case TGSI_OPCODE_SHL
:
1454 case TGSI_OPCODE_ISHR
:
1460 case TGSI_OPCODE_AND
:
1466 case TGSI_OPCODE_OR
:
1472 case TGSI_OPCODE_MOD
:
1478 case TGSI_OPCODE_XOR
:
1484 case TGSI_OPCODE_SAD
:
1490 case TGSI_OPCODE_TXF
:
1496 case TGSI_OPCODE_TXQ
:
1502 case TGSI_OPCODE_CONT
:
1507 case TGSI_OPCODE_EMIT
:
1511 case TGSI_OPCODE_ENDPRIM
:
1515 case TGSI_OPCODE_NOP
:
1523 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1524 emit_store( bld
, inst
, 0, chan_index
, dst0
[chan_index
]);
1533 lp_build_tgsi_soa(LLVMBuilderRef builder
,
1534 const struct tgsi_token
*tokens
,
1535 struct lp_type type
,
1536 struct lp_build_mask_context
*mask
,
1537 LLVMValueRef consts_ptr
,
1538 const LLVMValueRef
*pos
,
1539 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
1540 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
1541 struct lp_build_sampler_soa
*sampler
)
1543 struct lp_build_tgsi_soa_context bld
;
1544 struct tgsi_parse_context parse
;
1545 uint num_immediates
= 0;
1548 /* Setup build context */
1549 memset(&bld
, 0, sizeof bld
);
1550 lp_build_context_init(&bld
.base
, builder
, type
);
1553 bld
.inputs
= inputs
;
1554 bld
.outputs
= outputs
;
1555 bld
.consts_ptr
= consts_ptr
;
1556 bld
.sampler
= sampler
;
1558 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
1560 tgsi_parse_init( &parse
, tokens
);
1562 while( !tgsi_parse_end_of_tokens( &parse
) ) {
1563 tgsi_parse_token( &parse
);
1565 switch( parse
.FullToken
.Token
.Type
) {
1566 case TGSI_TOKEN_TYPE_DECLARATION
:
1567 /* Inputs already interpolated */
1569 if (!emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
))
1570 _debug_printf("warning: failed to define LLVM variable\n");
1574 case TGSI_TOKEN_TYPE_INSTRUCTION
:
1576 unsigned opcode
= parse
.FullToken
.FullInstruction
.Instruction
.Opcode
;
1577 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info(opcode
);
1578 if (!emit_instruction( &bld
, &parse
.FullToken
.FullInstruction
, info
))
1579 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
1580 info
? info
->mnemonic
: "<invalid>");
1585 case TGSI_TOKEN_TYPE_IMMEDIATE
:
1586 /* simply copy the immediate values into the next immediates[] slot */
1588 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
1590 assert(num_immediates
< LP_MAX_IMMEDIATES
);
1591 for( i
= 0; i
< size
; ++i
)
1592 bld
.immediates
[num_immediates
][i
] =
1593 lp_build_const_scalar(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
1594 for( i
= size
; i
< 4; ++i
)
1595 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
1600 case TGSI_TOKEN_TYPE_PROPERTY
:
1608 tgsi_parse_free( &parse
);