1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_dump.h"
45 #include "tgsi/tgsi_info.h"
46 #include "tgsi/tgsi_parse.h"
47 #include "tgsi/tgsi_util.h"
48 #include "tgsi/tgsi_exec.h"
49 #include "lp_bld_type.h"
50 #include "lp_bld_const.h"
51 #include "lp_bld_arit.h"
52 #include "lp_bld_logic.h"
53 #include "lp_bld_swizzle.h"
54 #include "lp_bld_flow.h"
55 #include "lp_bld_tgsi.h"
56 #include "lp_bld_debug.h"
/* Upper bounds on the TGSI register files we mirror as LLVM allocas. */
#define LP_MAX_TEMPS 256
#define LP_MAX_IMMEDIATES 256


/* Iterate CHAN over all four vector channels. */
#define FOR_EACH_CHANNEL( CHAN )\
   for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)

/* Test the write mask of the instruction's first destination register. */
#define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))

#define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))

#define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
   FOR_EACH_CHANNEL( CHAN )\
      IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )

/* NOTE(review): CHAN_X..CHAN_W are used throughout this file but their
 * definitions were lost in extraction — reconstructed here; confirm
 * against the original header block. */
#define CHAN_X 0
#define CHAN_Y 1
#define CHAN_Z 2
#define CHAN_W 3

/* Indices of the four pixels within a 2x2 quad. */
#define QUAD_TOP_LEFT     0
#define QUAD_TOP_RIGHT    1
#define QUAD_BOTTOM_LEFT  2
#define QUAD_BOTTOM_RIGHT 3

/* Maximum nesting depth of conditionals/loops we track masks for. */
#define LP_TGSI_MAX_NESTING 16
89 struct lp_build_context
*bld
;
93 LLVMTypeRef int_vec_type
;
95 LLVMValueRef cond_stack
[LP_TGSI_MAX_NESTING
];
97 LLVMValueRef cond_mask
;
99 LLVMValueRef break_stack
[LP_TGSI_MAX_NESTING
];
100 int break_stack_size
;
101 LLVMValueRef break_mask
;
103 LLVMValueRef cont_stack
[LP_TGSI_MAX_NESTING
];
105 LLVMValueRef cont_mask
;
107 LLVMBasicBlockRef loop_stack
[LP_TGSI_MAX_NESTING
];
109 LLVMBasicBlockRef loop_block
;
112 LLVMValueRef exec_mask
;
115 struct lp_build_tgsi_soa_context
117 struct lp_build_context base
;
119 LLVMValueRef consts_ptr
;
120 const LLVMValueRef
*pos
;
121 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
122 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
124 struct lp_build_sampler_soa
*sampler
;
126 LLVMValueRef immediates
[LP_MAX_IMMEDIATES
][NUM_CHANNELS
];
127 LLVMValueRef temps
[LP_MAX_TEMPS
][NUM_CHANNELS
];
129 struct lp_build_mask_context
*mask
;
130 struct lp_exec_mask exec_mask
;
133 static const unsigned char
135 QUAD_TOP_LEFT
, QUAD_TOP_LEFT
,
136 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_LEFT
139 static const unsigned char
141 QUAD_TOP_RIGHT
, QUAD_TOP_RIGHT
,
142 QUAD_BOTTOM_RIGHT
, QUAD_BOTTOM_RIGHT
145 static const unsigned char
147 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
,
148 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
151 static const unsigned char
152 swizzle_bottom
[4] = {
153 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
,
154 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
157 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
160 mask
->has_mask
= FALSE
;
161 mask
->cond_stack_size
= 0;
162 mask
->loop_stack_size
= 0;
163 mask
->break_stack_size
= 0;
164 mask
->cont_stack_size
= 0;
166 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
169 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
171 if (mask
->loop_stack_size
) {
172 /*for loops we need to update the entire mask at
175 tmp
= LLVMBuildAnd(mask
->bld
->builder
,
179 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
184 mask
->exec_mask
= mask
->cond_mask
;
187 mask
->has_mask
= (mask
->cond_stack_size
> 0 ||
188 mask
->loop_stack_size
> 0);
191 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
194 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
195 mask
->cond_mask
= LLVMBuildBitCast(mask
->bld
->builder
, val
,
196 mask
->int_vec_type
, "");
198 lp_exec_mask_update(mask
);
201 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
203 LLVMValueRef prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
204 LLVMValueRef inv_mask
= LLVMBuildNot(mask
->bld
->builder
,
205 mask
->cond_mask
, "");
207 /* means that we didn't have any mask before and that
208 * we were fully enabled */
209 if (mask
->cond_stack_size
<= 1) {
210 prev_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
213 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
216 lp_exec_mask_update(mask
);
219 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
221 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
222 lp_exec_mask_update(mask
);
225 static void lp_exec_bgnloop(struct lp_exec_mask
*mask
)
228 if (mask
->cont_stack_size
== 0)
229 mask
->cont_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
230 if (mask
->cont_stack_size
== 0)
231 mask
->break_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
232 if (mask
->cond_stack_size
== 0)
233 mask
->cond_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
234 mask
->loop_stack
[mask
->loop_stack_size
++] = mask
->loop_block
;
235 mask
->loop_block
= lp_build_insert_new_block(mask
->bld
->builder
, "bgnloop");
236 LLVMBuildBr(mask
->bld
->builder
, mask
->loop_block
);
237 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, mask
->loop_block
);
239 lp_exec_mask_update(mask
);
242 static void lp_exec_break(struct lp_exec_mask
*mask
)
244 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
248 mask
->break_stack
[mask
->break_stack_size
++] = mask
->break_mask
;
249 if (mask
->break_stack_size
> 1) {
250 mask
->break_mask
= LLVMBuildAnd(mask
->bld
->builder
,
252 exec_mask
, "break_full");
254 mask
->break_mask
= exec_mask
;
256 lp_exec_mask_update(mask
);
259 static void lp_exec_continue(struct lp_exec_mask
*mask
)
261 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
265 mask
->cont_stack
[mask
->cont_stack_size
++] = mask
->cont_mask
;
266 if (mask
->cont_stack_size
> 1) {
267 mask
->cont_mask
= LLVMBuildAnd(mask
->bld
->builder
,
271 mask
->cont_mask
= exec_mask
;
273 lp_exec_mask_update(mask
);
277 static void lp_exec_endloop(struct lp_exec_mask
*mask
)
279 LLVMBasicBlockRef endloop
;
280 LLVMTypeRef reg_type
= LLVMIntType(mask
->bld
->type
.width
*
281 mask
->bld
->type
.length
);
282 /* i1cond = (mask == 0) */
283 LLVMValueRef i1cond
= LLVMBuildICmp(
286 LLVMBuildBitCast(mask
->bld
->builder
, mask
->break_mask
, reg_type
, ""),
287 LLVMConstNull(reg_type
), "");
289 endloop
= lp_build_insert_new_block(mask
->bld
->builder
, "endloop");
291 LLVMBuildCondBr(mask
->bld
->builder
,
292 i1cond
, mask
->loop_block
, endloop
);
294 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, endloop
);
296 mask
->loop_block
= mask
->loop_stack
[--mask
->loop_stack_size
];
297 /* pop the break mask */
298 if (mask
->cont_stack_size
) {
299 mask
->cont_mask
= mask
->cont_stack
[--mask
->cont_stack_size
];
301 if (mask
->break_stack_size
) {
302 mask
->break_mask
= mask
->cont_stack
[--mask
->break_stack_size
];
305 lp_exec_mask_update(mask
);
308 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
312 if (mask
->has_mask
) {
313 LLVMValueRef real_val
, dst_val
;
315 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
316 real_val
= lp_build_select(mask
->bld
,
320 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
322 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
327 emit_ddx(struct lp_build_tgsi_soa_context
*bld
,
330 LLVMValueRef src_left
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_left
);
331 LLVMValueRef src_right
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_right
);
332 return lp_build_sub(&bld
->base
, src_right
, src_left
);
337 emit_ddy(struct lp_build_tgsi_soa_context
*bld
,
340 LLVMValueRef src_top
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_top
);
341 LLVMValueRef src_bottom
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_bottom
);
342 return lp_build_sub(&bld
->base
, src_top
, src_bottom
);
351 struct lp_build_tgsi_soa_context
*bld
,
352 const struct tgsi_full_instruction
*inst
,
354 const unsigned chan_index
)
356 const struct tgsi_full_src_register
*reg
= &inst
->Src
[index
];
357 unsigned swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
366 switch (reg
->Register
.File
) {
367 case TGSI_FILE_CONSTANT
: {
368 LLVMValueRef index
= LLVMConstInt(LLVMInt32Type(), reg
->Register
.Index
*4 + swizzle
, 0);
369 LLVMValueRef scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
, &index
, 1, "");
370 LLVMValueRef scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
371 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
375 case TGSI_FILE_IMMEDIATE
:
376 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
380 case TGSI_FILE_INPUT
:
381 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
385 case TGSI_FILE_TEMPORARY
:
386 res
= LLVMBuildLoad(bld
->base
.builder
, bld
->temps
[reg
->Register
.Index
][swizzle
], "");
388 return bld
->base
.undef
;
393 return bld
->base
.undef
;
399 return bld
->base
.undef
;
402 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
403 case TGSI_UTIL_SIGN_CLEAR
:
404 res
= lp_build_abs( &bld
->base
, res
);
407 case TGSI_UTIL_SIGN_SET
:
408 /* TODO: Use bitwese OR for floating point */
409 res
= lp_build_abs( &bld
->base
, res
);
410 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
413 case TGSI_UTIL_SIGN_TOGGLE
:
414 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
417 case TGSI_UTIL_SIGN_KEEP
:
426 * Register fetch with derivatives.
430 struct lp_build_tgsi_soa_context
*bld
,
431 const struct tgsi_full_instruction
*inst
,
433 const unsigned chan_index
,
440 src
= emit_fetch(bld
, inst
, index
, chan_index
);
445 /* TODO: use interpolation coeffs for inputs */
448 *ddx
= emit_ddx(bld
, src
);
451 *ddy
= emit_ddy(bld
, src
);
460 struct lp_build_tgsi_soa_context
*bld
,
461 const struct tgsi_full_instruction
*inst
,
466 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
468 switch( inst
->Instruction
.Saturate
) {
472 case TGSI_SAT_ZERO_ONE
:
473 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
474 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
477 case TGSI_SAT_MINUS_PLUS_ONE
:
478 value
= lp_build_max(&bld
->base
, value
, lp_build_const_vec(bld
->base
.type
, -1.0));
479 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
486 switch( reg
->Register
.File
) {
487 case TGSI_FILE_OUTPUT
:
488 lp_exec_mask_store(&bld
->exec_mask
, value
,
489 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
492 case TGSI_FILE_TEMPORARY
:
493 lp_exec_mask_store(&bld
->exec_mask
, value
,
494 bld
->temps
[reg
->Register
.Index
][chan_index
]);
497 case TGSI_FILE_ADDRESS
:
502 case TGSI_FILE_PREDICATE
:
514 * High-level instruction translators.
519 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
520 const struct tgsi_full_instruction
*inst
,
521 boolean apply_lodbias
,
525 const uint unit
= inst
->Src
[1].Register
.Index
;
526 LLVMValueRef lodbias
;
527 LLVMValueRef oow
= NULL
;
528 LLVMValueRef coords
[3];
532 switch (inst
->Texture
.Texture
) {
533 case TGSI_TEXTURE_1D
:
536 case TGSI_TEXTURE_2D
:
537 case TGSI_TEXTURE_RECT
:
540 case TGSI_TEXTURE_SHADOW1D
:
541 case TGSI_TEXTURE_SHADOW2D
:
542 case TGSI_TEXTURE_SHADOWRECT
:
543 case TGSI_TEXTURE_3D
:
544 case TGSI_TEXTURE_CUBE
:
553 lodbias
= emit_fetch( bld
, inst
, 0, 3 );
555 lodbias
= bld
->base
.zero
;
558 oow
= emit_fetch( bld
, inst
, 0, 3 );
559 oow
= lp_build_rcp(&bld
->base
, oow
);
562 for (i
= 0; i
< num_coords
; i
++) {
563 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
565 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
567 for (i
= num_coords
; i
< 3; i
++) {
568 coords
[i
] = bld
->base
.undef
;
571 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
574 unit
, num_coords
, coords
, lodbias
,
581 struct lp_build_tgsi_soa_context
*bld
,
582 const struct tgsi_full_instruction
*inst
)
584 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
585 LLVMValueRef terms
[NUM_CHANNELS
];
589 memset(&terms
, 0, sizeof terms
);
591 FOR_EACH_CHANNEL( chan_index
) {
594 /* Unswizzle channel */
595 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
597 /* Check if the component has not been already tested. */
598 assert(swizzle
< NUM_CHANNELS
);
599 if( !terms
[swizzle
] )
600 /* TODO: change the comparison operator instead of setting the sign */
601 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
605 FOR_EACH_CHANNEL( chan_index
) {
606 if(terms
[chan_index
]) {
607 LLVMValueRef chan_mask
;
609 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
612 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
619 lp_build_mask_update(bld
->mask
, mask
);
624 * Check if inst src/dest regs use indirect addressing into temporary
628 indirect_temp_reference(const struct tgsi_full_instruction
*inst
)
631 for (i
= 0; i
< inst
->Instruction
.NumSrcRegs
; i
++) {
632 const struct tgsi_full_src_register
*reg
= &inst
->Src
[i
];
633 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
634 reg
->Register
.Indirect
)
637 for (i
= 0; i
< inst
->Instruction
.NumDstRegs
; i
++) {
638 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[i
];
639 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
640 reg
->Register
.Indirect
)
648 struct lp_build_tgsi_soa_context
*bld
,
649 const struct tgsi_full_declaration
*decl
)
651 unsigned first
= decl
->Range
.First
;
652 unsigned last
= decl
->Range
.Last
;
655 for (idx
= first
; idx
<= last
; ++idx
) {
658 switch (decl
->Declaration
.File
) {
659 case TGSI_FILE_TEMPORARY
:
660 for (i
= 0; i
< NUM_CHANNELS
; i
++)
661 bld
->temps
[idx
][i
] = lp_build_alloca(&bld
->base
);
665 case TGSI_FILE_OUTPUT
:
666 for (i
= 0; i
< NUM_CHANNELS
; i
++)
667 bld
->outputs
[idx
][i
] = lp_build_alloca(&bld
->base
);
672 /* don't need to declare other vars */
685 struct lp_build_tgsi_soa_context
*bld
,
686 const struct tgsi_full_instruction
*inst
,
687 const struct tgsi_opcode_info
*info
)
690 LLVMValueRef src0
, src1
, src2
;
691 LLVMValueRef tmp0
, tmp1
, tmp2
;
692 LLVMValueRef tmp3
= NULL
;
693 LLVMValueRef tmp4
= NULL
;
694 LLVMValueRef tmp5
= NULL
;
695 LLVMValueRef tmp6
= NULL
;
696 LLVMValueRef tmp7
= NULL
;
698 LLVMValueRef dst0
[NUM_CHANNELS
];
700 /* we can't handle indirect addressing into temp register file yet */
701 if (indirect_temp_reference(inst
))
705 * Stores and write masks are handled in a general fashion after the long
706 * instruction opcode switch statement.
708 * Although not stricitly necessary, we avoid generating instructions for
709 * channels which won't be stored, in cases where's that easy. For some
710 * complex instructions, like texture sampling, it is more convenient to
711 * assume a full writemask and then let LLVM optimization passes eliminate
715 assert(info
->num_dst
<= 1);
717 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
718 dst0
[chan_index
] = bld
->base
.undef
;
722 switch (inst
->Instruction
.Opcode
) {
724 case TGSI_OPCODE_ARL
:
726 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
727 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
730 dst0
[chan_index
] = tmp0
;
735 case TGSI_OPCODE_MOV
:
736 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
737 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
741 case TGSI_OPCODE_LIT
:
742 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
743 dst0
[CHAN_X
] = bld
->base
.one
;
745 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
746 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
747 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
749 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
750 /* XMM[1] = SrcReg[0].yyyy */
751 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
752 /* XMM[1] = max(XMM[1], 0) */
753 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
754 /* XMM[2] = SrcReg[0].wwww */
755 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
756 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
757 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
758 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
759 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
761 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
762 dst0
[CHAN_W
] = bld
->base
.one
;
766 case TGSI_OPCODE_RCP
:
767 /* TGSI_OPCODE_RECIP */
768 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
769 res
= lp_build_rcp(&bld
->base
, src0
);
770 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
771 dst0
[chan_index
] = res
;
775 case TGSI_OPCODE_RSQ
:
776 /* TGSI_OPCODE_RECIPSQRT */
777 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
778 src0
= lp_build_abs(&bld
->base
, src0
);
779 res
= lp_build_rsqrt(&bld
->base
, src0
);
780 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
781 dst0
[chan_index
] = res
;
785 case TGSI_OPCODE_EXP
:
786 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
787 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
788 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
789 LLVMValueRef
*p_exp2_int_part
= NULL
;
790 LLVMValueRef
*p_frac_part
= NULL
;
791 LLVMValueRef
*p_exp2
= NULL
;
793 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
795 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
796 p_exp2_int_part
= &tmp0
;
797 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
799 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
802 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
804 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
806 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
808 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
812 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
813 dst0
[CHAN_W
] = bld
->base
.one
;
817 case TGSI_OPCODE_LOG
:
818 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
819 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
820 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
821 LLVMValueRef
*p_floor_log2
= NULL
;
822 LLVMValueRef
*p_exp
= NULL
;
823 LLVMValueRef
*p_log2
= NULL
;
825 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
826 src0
= lp_build_abs( &bld
->base
, src0
);
828 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
829 p_floor_log2
= &tmp0
;
830 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
832 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
835 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
837 /* dst.x = floor(lg2(abs(src.x))) */
838 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
840 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
841 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
842 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
844 /* dst.z = lg2(abs(src.x)) */
845 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
849 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
850 dst0
[CHAN_W
] = bld
->base
.one
;
854 case TGSI_OPCODE_MUL
:
855 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
856 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
857 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
858 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
862 case TGSI_OPCODE_ADD
:
863 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
864 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
865 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
866 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
870 case TGSI_OPCODE_DP3
:
871 /* TGSI_OPCODE_DOT3 */
872 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
873 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
874 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
875 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
876 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
877 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
878 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
879 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
880 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
881 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
882 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
883 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
884 dst0
[chan_index
] = tmp0
;
888 case TGSI_OPCODE_DP4
:
889 /* TGSI_OPCODE_DOT4 */
890 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
891 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
892 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
893 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
894 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
895 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
896 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
897 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
898 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
899 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
900 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
901 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
902 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
903 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
904 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
905 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
906 dst0
[chan_index
] = tmp0
;
910 case TGSI_OPCODE_DST
:
911 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
912 dst0
[CHAN_X
] = bld
->base
.one
;
914 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
915 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
916 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
917 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
919 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
920 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
922 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
923 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
927 case TGSI_OPCODE_MIN
:
928 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
929 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
930 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
931 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
935 case TGSI_OPCODE_MAX
:
936 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
937 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
938 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
939 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
943 case TGSI_OPCODE_SLT
:
944 /* TGSI_OPCODE_SETLT */
945 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
946 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
947 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
948 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
949 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
953 case TGSI_OPCODE_SGE
:
954 /* TGSI_OPCODE_SETGE */
955 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
956 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
957 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
958 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
959 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
963 case TGSI_OPCODE_MAD
:
964 /* TGSI_OPCODE_MADD */
965 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
966 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
967 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
968 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
969 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
970 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
971 dst0
[chan_index
] = tmp0
;
975 case TGSI_OPCODE_SUB
:
976 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
977 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
978 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
979 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
983 case TGSI_OPCODE_LRP
:
984 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
985 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
986 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
987 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
988 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
989 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
990 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
994 case TGSI_OPCODE_CND
:
995 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
996 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
997 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
998 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
999 tmp1
= lp_build_const_vec(bld
->base
.type
, 0.5);
1000 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
1001 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
1005 case TGSI_OPCODE_DP2A
:
1006 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1007 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1008 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1009 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1010 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1011 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1012 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1013 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
1014 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1015 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1016 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1020 case TGSI_OPCODE_FRC
:
1021 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1022 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1023 tmp0
= lp_build_floor(&bld
->base
, src0
);
1024 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
1025 dst0
[chan_index
] = tmp0
;
1029 case TGSI_OPCODE_CLAMP
:
1030 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1031 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1032 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1033 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1034 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
1035 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
1036 dst0
[chan_index
] = tmp0
;
1040 case TGSI_OPCODE_FLR
:
1041 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1042 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1043 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
1047 case TGSI_OPCODE_ROUND
:
1048 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1049 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1050 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
1054 case TGSI_OPCODE_EX2
: {
1055 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1056 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
1057 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1058 dst0
[chan_index
] = tmp0
;
1063 case TGSI_OPCODE_LG2
:
1064 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1065 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
1066 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1067 dst0
[chan_index
] = tmp0
;
1071 case TGSI_OPCODE_POW
:
1072 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1073 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1074 res
= lp_build_pow( &bld
->base
, src0
, src1
);
1075 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1076 dst0
[chan_index
] = res
;
1080 case TGSI_OPCODE_XPD
:
1081 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1082 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1083 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1084 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1086 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1087 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1088 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1089 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1091 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1093 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
1095 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1096 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
1097 dst0
[CHAN_X
] = tmp2
;
1099 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1100 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1101 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1102 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1104 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1105 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
1106 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
1107 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
1108 dst0
[CHAN_Y
] = tmp3
;
1110 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1111 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1112 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
1113 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1114 dst0
[CHAN_Z
] = tmp5
;
1116 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1117 dst0
[CHAN_W
] = bld
->base
.one
;
1121 case TGSI_OPCODE_ABS
:
1122 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1123 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1124 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1128 case TGSI_OPCODE_RCC
:
1133 case TGSI_OPCODE_DPH
:
1134 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1135 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1136 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1137 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1138 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1139 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1140 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1141 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1142 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1143 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1144 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1145 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1146 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1147 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1148 dst0
[chan_index
] = tmp0
;
1152 case TGSI_OPCODE_COS
:
1153 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1154 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1155 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1156 dst0
[chan_index
] = tmp0
;
1160 case TGSI_OPCODE_DDX
:
1161 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1162 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1166 case TGSI_OPCODE_DDY
:
1167 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1168 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1172 case TGSI_OPCODE_KILP
:
1173 /* predicated kill */
1178 case TGSI_OPCODE_KIL
:
1179 /* conditional kill */
1180 emit_kil( bld
, inst
);
1183 case TGSI_OPCODE_PK2H
:
1187 case TGSI_OPCODE_PK2US
:
1191 case TGSI_OPCODE_PK4B
:
1195 case TGSI_OPCODE_PK4UB
:
1199 case TGSI_OPCODE_RFL
:
1203 case TGSI_OPCODE_SEQ
:
1204 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1205 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1206 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1207 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1208 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1212 case TGSI_OPCODE_SFL
:
1213 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1214 dst0
[chan_index
] = bld
->base
.zero
;
1218 case TGSI_OPCODE_SGT
:
1219 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1220 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1221 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1222 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1223 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1227 case TGSI_OPCODE_SIN
:
1228 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1229 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1230 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1231 dst0
[chan_index
] = tmp0
;
1235 case TGSI_OPCODE_SLE
:
1236 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1237 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1238 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1239 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1240 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1244 case TGSI_OPCODE_SNE
:
1245 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1246 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1247 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1248 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1249 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1253 case TGSI_OPCODE_STR
:
1254 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1255 dst0
[chan_index
] = bld
->base
.one
;
1259 case TGSI_OPCODE_TEX
:
1260 emit_tex( bld
, inst
, FALSE
, FALSE
, dst0
);
1263 case TGSI_OPCODE_TXD
:
1268 case TGSI_OPCODE_UP2H
:
1274 case TGSI_OPCODE_UP2US
:
1280 case TGSI_OPCODE_UP4B
:
1286 case TGSI_OPCODE_UP4UB
:
1292 case TGSI_OPCODE_X2D
:
1298 case TGSI_OPCODE_ARA
:
1305 case TGSI_OPCODE_ARR
:
1307 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1308 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1309 emit_rnd( bld
, 0, 0 );
1310 emit_f2it( bld
, 0 );
1311 dst0
[chan_index
] = tmp0
;
1316 case TGSI_OPCODE_BRA
:
1322 case TGSI_OPCODE_CAL
:
1327 case TGSI_OPCODE_RET
:
1332 case TGSI_OPCODE_END
:
1335 case TGSI_OPCODE_SSG
:
1336 /* TGSI_OPCODE_SGN */
1337 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1338 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1339 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1343 case TGSI_OPCODE_CMP
:
1344 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1345 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1346 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1347 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1348 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1349 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1353 case TGSI_OPCODE_SCS
:
1354 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1355 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1356 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1358 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1359 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1360 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1362 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1363 dst0
[CHAN_Z
] = bld
->base
.zero
;
1365 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1366 dst0
[CHAN_W
] = bld
->base
.one
;
1370 case TGSI_OPCODE_TXB
:
1371 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1374 case TGSI_OPCODE_NRM
:
1376 case TGSI_OPCODE_NRM4
:
1377 /* 3 or 4-component normalization */
1379 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1381 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1382 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1383 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1384 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1386 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1389 /* xmm0 = src.x * src.x */
1390 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1391 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1394 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1397 /* xmm0 = xmm0 + src.y * src.y */
1398 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1399 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1402 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1403 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1406 /* xmm0 = xmm0 + src.z * src.z */
1407 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1408 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1411 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1412 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1416 /* xmm0 = xmm0 + src.w * src.w */
1417 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1418 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1421 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1422 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1425 /* xmm1 = 1 / sqrt(xmm0) */
1426 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1428 /* dst.x = xmm1 * src.x */
1429 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1430 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1433 /* dst.y = xmm1 * src.y */
1434 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1435 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1438 /* dst.z = xmm1 * src.z */
1439 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1440 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1443 /* dst.w = xmm1 * src.w */
1444 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1445 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1450 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1451 dst0
[CHAN_W
] = bld
->base
.one
;
1456 case TGSI_OPCODE_DIV
:
1462 case TGSI_OPCODE_DP2
:
1463 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1464 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1465 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1466 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1467 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1468 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1469 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1470 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1471 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1475 case TGSI_OPCODE_TXL
:
1476 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1479 case TGSI_OPCODE_TXP
:
1480 emit_tex( bld
, inst
, FALSE
, TRUE
, dst0
);
1483 case TGSI_OPCODE_BRK
:
1484 lp_exec_break(&bld
->exec_mask
);
1487 case TGSI_OPCODE_IF
:
1488 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1489 tmp0
= lp_build_cmp(&bld
->base
, PIPE_FUNC_NOTEQUAL
,
1490 tmp0
, bld
->base
.zero
);
1491 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1494 case TGSI_OPCODE_BGNFOR
:
1500 case TGSI_OPCODE_BGNLOOP
:
1501 lp_exec_bgnloop(&bld
->exec_mask
);
1504 case TGSI_OPCODE_REP
:
1510 case TGSI_OPCODE_ELSE
:
1511 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1514 case TGSI_OPCODE_ENDIF
:
1515 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1518 case TGSI_OPCODE_ENDFOR
:
1524 case TGSI_OPCODE_ENDLOOP
:
1525 lp_exec_endloop(&bld
->exec_mask
);
1528 case TGSI_OPCODE_ENDREP
:
1534 case TGSI_OPCODE_PUSHA
:
1540 case TGSI_OPCODE_POPA
:
1546 case TGSI_OPCODE_CEIL
:
1547 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1548 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1549 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1553 case TGSI_OPCODE_I2F
:
1559 case TGSI_OPCODE_NOT
:
1565 case TGSI_OPCODE_TRUNC
:
1566 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1567 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1568 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
1572 case TGSI_OPCODE_SHL
:
1578 case TGSI_OPCODE_ISHR
:
1584 case TGSI_OPCODE_AND
:
1590 case TGSI_OPCODE_OR
:
1596 case TGSI_OPCODE_MOD
:
1602 case TGSI_OPCODE_XOR
:
1608 case TGSI_OPCODE_SAD
:
1614 case TGSI_OPCODE_TXF
:
1620 case TGSI_OPCODE_TXQ
:
1626 case TGSI_OPCODE_CONT
:
1627 lp_exec_continue(&bld
->exec_mask
);
1630 case TGSI_OPCODE_EMIT
:
1634 case TGSI_OPCODE_ENDPRIM
:
1638 case TGSI_OPCODE_NOP
:
1646 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1647 emit_store( bld
, inst
, 0, chan_index
, dst0
[chan_index
]);
1656 lp_build_tgsi_soa(LLVMBuilderRef builder
,
1657 const struct tgsi_token
*tokens
,
1658 struct lp_type type
,
1659 struct lp_build_mask_context
*mask
,
1660 LLVMValueRef consts_ptr
,
1661 const LLVMValueRef
*pos
,
1662 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
1663 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
1664 struct lp_build_sampler_soa
*sampler
)
1666 struct lp_build_tgsi_soa_context bld
;
1667 struct tgsi_parse_context parse
;
1668 uint num_immediates
= 0;
1671 /* Setup build context */
1672 memset(&bld
, 0, sizeof bld
);
1673 lp_build_context_init(&bld
.base
, builder
, type
);
1676 bld
.inputs
= inputs
;
1677 bld
.outputs
= outputs
;
1678 bld
.consts_ptr
= consts_ptr
;
1679 bld
.sampler
= sampler
;
1681 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
1683 tgsi_parse_init( &parse
, tokens
);
1685 while( !tgsi_parse_end_of_tokens( &parse
) ) {
1686 tgsi_parse_token( &parse
);
1688 switch( parse
.FullToken
.Token
.Type
) {
1689 case TGSI_TOKEN_TYPE_DECLARATION
:
1690 /* Inputs already interpolated */
1692 if (!emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
))
1693 _debug_printf("warning: failed to define LLVM variable\n");
1697 case TGSI_TOKEN_TYPE_INSTRUCTION
:
1699 unsigned opcode
= parse
.FullToken
.FullInstruction
.Instruction
.Opcode
;
1700 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info(opcode
);
1701 if (!emit_instruction( &bld
, &parse
.FullToken
.FullInstruction
, info
))
1702 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
1703 info
? info
->mnemonic
: "<invalid>");
1708 case TGSI_TOKEN_TYPE_IMMEDIATE
:
1709 /* simply copy the immediate values into the next immediates[] slot */
1711 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
1713 assert(num_immediates
< LP_MAX_IMMEDIATES
);
1714 for( i
= 0; i
< size
; ++i
)
1715 bld
.immediates
[num_immediates
][i
] =
1716 lp_build_const_vec(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
1717 for( i
= size
; i
< 4; ++i
)
1718 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
1723 case TGSI_TOKEN_TYPE_PROPERTY
:
1731 LLVMBasicBlockRef block
= LLVMGetInsertBlock(builder
);
1732 LLVMValueRef function
= LLVMGetBasicBlockParent(block
);
1733 debug_printf("11111111111111111111111111111 \n");
1734 tgsi_dump(tokens
, 0);
1735 LLVMDumpValue(function
);
1736 debug_printf("2222222222222222222222222222 \n");
1738 tgsi_parse_free( &parse
);