1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_dump.h"
45 #include "tgsi/tgsi_info.h"
46 #include "tgsi/tgsi_parse.h"
47 #include "tgsi/tgsi_util.h"
48 #include "tgsi/tgsi_exec.h"
49 #include "lp_bld_type.h"
50 #include "lp_bld_const.h"
51 #include "lp_bld_arit.h"
52 #include "lp_bld_logic.h"
53 #include "lp_bld_swizzle.h"
54 #include "lp_bld_flow.h"
55 #include "lp_bld_tgsi.h"
56 #include "lp_bld_debug.h"
/* Sizes of the per-shader register files we allocate statically. */
#define LP_MAX_TEMPS 256
#define LP_MAX_IMMEDIATES 256

/* Iterate CHAN over all four vector channels (x, y, z, w). */
#define FOR_EACH_CHANNEL( CHAN )\
   for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)

/* Non-zero when channel CHAN is enabled in the first destination's writemask. */
#define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))

#define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))

/* Iterate CHAN over the channels enabled in dst[0]'s writemask. */
#define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
   FOR_EACH_CHANNEL( CHAN )\
      IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )

/* Channel indices. */
#define CHAN_X 0
#define CHAN_Y 1
#define CHAN_Z 2
#define CHAN_W 3

/* Positions of the four fragments within a 2x2 quad. */
#define QUAD_TOP_LEFT     0
#define QUAD_TOP_RIGHT    1
#define QUAD_BOTTOM_LEFT  2
#define QUAD_BOTTOM_RIGHT 3

/* Maximum nesting depth of conditionals/loops tracked by the mask stacks. */
#define LP_TGSI_MAX_NESTING 16
89 struct lp_build_context
*bld
;
93 LLVMTypeRef int_vec_type
;
95 LLVMValueRef cond_stack
[LP_TGSI_MAX_NESTING
];
97 LLVMValueRef cond_mask
;
99 LLVMValueRef break_stack
[LP_TGSI_MAX_NESTING
];
100 int break_stack_size
;
101 LLVMValueRef break_mask
;
103 LLVMValueRef cont_stack
[LP_TGSI_MAX_NESTING
];
105 LLVMValueRef cont_mask
;
107 LLVMBasicBlockRef loop_stack
[LP_TGSI_MAX_NESTING
];
109 LLVMBasicBlockRef loop_block
;
112 LLVMValueRef exec_mask
;
115 struct lp_build_tgsi_soa_context
117 struct lp_build_context base
;
119 LLVMValueRef consts_ptr
;
120 const LLVMValueRef
*pos
;
121 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
122 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
124 struct lp_build_sampler_soa
*sampler
;
126 LLVMValueRef immediates
[LP_MAX_IMMEDIATES
][NUM_CHANNELS
];
127 LLVMValueRef temps
[LP_MAX_TEMPS
][NUM_CHANNELS
];
128 LLVMValueRef addr
[LP_MAX_TEMPS
][NUM_CHANNELS
];
130 struct lp_build_mask_context
*mask
;
131 struct lp_exec_mask exec_mask
;
134 static const unsigned char
136 QUAD_TOP_LEFT
, QUAD_TOP_LEFT
,
137 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_LEFT
140 static const unsigned char
142 QUAD_TOP_RIGHT
, QUAD_TOP_RIGHT
,
143 QUAD_BOTTOM_RIGHT
, QUAD_BOTTOM_RIGHT
146 static const unsigned char
148 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
,
149 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
152 static const unsigned char
153 swizzle_bottom
[4] = {
154 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
,
155 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
158 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
161 mask
->has_mask
= FALSE
;
162 mask
->cond_stack_size
= 0;
163 mask
->loop_stack_size
= 0;
164 mask
->break_stack_size
= 0;
165 mask
->cont_stack_size
= 0;
167 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
170 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
172 if (mask
->loop_stack_size
) {
173 /*for loops we need to update the entire mask at runtime */
175 assert(mask
->break_mask
);
176 tmp
= LLVMBuildAnd(mask
->bld
->builder
,
180 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
185 mask
->exec_mask
= mask
->cond_mask
;
188 mask
->has_mask
= (mask
->cond_stack_size
> 0 ||
189 mask
->loop_stack_size
> 0);
192 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
195 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
196 mask
->cond_mask
= LLVMBuildBitCast(mask
->bld
->builder
, val
,
197 mask
->int_vec_type
, "");
199 lp_exec_mask_update(mask
);
202 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
204 LLVMValueRef prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
205 LLVMValueRef inv_mask
= LLVMBuildNot(mask
->bld
->builder
,
206 mask
->cond_mask
, "");
208 /* means that we didn't have any mask before and that
209 * we were fully enabled */
210 if (mask
->cond_stack_size
<= 1) {
211 prev_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
214 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
217 lp_exec_mask_update(mask
);
220 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
222 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
223 lp_exec_mask_update(mask
);
226 static void lp_exec_bgnloop(struct lp_exec_mask
*mask
)
229 if (mask
->cont_stack_size
== 0)
230 mask
->cont_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
231 if (mask
->break_stack_size
== 0)
232 mask
->break_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
233 if (mask
->cond_stack_size
== 0)
234 mask
->cond_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
236 mask
->break_stack
[mask
->break_stack_size
++] = mask
->break_mask
;
237 mask
->cont_stack
[mask
->cont_stack_size
++] = mask
->cont_mask
;
238 mask
->loop_stack
[mask
->loop_stack_size
++] = mask
->loop_block
;
239 mask
->loop_block
= lp_build_insert_new_block(mask
->bld
->builder
, "bgnloop");
240 LLVMBuildBr(mask
->bld
->builder
, mask
->loop_block
);
241 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, mask
->loop_block
);
243 lp_exec_mask_update(mask
);
246 static void lp_exec_break(struct lp_exec_mask
*mask
)
248 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
252 /* mask->break_stack_size > 1 implies that we encountered a break
253 * statemant already and if that's the case we want to make sure
254 * our mask is a combination of the previous break and the current
256 if (mask
->break_stack_size
> 1) {
257 mask
->break_mask
= LLVMBuildAnd(mask
->bld
->builder
,
259 exec_mask
, "break_full");
261 mask
->break_mask
= exec_mask
;
263 lp_exec_mask_update(mask
);
266 static void lp_exec_continue(struct lp_exec_mask
*mask
)
268 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
272 if (mask
->cont_stack_size
> 1) {
273 mask
->cont_mask
= LLVMBuildAnd(mask
->bld
->builder
,
277 mask
->cont_mask
= exec_mask
;
279 lp_exec_mask_update(mask
);
283 static void lp_exec_endloop(struct lp_exec_mask
*mask
)
285 LLVMBasicBlockRef endloop
;
286 LLVMTypeRef reg_type
= LLVMIntType(mask
->bld
->type
.width
*
287 mask
->bld
->type
.length
);
290 assert(mask
->break_mask
);
292 /* i1cond = (mask == 0) */
293 i1cond
= LLVMBuildICmp(
296 LLVMBuildBitCast(mask
->bld
->builder
, mask
->break_mask
, reg_type
, ""),
297 LLVMConstNull(reg_type
), "");
299 endloop
= lp_build_insert_new_block(mask
->bld
->builder
, "endloop");
301 LLVMBuildCondBr(mask
->bld
->builder
,
302 i1cond
, mask
->loop_block
, endloop
);
304 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, endloop
);
306 mask
->loop_block
= mask
->loop_stack
[--mask
->loop_stack_size
];
307 /* pop the cont mask */
308 if (mask
->cont_stack_size
) {
309 mask
->cont_mask
= mask
->cont_stack
[--mask
->cont_stack_size
];
311 /* pop the break mask */
312 if (mask
->break_stack_size
) {
313 mask
->break_mask
= mask
->break_stack
[--mask
->break_stack_size
];
316 lp_exec_mask_update(mask
);
319 /* stores val into an address pointed to by dst.
320 * mask->exec_mask is used to figure out which bits of val
321 * should be stored into the address
322 * (0 means don't store this bit, 1 means do store).
324 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
328 if (mask
->has_mask
) {
329 LLVMValueRef real_val
, dst_val
;
331 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
332 real_val
= lp_build_select(mask
->bld
,
336 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
338 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
343 emit_ddx(struct lp_build_tgsi_soa_context
*bld
,
346 LLVMValueRef src_left
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_left
);
347 LLVMValueRef src_right
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_right
);
348 return lp_build_sub(&bld
->base
, src_right
, src_left
);
353 emit_ddy(struct lp_build_tgsi_soa_context
*bld
,
356 LLVMValueRef src_top
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_top
);
357 LLVMValueRef src_bottom
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_bottom
);
358 return lp_build_sub(&bld
->base
, src_top
, src_bottom
);
367 struct lp_build_tgsi_soa_context
*bld
,
368 const struct tgsi_full_instruction
*inst
,
370 const unsigned chan_index
)
372 const struct tgsi_full_src_register
*reg
= &inst
->Src
[index
];
373 unsigned swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
383 if (reg
->Register
.Indirect
) {
384 LLVMTypeRef int_vec_type
= lp_build_int_vec_type(bld
->base
.type
);
385 unsigned swizzle
= tgsi_util_get_src_register_swizzle( ®
->Indirect
, chan_index
);
386 addr
= LLVMBuildLoad(bld
->base
.builder
,
387 bld
->addr
[reg
->Indirect
.Index
][swizzle
],
389 /* for indexing we want integers */
390 addr
= LLVMBuildFPToSI(bld
->base
.builder
, addr
,
392 addr
= LLVMBuildExtractElement(bld
->base
.builder
,
393 addr
, LLVMConstInt(LLVMInt32Type(), 0, 0),
397 switch (reg
->Register
.File
) {
398 case TGSI_FILE_CONSTANT
: {
399 LLVMValueRef index
= LLVMConstInt(LLVMInt32Type(), reg
->Register
.Index
*4 + swizzle
, 0);
400 LLVMValueRef scalar
, scalar_ptr
;
402 if (reg
->Register
.Indirect
) {
403 /*lp_build_printf(bld->base.builder,
404 "\taddr = %d\n", addr);*/
405 addr
= lp_build_mul(&bld
->base
, addr
, LLVMConstInt(LLVMInt32Type(), 4, 0));
406 index
= lp_build_add(&bld
->base
, index
, addr
);
408 scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
, &index
, 1, "");
409 scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
411 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
415 case TGSI_FILE_IMMEDIATE
:
416 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
420 case TGSI_FILE_INPUT
:
421 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
425 case TGSI_FILE_TEMPORARY
:
426 res
= LLVMBuildLoad(bld
->base
.builder
, bld
->temps
[reg
->Register
.Index
][swizzle
], "");
428 return bld
->base
.undef
;
433 return bld
->base
.undef
;
439 return bld
->base
.undef
;
442 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
443 case TGSI_UTIL_SIGN_CLEAR
:
444 res
= lp_build_abs( &bld
->base
, res
);
447 case TGSI_UTIL_SIGN_SET
:
448 /* TODO: Use bitwese OR for floating point */
449 res
= lp_build_abs( &bld
->base
, res
);
450 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
453 case TGSI_UTIL_SIGN_TOGGLE
:
454 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
457 case TGSI_UTIL_SIGN_KEEP
:
466 * Register fetch with derivatives.
470 struct lp_build_tgsi_soa_context
*bld
,
471 const struct tgsi_full_instruction
*inst
,
473 const unsigned chan_index
,
480 src
= emit_fetch(bld
, inst
, index
, chan_index
);
485 /* TODO: use interpolation coeffs for inputs */
488 *ddx
= emit_ddx(bld
, src
);
491 *ddy
= emit_ddy(bld
, src
);
500 struct lp_build_tgsi_soa_context
*bld
,
501 const struct tgsi_full_instruction
*inst
,
506 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
508 switch( inst
->Instruction
.Saturate
) {
512 case TGSI_SAT_ZERO_ONE
:
513 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
514 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
517 case TGSI_SAT_MINUS_PLUS_ONE
:
518 value
= lp_build_max(&bld
->base
, value
, lp_build_const_vec(bld
->base
.type
, -1.0));
519 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
526 switch( reg
->Register
.File
) {
527 case TGSI_FILE_OUTPUT
:
528 lp_exec_mask_store(&bld
->exec_mask
, value
,
529 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
532 case TGSI_FILE_TEMPORARY
:
533 lp_exec_mask_store(&bld
->exec_mask
, value
,
534 bld
->temps
[reg
->Register
.Index
][chan_index
]);
537 case TGSI_FILE_ADDRESS
:
538 lp_exec_mask_store(&bld
->exec_mask
, value
,
539 bld
->addr
[reg
->Indirect
.Index
][chan_index
]);
542 case TGSI_FILE_PREDICATE
:
554 * High-level instruction translators.
559 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
560 const struct tgsi_full_instruction
*inst
,
561 boolean apply_lodbias
,
565 const uint unit
= inst
->Src
[1].Register
.Index
;
566 LLVMValueRef lodbias
;
567 LLVMValueRef oow
= NULL
;
568 LLVMValueRef coords
[3];
572 switch (inst
->Texture
.Texture
) {
573 case TGSI_TEXTURE_1D
:
576 case TGSI_TEXTURE_2D
:
577 case TGSI_TEXTURE_RECT
:
580 case TGSI_TEXTURE_SHADOW1D
:
581 case TGSI_TEXTURE_SHADOW2D
:
582 case TGSI_TEXTURE_SHADOWRECT
:
583 case TGSI_TEXTURE_3D
:
584 case TGSI_TEXTURE_CUBE
:
593 lodbias
= emit_fetch( bld
, inst
, 0, 3 );
595 lodbias
= bld
->base
.zero
;
598 oow
= emit_fetch( bld
, inst
, 0, 3 );
599 oow
= lp_build_rcp(&bld
->base
, oow
);
602 for (i
= 0; i
< num_coords
; i
++) {
603 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
605 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
607 for (i
= num_coords
; i
< 3; i
++) {
608 coords
[i
] = bld
->base
.undef
;
611 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
614 unit
, num_coords
, coords
, lodbias
,
620 * Kill fragment if any of the src register values are negative.
624 struct lp_build_tgsi_soa_context
*bld
,
625 const struct tgsi_full_instruction
*inst
)
627 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
628 LLVMValueRef terms
[NUM_CHANNELS
];
632 memset(&terms
, 0, sizeof terms
);
634 FOR_EACH_CHANNEL( chan_index
) {
637 /* Unswizzle channel */
638 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
640 /* Check if the component has not been already tested. */
641 assert(swizzle
< NUM_CHANNELS
);
642 if( !terms
[swizzle
] )
643 /* TODO: change the comparison operator instead of setting the sign */
644 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
648 FOR_EACH_CHANNEL( chan_index
) {
649 if(terms
[chan_index
]) {
650 LLVMValueRef chan_mask
;
653 * If term < 0 then mask = 0 else mask = ~0.
655 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
658 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
665 lp_build_mask_update(bld
->mask
, mask
);
670 * Predicated fragment kill.
671 * XXX Actually, we do an unconditional kill (as in tgsi_exec.c).
672 * The only predication is the execution mask which will apply if
673 * we're inside a loop or conditional.
676 emit_kilp(struct lp_build_tgsi_soa_context
*bld
,
677 const struct tgsi_full_instruction
*inst
)
681 /* For those channels which are "alive", disable fragment shader
684 if (bld
->exec_mask
.has_mask
) {
685 mask
= LLVMBuildNot(bld
->base
.builder
, bld
->exec_mask
.exec_mask
, "kilp");
688 mask
= bld
->base
.zero
;
691 lp_build_mask_update(bld
->mask
, mask
);
696 * Check if inst src/dest regs use indirect addressing into temporary
700 indirect_temp_reference(const struct tgsi_full_instruction
*inst
)
703 for (i
= 0; i
< inst
->Instruction
.NumSrcRegs
; i
++) {
704 const struct tgsi_full_src_register
*reg
= &inst
->Src
[i
];
705 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
706 reg
->Register
.Indirect
)
709 for (i
= 0; i
< inst
->Instruction
.NumDstRegs
; i
++) {
710 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[i
];
711 if (reg
->Register
.File
== TGSI_FILE_TEMPORARY
&&
712 reg
->Register
.Indirect
)
720 struct lp_build_tgsi_soa_context
*bld
,
721 const struct tgsi_full_declaration
*decl
)
723 unsigned first
= decl
->Range
.First
;
724 unsigned last
= decl
->Range
.Last
;
726 LLVMBasicBlockRef current_block
=
727 LLVMGetInsertBlock(bld
->base
.builder
);
728 LLVMBasicBlockRef first_block
=
729 LLVMGetEntryBasicBlock(
730 LLVMGetBasicBlockParent(current_block
));
731 LLVMValueRef first_inst
=
732 LLVMGetFirstInstruction(first_block
);
734 /* we want alloca's to be the first instruction
735 * in the function so we need to rewind the builder
736 * to the very beginning */
737 LLVMPositionBuilderBefore(bld
->base
.builder
,
740 for (idx
= first
; idx
<= last
; ++idx
) {
741 switch (decl
->Declaration
.File
) {
742 case TGSI_FILE_TEMPORARY
:
743 for (i
= 0; i
< NUM_CHANNELS
; i
++)
744 bld
->temps
[idx
][i
] = lp_build_alloca(&bld
->base
);
747 case TGSI_FILE_OUTPUT
:
748 for (i
= 0; i
< NUM_CHANNELS
; i
++)
749 bld
->outputs
[idx
][i
] = lp_build_alloca(&bld
->base
);
752 case TGSI_FILE_ADDRESS
:
753 for (i
= 0; i
< NUM_CHANNELS
; i
++)
754 bld
->addr
[idx
][i
] = lp_build_alloca(&bld
->base
);
758 /* don't need to declare other vars */
763 LLVMPositionBuilderAtEnd(bld
->base
.builder
,
770 * Emit LLVM for one TGSI instruction.
771 * \param return TRUE for success, FALSE otherwise
775 struct lp_build_tgsi_soa_context
*bld
,
776 const struct tgsi_full_instruction
*inst
,
777 const struct tgsi_opcode_info
*info
)
780 LLVMValueRef src0
, src1
, src2
;
781 LLVMValueRef tmp0
, tmp1
, tmp2
;
782 LLVMValueRef tmp3
= NULL
;
783 LLVMValueRef tmp4
= NULL
;
784 LLVMValueRef tmp5
= NULL
;
785 LLVMValueRef tmp6
= NULL
;
786 LLVMValueRef tmp7
= NULL
;
788 LLVMValueRef dst0
[NUM_CHANNELS
];
790 /* we can't handle indirect addressing into temp register file yet */
791 if (indirect_temp_reference(inst
))
795 * Stores and write masks are handled in a general fashion after the long
796 * instruction opcode switch statement.
798 * Although not stricitly necessary, we avoid generating instructions for
799 * channels which won't be stored, in cases where's that easy. For some
800 * complex instructions, like texture sampling, it is more convenient to
801 * assume a full writemask and then let LLVM optimization passes eliminate
805 assert(info
->num_dst
<= 1);
807 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
808 dst0
[chan_index
] = bld
->base
.undef
;
812 switch (inst
->Instruction
.Opcode
) {
813 case TGSI_OPCODE_ARL
:
814 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
815 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
816 tmp0
= lp_build_floor(&bld
->base
, tmp0
);
817 dst0
[chan_index
] = tmp0
;
821 case TGSI_OPCODE_MOV
:
822 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
823 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
827 case TGSI_OPCODE_LIT
:
828 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
829 dst0
[CHAN_X
] = bld
->base
.one
;
831 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
832 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
833 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
835 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
836 /* XMM[1] = SrcReg[0].yyyy */
837 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
838 /* XMM[1] = max(XMM[1], 0) */
839 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
840 /* XMM[2] = SrcReg[0].wwww */
841 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
842 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
843 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
844 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
845 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
847 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
848 dst0
[CHAN_W
] = bld
->base
.one
;
852 case TGSI_OPCODE_RCP
:
853 /* TGSI_OPCODE_RECIP */
854 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
855 res
= lp_build_rcp(&bld
->base
, src0
);
856 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
857 dst0
[chan_index
] = res
;
861 case TGSI_OPCODE_RSQ
:
862 /* TGSI_OPCODE_RECIPSQRT */
863 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
864 src0
= lp_build_abs(&bld
->base
, src0
);
865 res
= lp_build_rsqrt(&bld
->base
, src0
);
866 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
867 dst0
[chan_index
] = res
;
871 case TGSI_OPCODE_EXP
:
872 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
873 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
874 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
875 LLVMValueRef
*p_exp2_int_part
= NULL
;
876 LLVMValueRef
*p_frac_part
= NULL
;
877 LLVMValueRef
*p_exp2
= NULL
;
879 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
881 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
882 p_exp2_int_part
= &tmp0
;
883 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
885 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
888 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
890 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
892 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
894 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
898 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
899 dst0
[CHAN_W
] = bld
->base
.one
;
903 case TGSI_OPCODE_LOG
:
904 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
905 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
906 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
907 LLVMValueRef
*p_floor_log2
= NULL
;
908 LLVMValueRef
*p_exp
= NULL
;
909 LLVMValueRef
*p_log2
= NULL
;
911 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
912 src0
= lp_build_abs( &bld
->base
, src0
);
914 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
915 p_floor_log2
= &tmp0
;
916 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
918 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
921 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
923 /* dst.x = floor(lg2(abs(src.x))) */
924 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
926 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
927 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
928 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
930 /* dst.z = lg2(abs(src.x)) */
931 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
935 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
936 dst0
[CHAN_W
] = bld
->base
.one
;
940 case TGSI_OPCODE_MUL
:
941 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
942 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
943 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
944 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
948 case TGSI_OPCODE_ADD
:
949 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
950 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
951 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
952 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
956 case TGSI_OPCODE_DP3
:
957 /* TGSI_OPCODE_DOT3 */
958 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
959 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
960 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
961 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
962 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
963 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
964 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
965 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
966 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
967 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
968 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
969 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
970 dst0
[chan_index
] = tmp0
;
974 case TGSI_OPCODE_DP4
:
975 /* TGSI_OPCODE_DOT4 */
976 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
977 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
978 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
979 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
980 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
981 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
982 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
983 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
984 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
985 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
986 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
987 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
988 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
989 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
990 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
991 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
992 dst0
[chan_index
] = tmp0
;
996 case TGSI_OPCODE_DST
:
997 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
998 dst0
[CHAN_X
] = bld
->base
.one
;
1000 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1001 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1002 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1003 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1005 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1006 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
1008 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1009 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
1013 case TGSI_OPCODE_MIN
:
1014 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1015 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1016 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1017 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
1021 case TGSI_OPCODE_MAX
:
1022 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1023 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1024 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1025 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
1029 case TGSI_OPCODE_SLT
:
1030 /* TGSI_OPCODE_SETLT */
1031 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1032 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1033 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1034 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
1035 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1039 case TGSI_OPCODE_SGE
:
1040 /* TGSI_OPCODE_SETGE */
1041 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1042 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1043 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1044 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
1045 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1049 case TGSI_OPCODE_MAD
:
1050 /* TGSI_OPCODE_MADD */
1051 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1052 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1053 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1054 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
1055 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1056 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
1057 dst0
[chan_index
] = tmp0
;
1061 case TGSI_OPCODE_SUB
:
1062 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1063 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1064 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1065 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
1069 case TGSI_OPCODE_LRP
:
1070 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1071 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1072 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1073 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1074 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
1075 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
1076 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
1080 case TGSI_OPCODE_CND
:
1081 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1082 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1083 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1084 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1085 tmp1
= lp_build_const_vec(bld
->base
.type
, 0.5);
1086 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
1087 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
1091 case TGSI_OPCODE_DP2A
:
1092 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1093 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1094 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1095 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1096 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1097 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1098 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1099 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
1100 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1101 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1102 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1106 case TGSI_OPCODE_FRC
:
1107 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1108 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1109 tmp0
= lp_build_floor(&bld
->base
, src0
);
1110 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
1111 dst0
[chan_index
] = tmp0
;
1115 case TGSI_OPCODE_CLAMP
:
1116 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1117 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1118 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1119 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1120 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
1121 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
1122 dst0
[chan_index
] = tmp0
;
1126 case TGSI_OPCODE_FLR
:
1127 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1128 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1129 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
1133 case TGSI_OPCODE_ROUND
:
1134 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1135 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1136 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
1140 case TGSI_OPCODE_EX2
: {
1141 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1142 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
1143 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1144 dst0
[chan_index
] = tmp0
;
1149 case TGSI_OPCODE_LG2
:
1150 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1151 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
1152 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1153 dst0
[chan_index
] = tmp0
;
1157 case TGSI_OPCODE_POW
:
1158 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1159 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1160 res
= lp_build_pow( &bld
->base
, src0
, src1
);
1161 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1162 dst0
[chan_index
] = res
;
1166 case TGSI_OPCODE_XPD
:
1167 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1168 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1169 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1170 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1172 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1173 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1174 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1175 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1177 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1179 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
1181 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1182 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
1183 dst0
[CHAN_X
] = tmp2
;
1185 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1186 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1187 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1188 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1190 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1191 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
1192 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
1193 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
1194 dst0
[CHAN_Y
] = tmp3
;
1196 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1197 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1198 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
1199 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1200 dst0
[CHAN_Z
] = tmp5
;
1202 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1203 dst0
[CHAN_W
] = bld
->base
.one
;
1207 case TGSI_OPCODE_ABS
:
1208 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1209 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1210 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1214 case TGSI_OPCODE_RCC
:
1219 case TGSI_OPCODE_DPH
:
1220 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1221 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1222 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1223 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1224 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1225 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1226 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1227 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1228 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1229 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1230 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1231 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1232 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1233 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1234 dst0
[chan_index
] = tmp0
;
1238 case TGSI_OPCODE_COS
:
1239 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1240 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1241 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1242 dst0
[chan_index
] = tmp0
;
1246 case TGSI_OPCODE_DDX
:
1247 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1248 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1252 case TGSI_OPCODE_DDY
:
1253 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1254 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1258 case TGSI_OPCODE_KILP
:
1259 /* predicated kill */
1260 emit_kilp( bld
, inst
);
1263 case TGSI_OPCODE_KIL
:
1264 /* conditional kill */
1265 emit_kil( bld
, inst
);
1268 case TGSI_OPCODE_PK2H
:
1272 case TGSI_OPCODE_PK2US
:
1276 case TGSI_OPCODE_PK4B
:
1280 case TGSI_OPCODE_PK4UB
:
1284 case TGSI_OPCODE_RFL
:
1288 case TGSI_OPCODE_SEQ
:
1289 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1290 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1291 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1292 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1293 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1297 case TGSI_OPCODE_SFL
:
1298 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1299 dst0
[chan_index
] = bld
->base
.zero
;
1303 case TGSI_OPCODE_SGT
:
1304 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1305 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1306 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1307 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1308 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1312 case TGSI_OPCODE_SIN
:
1313 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1314 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1315 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1316 dst0
[chan_index
] = tmp0
;
1320 case TGSI_OPCODE_SLE
:
1321 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1322 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1323 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1324 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1325 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1329 case TGSI_OPCODE_SNE
:
1330 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1331 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1332 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1333 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1334 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1338 case TGSI_OPCODE_STR
:
1339 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1340 dst0
[chan_index
] = bld
->base
.one
;
1344 case TGSI_OPCODE_TEX
:
1345 emit_tex( bld
, inst
, FALSE
, FALSE
, dst0
);
1348 case TGSI_OPCODE_TXD
:
1353 case TGSI_OPCODE_UP2H
:
1359 case TGSI_OPCODE_UP2US
:
1365 case TGSI_OPCODE_UP4B
:
1371 case TGSI_OPCODE_UP4UB
:
1377 case TGSI_OPCODE_X2D
:
1383 case TGSI_OPCODE_ARA
:
1389 case TGSI_OPCODE_ARR
:
1390 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1391 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1392 tmp0
= lp_build_round(&bld
->base
, tmp0
);
1393 dst0
[chan_index
] = tmp0
;
1397 case TGSI_OPCODE_BRA
:
1403 case TGSI_OPCODE_CAL
:
1408 case TGSI_OPCODE_RET
:
1413 case TGSI_OPCODE_END
:
1416 case TGSI_OPCODE_SSG
:
1417 /* TGSI_OPCODE_SGN */
1418 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1419 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1420 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1424 case TGSI_OPCODE_CMP
:
1425 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1426 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1427 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1428 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1429 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1430 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1434 case TGSI_OPCODE_SCS
:
1435 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1436 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1437 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1439 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1440 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1441 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1443 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1444 dst0
[CHAN_Z
] = bld
->base
.zero
;
1446 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1447 dst0
[CHAN_W
] = bld
->base
.one
;
1451 case TGSI_OPCODE_TXB
:
1452 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1455 case TGSI_OPCODE_NRM
:
1457 case TGSI_OPCODE_NRM4
:
1458 /* 3 or 4-component normalization */
1460 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1462 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1463 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1464 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1465 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1467 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1470 /* xmm0 = src.x * src.x */
1471 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1472 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1475 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1478 /* xmm0 = xmm0 + src.y * src.y */
1479 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1480 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1483 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1484 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1487 /* xmm0 = xmm0 + src.z * src.z */
1488 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1489 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1492 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1493 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1497 /* xmm0 = xmm0 + src.w * src.w */
1498 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1499 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1502 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1503 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1506 /* xmm1 = 1 / sqrt(xmm0) */
1507 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1509 /* dst.x = xmm1 * src.x */
1510 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1511 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1514 /* dst.y = xmm1 * src.y */
1515 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1516 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1519 /* dst.z = xmm1 * src.z */
1520 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1521 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1524 /* dst.w = xmm1 * src.w */
1525 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1526 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1531 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1532 dst0
[CHAN_W
] = bld
->base
.one
;
1537 case TGSI_OPCODE_DIV
:
1543 case TGSI_OPCODE_DP2
:
1544 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1545 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1546 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1547 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1548 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1549 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1550 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1551 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1552 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1556 case TGSI_OPCODE_TXL
:
1557 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1560 case TGSI_OPCODE_TXP
:
1561 emit_tex( bld
, inst
, FALSE
, TRUE
, dst0
);
1564 case TGSI_OPCODE_BRK
:
1565 lp_exec_break(&bld
->exec_mask
);
1568 case TGSI_OPCODE_IF
:
1569 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1570 tmp0
= lp_build_cmp(&bld
->base
, PIPE_FUNC_NOTEQUAL
,
1571 tmp0
, bld
->base
.zero
);
1572 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1575 case TGSI_OPCODE_BGNFOR
:
1581 case TGSI_OPCODE_BGNLOOP
:
1582 lp_exec_bgnloop(&bld
->exec_mask
);
1585 case TGSI_OPCODE_REP
:
1591 case TGSI_OPCODE_ELSE
:
1592 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1595 case TGSI_OPCODE_ENDIF
:
1596 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1599 case TGSI_OPCODE_ENDFOR
:
1605 case TGSI_OPCODE_ENDLOOP
:
1606 lp_exec_endloop(&bld
->exec_mask
);
1609 case TGSI_OPCODE_ENDREP
:
1615 case TGSI_OPCODE_PUSHA
:
1621 case TGSI_OPCODE_POPA
:
1627 case TGSI_OPCODE_CEIL
:
1628 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1629 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1630 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1634 case TGSI_OPCODE_I2F
:
1640 case TGSI_OPCODE_NOT
:
1646 case TGSI_OPCODE_TRUNC
:
1647 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1648 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1649 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
1653 case TGSI_OPCODE_SHL
:
1659 case TGSI_OPCODE_ISHR
:
1665 case TGSI_OPCODE_AND
:
1671 case TGSI_OPCODE_OR
:
1677 case TGSI_OPCODE_MOD
:
1683 case TGSI_OPCODE_XOR
:
1689 case TGSI_OPCODE_SAD
:
1695 case TGSI_OPCODE_TXF
:
1701 case TGSI_OPCODE_TXQ
:
1707 case TGSI_OPCODE_CONT
:
1708 lp_exec_continue(&bld
->exec_mask
);
1711 case TGSI_OPCODE_EMIT
:
1715 case TGSI_OPCODE_ENDPRIM
:
1719 case TGSI_OPCODE_NOP
:
1727 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1728 emit_store( bld
, inst
, 0, chan_index
, dst0
[chan_index
]);
1737 lp_build_tgsi_soa(LLVMBuilderRef builder
,
1738 const struct tgsi_token
*tokens
,
1739 struct lp_type type
,
1740 struct lp_build_mask_context
*mask
,
1741 LLVMValueRef consts_ptr
,
1742 const LLVMValueRef
*pos
,
1743 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
1744 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
1745 struct lp_build_sampler_soa
*sampler
)
1747 struct lp_build_tgsi_soa_context bld
;
1748 struct tgsi_parse_context parse
;
1749 uint num_immediates
= 0;
1752 /* Setup build context */
1753 memset(&bld
, 0, sizeof bld
);
1754 lp_build_context_init(&bld
.base
, builder
, type
);
1757 bld
.inputs
= inputs
;
1758 bld
.outputs
= outputs
;
1759 bld
.consts_ptr
= consts_ptr
;
1760 bld
.sampler
= sampler
;
1762 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
1764 tgsi_parse_init( &parse
, tokens
);
1766 while( !tgsi_parse_end_of_tokens( &parse
) ) {
1767 tgsi_parse_token( &parse
);
1769 switch( parse
.FullToken
.Token
.Type
) {
1770 case TGSI_TOKEN_TYPE_DECLARATION
:
1771 /* Inputs already interpolated */
1773 if (!emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
))
1774 _debug_printf("warning: failed to define LLVM variable\n");
1778 case TGSI_TOKEN_TYPE_INSTRUCTION
:
1780 unsigned opcode
= parse
.FullToken
.FullInstruction
.Instruction
.Opcode
;
1781 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info(opcode
);
1782 if (!emit_instruction( &bld
, &parse
.FullToken
.FullInstruction
, info
))
1783 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
1784 info
? info
->mnemonic
: "<invalid>");
1789 case TGSI_TOKEN_TYPE_IMMEDIATE
:
1790 /* simply copy the immediate values into the next immediates[] slot */
1792 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
1794 assert(num_immediates
< LP_MAX_IMMEDIATES
);
1795 for( i
= 0; i
< size
; ++i
)
1796 bld
.immediates
[num_immediates
][i
] =
1797 lp_build_const_vec(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
1798 for( i
= size
; i
< 4; ++i
)
1799 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
1804 case TGSI_TOKEN_TYPE_PROPERTY
:
1812 LLVMBasicBlockRef block
= LLVMGetInsertBlock(builder
);
1813 LLVMValueRef function
= LLVMGetBasicBlockParent(block
);
1814 debug_printf("11111111111111111111111111111 \n");
1815 tgsi_dump(tokens
, 0);
1816 LLVMDumpValue(function
);
1817 debug_printf("2222222222222222222222222222 \n");
1819 tgsi_parse_free( &parse
);