1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_dump.h"
45 #include "tgsi/tgsi_info.h"
46 #include "tgsi/tgsi_parse.h"
47 #include "tgsi/tgsi_util.h"
48 #include "tgsi/tgsi_exec.h"
49 #include "tgsi/tgsi_scan.h"
50 #include "lp_bld_type.h"
51 #include "lp_bld_const.h"
52 #include "lp_bld_arit.h"
53 #include "lp_bld_logic.h"
54 #include "lp_bld_swizzle.h"
55 #include "lp_bld_flow.h"
56 #include "lp_bld_tgsi.h"
57 #include "lp_bld_debug.h"
60 #define LP_MAX_TEMPS 256
61 #define LP_MAX_IMMEDIATES 256
64 #define FOR_EACH_CHANNEL( CHAN )\
65 for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)
67 #define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
68 ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))
70 #define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
71 if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))
73 #define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
74 FOR_EACH_CHANNEL( CHAN )\
75 IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )
82 #define QUAD_TOP_LEFT 0
83 #define QUAD_TOP_RIGHT 1
84 #define QUAD_BOTTOM_LEFT 2
85 #define QUAD_BOTTOM_RIGHT 3
87 #define LP_TGSI_MAX_NESTING 16
90 struct lp_build_context
*bld
;
94 LLVMTypeRef int_vec_type
;
96 LLVMValueRef cond_stack
[LP_TGSI_MAX_NESTING
];
98 LLVMValueRef cond_mask
;
100 LLVMValueRef break_stack
[LP_TGSI_MAX_NESTING
];
101 int break_stack_size
;
102 LLVMValueRef break_mask
;
104 LLVMValueRef cont_stack
[LP_TGSI_MAX_NESTING
];
106 LLVMValueRef cont_mask
;
108 LLVMBasicBlockRef loop_stack
[LP_TGSI_MAX_NESTING
];
110 LLVMBasicBlockRef loop_block
;
113 LLVMValueRef exec_mask
;
116 struct lp_build_tgsi_soa_context
118 struct lp_build_context base
;
120 LLVMValueRef consts_ptr
;
121 const LLVMValueRef
*pos
;
122 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
123 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
125 struct lp_build_sampler_soa
*sampler
;
127 LLVMValueRef immediates
[LP_MAX_IMMEDIATES
][NUM_CHANNELS
];
128 LLVMValueRef temps
[LP_MAX_TEMPS
][NUM_CHANNELS
];
129 LLVMValueRef addr
[LP_MAX_TEMPS
][NUM_CHANNELS
];
131 /* we allocate an array of temps if we have indirect
132 * addressing and then the temps above is unused */
133 LLVMValueRef temps_array
;
134 boolean has_indirect_addressing
;
136 struct lp_build_mask_context
*mask
;
137 struct lp_exec_mask exec_mask
;
140 static const unsigned char
142 QUAD_TOP_LEFT
, QUAD_TOP_LEFT
,
143 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_LEFT
146 static const unsigned char
148 QUAD_TOP_RIGHT
, QUAD_TOP_RIGHT
,
149 QUAD_BOTTOM_RIGHT
, QUAD_BOTTOM_RIGHT
152 static const unsigned char
154 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
,
155 QUAD_TOP_LEFT
, QUAD_TOP_RIGHT
158 static const unsigned char
159 swizzle_bottom
[4] = {
160 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
,
161 QUAD_BOTTOM_LEFT
, QUAD_BOTTOM_RIGHT
164 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
167 mask
->has_mask
= FALSE
;
168 mask
->cond_stack_size
= 0;
169 mask
->loop_stack_size
= 0;
170 mask
->break_stack_size
= 0;
171 mask
->cont_stack_size
= 0;
173 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
176 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
178 if (mask
->loop_stack_size
) {
179 /*for loops we need to update the entire mask at runtime */
181 assert(mask
->break_mask
);
182 tmp
= LLVMBuildAnd(mask
->bld
->builder
,
186 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
191 mask
->exec_mask
= mask
->cond_mask
;
194 mask
->has_mask
= (mask
->cond_stack_size
> 0 ||
195 mask
->loop_stack_size
> 0);
198 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
201 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
202 mask
->cond_mask
= LLVMBuildBitCast(mask
->bld
->builder
, val
,
203 mask
->int_vec_type
, "");
205 lp_exec_mask_update(mask
);
208 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
210 LLVMValueRef prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
211 LLVMValueRef inv_mask
= LLVMBuildNot(mask
->bld
->builder
,
212 mask
->cond_mask
, "");
214 /* means that we didn't have any mask before and that
215 * we were fully enabled */
216 if (mask
->cond_stack_size
<= 1) {
217 prev_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
220 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
223 lp_exec_mask_update(mask
);
226 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
228 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
229 lp_exec_mask_update(mask
);
232 static void lp_exec_bgnloop(struct lp_exec_mask
*mask
)
235 if (mask
->cont_stack_size
== 0)
236 mask
->cont_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
237 if (mask
->break_stack_size
== 0)
238 mask
->break_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
239 if (mask
->cond_stack_size
== 0)
240 mask
->cond_mask
= LLVMConstAllOnes(mask
->int_vec_type
);
242 mask
->break_stack
[mask
->break_stack_size
++] = mask
->break_mask
;
243 mask
->cont_stack
[mask
->cont_stack_size
++] = mask
->cont_mask
;
244 mask
->loop_stack
[mask
->loop_stack_size
++] = mask
->loop_block
;
245 mask
->loop_block
= lp_build_insert_new_block(mask
->bld
->builder
, "bgnloop");
246 LLVMBuildBr(mask
->bld
->builder
, mask
->loop_block
);
247 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, mask
->loop_block
);
249 lp_exec_mask_update(mask
);
252 static void lp_exec_break(struct lp_exec_mask
*mask
)
254 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
258 /* mask->break_stack_size > 1 implies that we encountered a break
259 * statemant already and if that's the case we want to make sure
260 * our mask is a combination of the previous break and the current
262 if (mask
->break_stack_size
> 1) {
263 mask
->break_mask
= LLVMBuildAnd(mask
->bld
->builder
,
265 exec_mask
, "break_full");
267 mask
->break_mask
= exec_mask
;
269 lp_exec_mask_update(mask
);
272 static void lp_exec_continue(struct lp_exec_mask
*mask
)
274 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
278 if (mask
->cont_stack_size
> 1) {
279 mask
->cont_mask
= LLVMBuildAnd(mask
->bld
->builder
,
283 mask
->cont_mask
= exec_mask
;
285 lp_exec_mask_update(mask
);
289 static void lp_exec_endloop(struct lp_exec_mask
*mask
)
291 LLVMBasicBlockRef endloop
;
292 LLVMTypeRef reg_type
= LLVMIntType(mask
->bld
->type
.width
*
293 mask
->bld
->type
.length
);
296 assert(mask
->break_mask
);
298 /* i1cond = (mask == 0) */
299 i1cond
= LLVMBuildICmp(
302 LLVMBuildBitCast(mask
->bld
->builder
, mask
->break_mask
, reg_type
, ""),
303 LLVMConstNull(reg_type
), "");
305 endloop
= lp_build_insert_new_block(mask
->bld
->builder
, "endloop");
307 LLVMBuildCondBr(mask
->bld
->builder
,
308 i1cond
, mask
->loop_block
, endloop
);
310 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, endloop
);
312 mask
->loop_block
= mask
->loop_stack
[--mask
->loop_stack_size
];
313 /* pop the cont mask */
314 if (mask
->cont_stack_size
) {
315 mask
->cont_mask
= mask
->cont_stack
[--mask
->cont_stack_size
];
317 /* pop the break mask */
318 if (mask
->break_stack_size
) {
319 mask
->break_mask
= mask
->break_stack
[--mask
->break_stack_size
];
322 lp_exec_mask_update(mask
);
325 /* stores val into an address pointed to by dst.
326 * mask->exec_mask is used to figure out which bits of val
327 * should be stored into the address
328 * (0 means don't store this bit, 1 means do store).
330 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
334 if (mask
->has_mask
) {
335 LLVMValueRef real_val
, dst_val
;
337 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
338 real_val
= lp_build_select(mask
->bld
,
342 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
344 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
349 emit_ddx(struct lp_build_tgsi_soa_context
*bld
,
352 LLVMValueRef src_left
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_left
);
353 LLVMValueRef src_right
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_right
);
354 return lp_build_sub(&bld
->base
, src_right
, src_left
);
359 emit_ddy(struct lp_build_tgsi_soa_context
*bld
,
362 LLVMValueRef src_top
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_top
);
363 LLVMValueRef src_bottom
= lp_build_swizzle1_aos(&bld
->base
, src
, swizzle_bottom
);
364 return lp_build_sub(&bld
->base
, src_top
, src_bottom
);
368 get_temp_ptr(struct lp_build_tgsi_soa_context
*bld
,
374 if (!bld
->has_indirect_addressing
) {
375 return bld
->temps
[index
][swizzle
];
377 LLVMValueRef lindex
=
378 LLVMConstInt(LLVMInt32Type(), index
*4 + swizzle
, 0);
380 lindex
= lp_build_add(&bld
->base
, lindex
, addr
);
381 return LLVMBuildGEP(bld
->base
.builder
, bld
->temps_array
, &lindex
, 1, "");
390 struct lp_build_tgsi_soa_context
*bld
,
391 const struct tgsi_full_instruction
*inst
,
393 const unsigned chan_index
)
395 const struct tgsi_full_src_register
*reg
= &inst
->Src
[index
];
396 unsigned swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
406 if (reg
->Register
.Indirect
) {
407 LLVMTypeRef int_vec_type
= lp_build_int_vec_type(bld
->base
.type
);
408 unsigned swizzle
= tgsi_util_get_src_register_swizzle( ®
->Indirect
, chan_index
);
409 addr
= LLVMBuildLoad(bld
->base
.builder
,
410 bld
->addr
[reg
->Indirect
.Index
][swizzle
],
412 /* for indexing we want integers */
413 addr
= LLVMBuildFPToSI(bld
->base
.builder
, addr
,
415 addr
= LLVMBuildExtractElement(bld
->base
.builder
,
416 addr
, LLVMConstInt(LLVMInt32Type(), 0, 0),
418 addr
= lp_build_mul(&bld
->base
, addr
, LLVMConstInt(LLVMInt32Type(), 4, 0));
421 switch (reg
->Register
.File
) {
422 case TGSI_FILE_CONSTANT
: {
423 LLVMValueRef index
= LLVMConstInt(LLVMInt32Type(), reg
->Register
.Index
*4 + swizzle
, 0);
424 LLVMValueRef scalar
, scalar_ptr
;
426 if (reg
->Register
.Indirect
) {
427 /*lp_build_printf(bld->base.builder,
428 "\taddr = %d\n", addr);*/
429 index
= lp_build_add(&bld
->base
, index
, addr
);
431 scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
, &index
, 1, "");
432 scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
434 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
438 case TGSI_FILE_IMMEDIATE
:
439 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
443 case TGSI_FILE_INPUT
:
444 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
448 case TGSI_FILE_TEMPORARY
: {
449 LLVMValueRef temp_ptr
= get_temp_ptr(bld
, reg
->Register
.Index
,
451 reg
->Register
.Indirect
,
453 res
= LLVMBuildLoad(bld
->base
.builder
, temp_ptr
, "");
455 return bld
->base
.undef
;
461 return bld
->base
.undef
;
467 return bld
->base
.undef
;
470 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
471 case TGSI_UTIL_SIGN_CLEAR
:
472 res
= lp_build_abs( &bld
->base
, res
);
475 case TGSI_UTIL_SIGN_SET
:
476 /* TODO: Use bitwese OR for floating point */
477 res
= lp_build_abs( &bld
->base
, res
);
478 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
481 case TGSI_UTIL_SIGN_TOGGLE
:
482 res
= LLVMBuildNeg( bld
->base
.builder
, res
, "" );
485 case TGSI_UTIL_SIGN_KEEP
:
494 * Register fetch with derivatives.
498 struct lp_build_tgsi_soa_context
*bld
,
499 const struct tgsi_full_instruction
*inst
,
501 const unsigned chan_index
,
508 src
= emit_fetch(bld
, inst
, index
, chan_index
);
513 /* TODO: use interpolation coeffs for inputs */
516 *ddx
= emit_ddx(bld
, src
);
519 *ddy
= emit_ddy(bld
, src
);
528 struct lp_build_tgsi_soa_context
*bld
,
529 const struct tgsi_full_instruction
*inst
,
534 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
537 switch( inst
->Instruction
.Saturate
) {
541 case TGSI_SAT_ZERO_ONE
:
542 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
543 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
546 case TGSI_SAT_MINUS_PLUS_ONE
:
547 value
= lp_build_max(&bld
->base
, value
, lp_build_const_vec(bld
->base
.type
, -1.0));
548 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
555 if (reg
->Register
.Indirect
) {
556 LLVMTypeRef int_vec_type
= lp_build_int_vec_type(bld
->base
.type
);
557 unsigned swizzle
= tgsi_util_get_src_register_swizzle( ®
->Indirect
, chan_index
);
558 addr
= LLVMBuildLoad(bld
->base
.builder
,
559 bld
->addr
[reg
->Indirect
.Index
][swizzle
],
561 /* for indexing we want integers */
562 addr
= LLVMBuildFPToSI(bld
->base
.builder
, addr
,
564 addr
= LLVMBuildExtractElement(bld
->base
.builder
,
565 addr
, LLVMConstInt(LLVMInt32Type(), 0, 0),
567 addr
= lp_build_mul(&bld
->base
, addr
, LLVMConstInt(LLVMInt32Type(), 4, 0));
570 switch( reg
->Register
.File
) {
571 case TGSI_FILE_OUTPUT
:
572 lp_exec_mask_store(&bld
->exec_mask
, value
,
573 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
576 case TGSI_FILE_TEMPORARY
: {
577 LLVMValueRef temp_ptr
= get_temp_ptr(bld
, reg
->Register
.Index
,
579 reg
->Register
.Indirect
,
581 lp_exec_mask_store(&bld
->exec_mask
, value
, temp_ptr
);
585 case TGSI_FILE_ADDRESS
:
586 lp_exec_mask_store(&bld
->exec_mask
, value
,
587 bld
->addr
[reg
->Indirect
.Index
][chan_index
]);
590 case TGSI_FILE_PREDICATE
:
602 * High-level instruction translators.
607 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
608 const struct tgsi_full_instruction
*inst
,
609 boolean apply_lodbias
,
613 const uint unit
= inst
->Src
[1].Register
.Index
;
614 LLVMValueRef lodbias
;
615 LLVMValueRef oow
= NULL
;
616 LLVMValueRef coords
[3];
620 switch (inst
->Texture
.Texture
) {
621 case TGSI_TEXTURE_1D
:
624 case TGSI_TEXTURE_2D
:
625 case TGSI_TEXTURE_RECT
:
628 case TGSI_TEXTURE_SHADOW1D
:
629 case TGSI_TEXTURE_SHADOW2D
:
630 case TGSI_TEXTURE_SHADOWRECT
:
631 case TGSI_TEXTURE_3D
:
632 case TGSI_TEXTURE_CUBE
:
641 lodbias
= emit_fetch( bld
, inst
, 0, 3 );
643 lodbias
= bld
->base
.zero
;
646 oow
= emit_fetch( bld
, inst
, 0, 3 );
647 oow
= lp_build_rcp(&bld
->base
, oow
);
650 for (i
= 0; i
< num_coords
; i
++) {
651 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
653 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
655 for (i
= num_coords
; i
< 3; i
++) {
656 coords
[i
] = bld
->base
.undef
;
659 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
662 unit
, num_coords
, coords
, lodbias
,
668 * Kill fragment if any of the src register values are negative.
672 struct lp_build_tgsi_soa_context
*bld
,
673 const struct tgsi_full_instruction
*inst
)
675 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
676 LLVMValueRef terms
[NUM_CHANNELS
];
680 memset(&terms
, 0, sizeof terms
);
682 FOR_EACH_CHANNEL( chan_index
) {
685 /* Unswizzle channel */
686 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
688 /* Check if the component has not been already tested. */
689 assert(swizzle
< NUM_CHANNELS
);
690 if( !terms
[swizzle
] )
691 /* TODO: change the comparison operator instead of setting the sign */
692 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
696 FOR_EACH_CHANNEL( chan_index
) {
697 if(terms
[chan_index
]) {
698 LLVMValueRef chan_mask
;
701 * If term < 0 then mask = 0 else mask = ~0.
703 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
706 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
713 lp_build_mask_update(bld
->mask
, mask
);
718 * Predicated fragment kill.
719 * XXX Actually, we do an unconditional kill (as in tgsi_exec.c).
720 * The only predication is the execution mask which will apply if
721 * we're inside a loop or conditional.
724 emit_kilp(struct lp_build_tgsi_soa_context
*bld
,
725 const struct tgsi_full_instruction
*inst
)
729 /* For those channels which are "alive", disable fragment shader
732 if (bld
->exec_mask
.has_mask
) {
733 mask
= LLVMBuildNot(bld
->base
.builder
, bld
->exec_mask
.exec_mask
, "kilp");
736 mask
= bld
->base
.zero
;
739 lp_build_mask_update(bld
->mask
, mask
);
744 struct lp_build_tgsi_soa_context
*bld
,
745 const struct tgsi_full_declaration
*decl
)
747 unsigned first
= decl
->Range
.First
;
748 unsigned last
= decl
->Range
.Last
;
750 LLVMBasicBlockRef current_block
=
751 LLVMGetInsertBlock(bld
->base
.builder
);
752 LLVMBasicBlockRef first_block
=
753 LLVMGetEntryBasicBlock(
754 LLVMGetBasicBlockParent(current_block
));
755 LLVMValueRef first_inst
=
756 LLVMGetFirstInstruction(first_block
);
758 /* we want alloca's to be the first instruction
759 * in the function so we need to rewind the builder
760 * to the very beginning */
761 LLVMPositionBuilderBefore(bld
->base
.builder
,
764 for (idx
= first
; idx
<= last
; ++idx
) {
765 switch (decl
->Declaration
.File
) {
766 case TGSI_FILE_TEMPORARY
:
767 if (bld
->has_indirect_addressing
) {
768 LLVMValueRef val
= LLVMConstInt(LLVMInt32Type(),
770 bld
->temps_array
= LLVMBuildArrayAlloca(bld
->base
.builder
,
771 lp_build_vec_type(bld
->base
.type
),
774 for (i
= 0; i
< NUM_CHANNELS
; i
++)
775 bld
->temps
[idx
][i
] = lp_build_alloca(&bld
->base
);
779 case TGSI_FILE_OUTPUT
:
780 for (i
= 0; i
< NUM_CHANNELS
; i
++)
781 bld
->outputs
[idx
][i
] = lp_build_alloca(&bld
->base
);
784 case TGSI_FILE_ADDRESS
:
785 for (i
= 0; i
< NUM_CHANNELS
; i
++)
786 bld
->addr
[idx
][i
] = lp_build_alloca(&bld
->base
);
790 /* don't need to declare other vars */
795 LLVMPositionBuilderAtEnd(bld
->base
.builder
,
802 * Emit LLVM for one TGSI instruction.
803 * \param return TRUE for success, FALSE otherwise
807 struct lp_build_tgsi_soa_context
*bld
,
808 const struct tgsi_full_instruction
*inst
,
809 const struct tgsi_opcode_info
*info
)
812 LLVMValueRef src0
, src1
, src2
;
813 LLVMValueRef tmp0
, tmp1
, tmp2
;
814 LLVMValueRef tmp3
= NULL
;
815 LLVMValueRef tmp4
= NULL
;
816 LLVMValueRef tmp5
= NULL
;
817 LLVMValueRef tmp6
= NULL
;
818 LLVMValueRef tmp7
= NULL
;
820 LLVMValueRef dst0
[NUM_CHANNELS
];
823 * Stores and write masks are handled in a general fashion after the long
824 * instruction opcode switch statement.
826 * Although not strictly necessary, we avoid generating instructions for
827 * channels which won't be stored, in cases where that's easy. For some
828 * complex instructions, like texture sampling, it is more convenient to
829 * assume a full writemask and then let LLVM optimization passes eliminate
833 assert(info
->num_dst
<= 1);
835 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
836 dst0
[chan_index
] = bld
->base
.undef
;
840 switch (inst
->Instruction
.Opcode
) {
841 case TGSI_OPCODE_ARL
:
842 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
843 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
844 tmp0
= lp_build_floor(&bld
->base
, tmp0
);
845 dst0
[chan_index
] = tmp0
;
849 case TGSI_OPCODE_MOV
:
850 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
851 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
855 case TGSI_OPCODE_LIT
:
856 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
857 dst0
[CHAN_X
] = bld
->base
.one
;
859 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
860 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
861 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
863 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
864 /* XMM[1] = SrcReg[0].yyyy */
865 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
866 /* XMM[1] = max(XMM[1], 0) */
867 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
868 /* XMM[2] = SrcReg[0].wwww */
869 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
870 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
871 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
872 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
873 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
875 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
876 dst0
[CHAN_W
] = bld
->base
.one
;
880 case TGSI_OPCODE_RCP
:
881 /* TGSI_OPCODE_RECIP */
882 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
883 res
= lp_build_rcp(&bld
->base
, src0
);
884 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
885 dst0
[chan_index
] = res
;
889 case TGSI_OPCODE_RSQ
:
890 /* TGSI_OPCODE_RECIPSQRT */
891 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
892 src0
= lp_build_abs(&bld
->base
, src0
);
893 res
= lp_build_rsqrt(&bld
->base
, src0
);
894 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
895 dst0
[chan_index
] = res
;
899 case TGSI_OPCODE_EXP
:
900 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
901 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
902 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
903 LLVMValueRef
*p_exp2_int_part
= NULL
;
904 LLVMValueRef
*p_frac_part
= NULL
;
905 LLVMValueRef
*p_exp2
= NULL
;
907 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
909 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
910 p_exp2_int_part
= &tmp0
;
911 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
913 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
916 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
918 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
920 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
922 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
926 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
927 dst0
[CHAN_W
] = bld
->base
.one
;
931 case TGSI_OPCODE_LOG
:
932 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
933 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
934 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
935 LLVMValueRef
*p_floor_log2
= NULL
;
936 LLVMValueRef
*p_exp
= NULL
;
937 LLVMValueRef
*p_log2
= NULL
;
939 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
940 src0
= lp_build_abs( &bld
->base
, src0
);
942 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
943 p_floor_log2
= &tmp0
;
944 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
946 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
949 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
951 /* dst.x = floor(lg2(abs(src.x))) */
952 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
954 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
955 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
956 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
958 /* dst.z = lg2(abs(src.x)) */
959 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
963 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
964 dst0
[CHAN_W
] = bld
->base
.one
;
968 case TGSI_OPCODE_MUL
:
969 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
970 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
971 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
972 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
976 case TGSI_OPCODE_ADD
:
977 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
978 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
979 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
980 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
984 case TGSI_OPCODE_DP3
:
985 /* TGSI_OPCODE_DOT3 */
986 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
987 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
988 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
989 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
990 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
991 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
992 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
993 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
994 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
995 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
996 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
997 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
998 dst0
[chan_index
] = tmp0
;
1002 case TGSI_OPCODE_DP4
:
1003 /* TGSI_OPCODE_DOT4 */
1004 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1005 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1006 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1007 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1008 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1009 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1010 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1011 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1012 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1013 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1014 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1015 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
1016 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1017 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1018 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1019 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1020 dst0
[chan_index
] = tmp0
;
1024 case TGSI_OPCODE_DST
:
1025 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1026 dst0
[CHAN_X
] = bld
->base
.one
;
1028 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1029 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1030 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1031 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1033 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1034 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
1036 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1037 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
1041 case TGSI_OPCODE_MIN
:
1042 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1043 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1044 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1045 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
1049 case TGSI_OPCODE_MAX
:
1050 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1051 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1052 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1053 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
1057 case TGSI_OPCODE_SLT
:
1058 /* TGSI_OPCODE_SETLT */
1059 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1060 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1061 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1062 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
1063 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1067 case TGSI_OPCODE_SGE
:
1068 /* TGSI_OPCODE_SETGE */
1069 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1070 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1071 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1072 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
1073 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1077 case TGSI_OPCODE_MAD
:
1078 /* TGSI_OPCODE_MADD */
1079 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1080 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1081 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1082 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
1083 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1084 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
1085 dst0
[chan_index
] = tmp0
;
1089 case TGSI_OPCODE_SUB
:
1090 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1091 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1092 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1093 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
1097 case TGSI_OPCODE_LRP
:
1098 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1099 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1100 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1101 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1102 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
1103 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
1104 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
1108 case TGSI_OPCODE_CND
:
1109 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1110 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1111 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1112 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1113 tmp1
= lp_build_const_vec(bld
->base
.type
, 0.5);
1114 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
1115 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
1119 case TGSI_OPCODE_DP2A
:
1120 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1121 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1122 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1123 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1124 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1125 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1126 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1127 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
1128 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1129 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1130 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1134 case TGSI_OPCODE_FRC
:
1135 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1136 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1137 tmp0
= lp_build_floor(&bld
->base
, src0
);
1138 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
1139 dst0
[chan_index
] = tmp0
;
1143 case TGSI_OPCODE_CLAMP
:
1144 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1145 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1146 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1147 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1148 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
1149 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
1150 dst0
[chan_index
] = tmp0
;
1154 case TGSI_OPCODE_FLR
:
1155 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1156 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1157 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
1161 case TGSI_OPCODE_ROUND
:
1162 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1163 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1164 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
1168 case TGSI_OPCODE_EX2
: {
1169 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1170 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
1171 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1172 dst0
[chan_index
] = tmp0
;
1177 case TGSI_OPCODE_LG2
:
1178 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1179 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
1180 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1181 dst0
[chan_index
] = tmp0
;
1185 case TGSI_OPCODE_POW
:
1186 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1187 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1188 res
= lp_build_pow( &bld
->base
, src0
, src1
);
1189 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1190 dst0
[chan_index
] = res
;
1194 case TGSI_OPCODE_XPD
:
1195 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1196 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1197 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1198 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1200 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1201 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1202 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1203 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1205 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1207 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
1209 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1210 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
1211 dst0
[CHAN_X
] = tmp2
;
1213 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1214 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1215 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1216 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1218 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1219 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
1220 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
1221 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
1222 dst0
[CHAN_Y
] = tmp3
;
1224 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1225 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1226 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
1227 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1228 dst0
[CHAN_Z
] = tmp5
;
1230 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1231 dst0
[CHAN_W
] = bld
->base
.one
;
1235 case TGSI_OPCODE_ABS
:
1236 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1237 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1238 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1242 case TGSI_OPCODE_RCC
:
1247 case TGSI_OPCODE_DPH
:
1248 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1249 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1250 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1251 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1252 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1253 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1254 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1255 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1256 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1257 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1258 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1259 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1260 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1261 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1262 dst0
[chan_index
] = tmp0
;
1266 case TGSI_OPCODE_COS
:
1267 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1268 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1269 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1270 dst0
[chan_index
] = tmp0
;
1274 case TGSI_OPCODE_DDX
:
1275 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1276 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1280 case TGSI_OPCODE_DDY
:
1281 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1282 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1286 case TGSI_OPCODE_KILP
:
1287 /* predicated kill */
1288 emit_kilp( bld
, inst
);
1291 case TGSI_OPCODE_KIL
:
1292 /* conditional kill */
1293 emit_kil( bld
, inst
);
1296 case TGSI_OPCODE_PK2H
:
1300 case TGSI_OPCODE_PK2US
:
1304 case TGSI_OPCODE_PK4B
:
1308 case TGSI_OPCODE_PK4UB
:
1312 case TGSI_OPCODE_RFL
:
1316 case TGSI_OPCODE_SEQ
:
1317 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1318 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1319 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1320 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1321 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1325 case TGSI_OPCODE_SFL
:
1326 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1327 dst0
[chan_index
] = bld
->base
.zero
;
1331 case TGSI_OPCODE_SGT
:
1332 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1333 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1334 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1335 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1336 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1340 case TGSI_OPCODE_SIN
:
1341 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1342 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1343 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1344 dst0
[chan_index
] = tmp0
;
1348 case TGSI_OPCODE_SLE
:
1349 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1350 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1351 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1352 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1353 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1357 case TGSI_OPCODE_SNE
:
1358 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1359 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1360 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1361 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1362 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1366 case TGSI_OPCODE_STR
:
1367 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1368 dst0
[chan_index
] = bld
->base
.one
;
1372 case TGSI_OPCODE_TEX
:
1373 emit_tex( bld
, inst
, FALSE
, FALSE
, dst0
);
1376 case TGSI_OPCODE_TXD
:
1381 case TGSI_OPCODE_UP2H
:
1387 case TGSI_OPCODE_UP2US
:
1393 case TGSI_OPCODE_UP4B
:
1399 case TGSI_OPCODE_UP4UB
:
1405 case TGSI_OPCODE_X2D
:
1411 case TGSI_OPCODE_ARA
:
1417 case TGSI_OPCODE_ARR
:
1418 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1419 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1420 tmp0
= lp_build_round(&bld
->base
, tmp0
);
1421 dst0
[chan_index
] = tmp0
;
1425 case TGSI_OPCODE_BRA
:
1431 case TGSI_OPCODE_CAL
:
1436 case TGSI_OPCODE_RET
:
1441 case TGSI_OPCODE_END
:
1444 case TGSI_OPCODE_SSG
:
1445 /* TGSI_OPCODE_SGN */
1446 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1447 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1448 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1452 case TGSI_OPCODE_CMP
:
1453 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1454 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1455 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1456 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1457 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1458 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1462 case TGSI_OPCODE_SCS
:
1463 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1464 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1465 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1467 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1468 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1469 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1471 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1472 dst0
[CHAN_Z
] = bld
->base
.zero
;
1474 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1475 dst0
[CHAN_W
] = bld
->base
.one
;
1479 case TGSI_OPCODE_TXB
:
1480 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1483 case TGSI_OPCODE_NRM
:
1485 case TGSI_OPCODE_NRM4
:
1486 /* 3 or 4-component normalization */
1488 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1490 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1491 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1492 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1493 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1495 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1498 /* xmm0 = src.x * src.x */
1499 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1500 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1503 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1506 /* xmm0 = xmm0 + src.y * src.y */
1507 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1508 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1511 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1512 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1515 /* xmm0 = xmm0 + src.z * src.z */
1516 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1517 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1520 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1521 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1525 /* xmm0 = xmm0 + src.w * src.w */
1526 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1527 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1530 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1531 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1534 /* xmm1 = 1 / sqrt(xmm0) */
1535 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1537 /* dst.x = xmm1 * src.x */
1538 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1539 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1542 /* dst.y = xmm1 * src.y */
1543 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1544 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1547 /* dst.z = xmm1 * src.z */
1548 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1549 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1552 /* dst.w = xmm1 * src.w */
1553 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1554 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1559 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1560 dst0
[CHAN_W
] = bld
->base
.one
;
1565 case TGSI_OPCODE_DIV
:
1571 case TGSI_OPCODE_DP2
:
1572 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1573 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1574 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1575 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1576 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1577 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1578 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1579 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1580 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1584 case TGSI_OPCODE_TXL
:
1585 emit_tex( bld
, inst
, TRUE
, FALSE
, dst0
);
1588 case TGSI_OPCODE_TXP
:
1589 emit_tex( bld
, inst
, FALSE
, TRUE
, dst0
);
1592 case TGSI_OPCODE_BRK
:
1593 lp_exec_break(&bld
->exec_mask
);
1596 case TGSI_OPCODE_IF
:
1597 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1598 tmp0
= lp_build_cmp(&bld
->base
, PIPE_FUNC_NOTEQUAL
,
1599 tmp0
, bld
->base
.zero
);
1600 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1603 case TGSI_OPCODE_BGNFOR
:
1605 case TGSI_OPCODE_BGNLOOP
:
1606 lp_exec_bgnloop(&bld
->exec_mask
);
1609 case TGSI_OPCODE_REP
:
1615 case TGSI_OPCODE_ELSE
:
1616 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1619 case TGSI_OPCODE_ENDIF
:
1620 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1623 case TGSI_OPCODE_ENDFOR
:
1625 case TGSI_OPCODE_ENDLOOP
:
1626 lp_exec_endloop(&bld
->exec_mask
);
1629 case TGSI_OPCODE_ENDREP
:
1635 case TGSI_OPCODE_PUSHA
:
1641 case TGSI_OPCODE_POPA
:
1647 case TGSI_OPCODE_CEIL
:
1648 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1649 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1650 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1654 case TGSI_OPCODE_I2F
:
1660 case TGSI_OPCODE_NOT
:
1666 case TGSI_OPCODE_TRUNC
:
1667 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1668 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1669 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
1673 case TGSI_OPCODE_SHL
:
1679 case TGSI_OPCODE_ISHR
:
1685 case TGSI_OPCODE_AND
:
1691 case TGSI_OPCODE_OR
:
1697 case TGSI_OPCODE_MOD
:
1703 case TGSI_OPCODE_XOR
:
1709 case TGSI_OPCODE_SAD
:
1715 case TGSI_OPCODE_TXF
:
1721 case TGSI_OPCODE_TXQ
:
1727 case TGSI_OPCODE_CONT
:
1728 lp_exec_continue(&bld
->exec_mask
);
1731 case TGSI_OPCODE_EMIT
:
1735 case TGSI_OPCODE_ENDPRIM
:
1739 case TGSI_OPCODE_NOP
:
1747 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1748 emit_store( bld
, inst
, 0, chan_index
, dst0
[chan_index
]);
1757 lp_build_tgsi_soa(LLVMBuilderRef builder
,
1758 const struct tgsi_token
*tokens
,
1759 struct lp_type type
,
1760 struct lp_build_mask_context
*mask
,
1761 LLVMValueRef consts_ptr
,
1762 const LLVMValueRef
*pos
,
1763 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
1764 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
1765 struct lp_build_sampler_soa
*sampler
,
1766 struct tgsi_shader_info
*info
)
1768 struct lp_build_tgsi_soa_context bld
;
1769 struct tgsi_parse_context parse
;
1770 uint num_immediates
= 0;
1773 /* Setup build context */
1774 memset(&bld
, 0, sizeof bld
);
1775 lp_build_context_init(&bld
.base
, builder
, type
);
1778 bld
.inputs
= inputs
;
1779 bld
.outputs
= outputs
;
1780 bld
.consts_ptr
= consts_ptr
;
1781 bld
.sampler
= sampler
;
1782 bld
.has_indirect_addressing
= info
->opcode_count
[TGSI_OPCODE_ARR
] > 0 ||
1783 info
->opcode_count
[TGSI_OPCODE_ARL
] > 0;
1785 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
1787 tgsi_parse_init( &parse
, tokens
);
1789 while( !tgsi_parse_end_of_tokens( &parse
) ) {
1790 tgsi_parse_token( &parse
);
1792 switch( parse
.FullToken
.Token
.Type
) {
1793 case TGSI_TOKEN_TYPE_DECLARATION
:
1794 /* Inputs already interpolated */
1796 if (!emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
))
1797 _debug_printf("warning: failed to define LLVM variable\n");
1801 case TGSI_TOKEN_TYPE_INSTRUCTION
:
1803 unsigned opcode
= parse
.FullToken
.FullInstruction
.Instruction
.Opcode
;
1804 const struct tgsi_opcode_info
*opcode_info
= tgsi_get_opcode_info(opcode
);
1805 if (!emit_instruction( &bld
, &parse
.FullToken
.FullInstruction
, opcode_info
))
1806 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
1807 opcode_info
->mnemonic
);
1812 case TGSI_TOKEN_TYPE_IMMEDIATE
:
1813 /* simply copy the immediate values into the next immediates[] slot */
1815 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
1817 assert(num_immediates
< LP_MAX_IMMEDIATES
);
1818 for( i
= 0; i
< size
; ++i
)
1819 bld
.immediates
[num_immediates
][i
] =
1820 lp_build_const_vec(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
1821 for( i
= size
; i
< 4; ++i
)
1822 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
1827 case TGSI_TOKEN_TYPE_PROPERTY
:
1835 LLVMBasicBlockRef block
= LLVMGetInsertBlock(builder
);
1836 LLVMValueRef function
= LLVMGetBasicBlockParent(block
);
1837 debug_printf("11111111111111111111111111111 \n");
1838 tgsi_dump(tokens
, 0);
1839 LLVMDumpValue(function
);
1840 debug_printf("2222222222222222222222222222 \n");
1842 tgsi_parse_free( &parse
);