1 /**************************************************************************
3 * Copyright 2009 VMware, Inc.
4 * Copyright 2007-2008 Tungsten Graphics, Inc., Cedar Park, Texas.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 **************************************************************************/
31 * TGSI to LLVM IR translation -- SoA.
33 * @author Jose Fonseca <jfonseca@vmware.com>
35 * Based on tgsi_sse2.c code written by Michal Krol, Keith Whitwell,
36 * Brian Paul, and others.
39 #include "pipe/p_config.h"
40 #include "pipe/p_shader_tokens.h"
41 #include "util/u_debug.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "tgsi/tgsi_dump.h"
45 #include "tgsi/tgsi_info.h"
46 #include "tgsi/tgsi_parse.h"
47 #include "tgsi/tgsi_util.h"
48 #include "tgsi/tgsi_scan.h"
49 #include "lp_bld_type.h"
50 #include "lp_bld_const.h"
51 #include "lp_bld_arit.h"
52 #include "lp_bld_bitarit.h"
53 #include "lp_bld_gather.h"
54 #include "lp_bld_logic.h"
55 #include "lp_bld_swizzle.h"
56 #include "lp_bld_flow.h"
57 #include "lp_bld_quad.h"
58 #include "lp_bld_tgsi.h"
59 #include "lp_bld_limits.h"
60 #include "lp_bld_debug.h"
/* Iterate CHAN over the four TGSI components (X, Y, Z, W). */
#define FOR_EACH_CHANNEL( CHAN )\
   for (CHAN = 0; CHAN < NUM_CHANNELS; CHAN++)

/* Non-zero if channel CHAN is set in the instruction's dst[0] writemask. */
#define IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   ((INST)->Dst[0].Register.WriteMask & (1 << (CHAN)))

/* Guard a statement so it only runs for writemask-enabled channels. */
#define IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )\
   if (IS_DST0_CHANNEL_ENABLED( INST, CHAN ))

/* Loop over exactly the dst[0] writemask-enabled channels. */
#define FOR_EACH_DST0_ENABLED_CHANNEL( INST, CHAN )\
   FOR_EACH_CHANNEL( CHAN )\
   IF_IS_DST0_CHANNEL_ENABLED( INST, CHAN )

/* Number of vector components per TGSI register. */
#define NUM_CHANNELS 4

/* Upper bound on instructions kept for lookahead/flow analysis. */
#define LP_MAX_INSTRUCTIONS 256
86 struct lp_build_context
*bld
;
90 LLVMTypeRef int_vec_type
;
92 LLVMValueRef cond_stack
[LP_MAX_TGSI_NESTING
];
94 LLVMValueRef cond_mask
;
96 LLVMBasicBlockRef loop_block
;
97 LLVMValueRef cont_mask
;
98 LLVMValueRef break_mask
;
99 LLVMValueRef break_var
;
101 LLVMBasicBlockRef loop_block
;
102 LLVMValueRef cont_mask
;
103 LLVMValueRef break_mask
;
104 LLVMValueRef break_var
;
105 } loop_stack
[LP_MAX_TGSI_NESTING
];
108 LLVMValueRef ret_mask
;
111 LLVMValueRef ret_mask
;
112 } call_stack
[LP_MAX_TGSI_NESTING
];
115 LLVMValueRef exec_mask
;
118 struct lp_build_tgsi_soa_context
120 struct lp_build_context base
;
122 /* Builder for integer masks and indices */
123 struct lp_build_context uint_bld
;
125 LLVMValueRef consts_ptr
;
126 const LLVMValueRef
*pos
;
127 const LLVMValueRef (*inputs
)[NUM_CHANNELS
];
128 LLVMValueRef (*outputs
)[NUM_CHANNELS
];
130 const struct lp_build_sampler_soa
*sampler
;
132 LLVMValueRef immediates
[LP_MAX_TGSI_IMMEDIATES
][NUM_CHANNELS
];
133 LLVMValueRef temps
[LP_MAX_TGSI_TEMPS
][NUM_CHANNELS
];
134 LLVMValueRef addr
[LP_MAX_TGSI_ADDRS
][NUM_CHANNELS
];
135 LLVMValueRef preds
[LP_MAX_TGSI_PREDS
][NUM_CHANNELS
];
137 /* We allocate/use this array of temps if (1 << TGSI_FILE_TEMPORARY) is
138 * set in the indirect_files field.
139 * The temps[] array above is unused then.
141 LLVMValueRef temps_array
;
143 const struct tgsi_shader_info
*info
;
144 /** bitmask indicating which register files are accessed indirectly */
145 unsigned indirect_files
;
147 struct lp_build_mask_context
*mask
;
148 struct lp_exec_mask exec_mask
;
150 struct tgsi_full_instruction
*instructions
;
151 uint max_instructions
;
154 static void lp_exec_mask_init(struct lp_exec_mask
*mask
, struct lp_build_context
*bld
)
157 mask
->has_mask
= FALSE
;
158 mask
->cond_stack_size
= 0;
159 mask
->loop_stack_size
= 0;
160 mask
->call_stack_size
= 0;
162 mask
->int_vec_type
= lp_build_int_vec_type(mask
->bld
->type
);
163 mask
->exec_mask
= mask
->ret_mask
= mask
->break_mask
= mask
->cont_mask
= mask
->cond_mask
=
164 LLVMConstAllOnes(mask
->int_vec_type
);
167 static void lp_exec_mask_update(struct lp_exec_mask
*mask
)
169 if (mask
->loop_stack_size
) {
170 /*for loops we need to update the entire mask at runtime */
172 assert(mask
->break_mask
);
173 tmp
= LLVMBuildAnd(mask
->bld
->builder
,
177 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
182 mask
->exec_mask
= mask
->cond_mask
;
184 if (mask
->call_stack_size
) {
185 mask
->exec_mask
= LLVMBuildAnd(mask
->bld
->builder
,
191 mask
->has_mask
= (mask
->cond_stack_size
> 0 ||
192 mask
->loop_stack_size
> 0 ||
193 mask
->call_stack_size
> 0);
196 static void lp_exec_mask_cond_push(struct lp_exec_mask
*mask
,
199 assert(mask
->cond_stack_size
< LP_MAX_TGSI_NESTING
);
200 if (mask
->cond_stack_size
== 0) {
201 assert(mask
->cond_mask
== LLVMConstAllOnes(mask
->int_vec_type
));
203 mask
->cond_stack
[mask
->cond_stack_size
++] = mask
->cond_mask
;
204 assert(LLVMTypeOf(val
) == mask
->int_vec_type
);
205 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
209 lp_exec_mask_update(mask
);
212 static void lp_exec_mask_cond_invert(struct lp_exec_mask
*mask
)
214 LLVMValueRef prev_mask
;
215 LLVMValueRef inv_mask
;
217 assert(mask
->cond_stack_size
);
218 prev_mask
= mask
->cond_stack
[mask
->cond_stack_size
- 1];
219 if (mask
->cond_stack_size
== 1) {
220 assert(prev_mask
== LLVMConstAllOnes(mask
->int_vec_type
));
223 inv_mask
= LLVMBuildNot(mask
->bld
->builder
, mask
->cond_mask
, "");
225 mask
->cond_mask
= LLVMBuildAnd(mask
->bld
->builder
,
228 lp_exec_mask_update(mask
);
231 static void lp_exec_mask_cond_pop(struct lp_exec_mask
*mask
)
233 assert(mask
->cond_stack_size
);
234 mask
->cond_mask
= mask
->cond_stack
[--mask
->cond_stack_size
];
235 lp_exec_mask_update(mask
);
238 static void lp_exec_bgnloop(struct lp_exec_mask
*mask
)
240 if (mask
->loop_stack_size
== 0) {
241 assert(mask
->loop_block
== NULL
);
242 assert(mask
->cont_mask
== LLVMConstAllOnes(mask
->int_vec_type
));
243 assert(mask
->break_mask
== LLVMConstAllOnes(mask
->int_vec_type
));
244 assert(mask
->break_var
== NULL
);
247 assert(mask
->loop_stack_size
< LP_MAX_TGSI_NESTING
);
249 mask
->loop_stack
[mask
->loop_stack_size
].loop_block
= mask
->loop_block
;
250 mask
->loop_stack
[mask
->loop_stack_size
].cont_mask
= mask
->cont_mask
;
251 mask
->loop_stack
[mask
->loop_stack_size
].break_mask
= mask
->break_mask
;
252 mask
->loop_stack
[mask
->loop_stack_size
].break_var
= mask
->break_var
;
253 ++mask
->loop_stack_size
;
255 mask
->break_var
= lp_build_alloca(mask
->bld
->builder
, mask
->int_vec_type
, "");
256 LLVMBuildStore(mask
->bld
->builder
, mask
->break_mask
, mask
->break_var
);
258 mask
->loop_block
= lp_build_insert_new_block(mask
->bld
->builder
, "bgnloop");
259 LLVMBuildBr(mask
->bld
->builder
, mask
->loop_block
);
260 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, mask
->loop_block
);
262 mask
->break_mask
= LLVMBuildLoad(mask
->bld
->builder
, mask
->break_var
, "");
264 lp_exec_mask_update(mask
);
267 static void lp_exec_break(struct lp_exec_mask
*mask
)
269 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
273 mask
->break_mask
= LLVMBuildAnd(mask
->bld
->builder
,
275 exec_mask
, "break_full");
277 lp_exec_mask_update(mask
);
280 static void lp_exec_continue(struct lp_exec_mask
*mask
)
282 LLVMValueRef exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
286 mask
->cont_mask
= LLVMBuildAnd(mask
->bld
->builder
,
290 lp_exec_mask_update(mask
);
294 static void lp_exec_endloop(struct lp_exec_mask
*mask
)
296 LLVMBasicBlockRef endloop
;
297 LLVMTypeRef reg_type
= LLVMIntType(mask
->bld
->type
.width
*
298 mask
->bld
->type
.length
);
301 assert(mask
->break_mask
);
304 * Restore the cont_mask, but don't pop
306 assert(mask
->loop_stack_size
);
307 mask
->cont_mask
= mask
->loop_stack
[mask
->loop_stack_size
- 1].cont_mask
;
308 lp_exec_mask_update(mask
);
311 * Unlike the continue mask, the break_mask must be preserved across loop
314 LLVMBuildStore(mask
->bld
->builder
, mask
->break_mask
, mask
->break_var
);
316 /* i1cond = (mask == 0) */
317 i1cond
= LLVMBuildICmp(
320 LLVMBuildBitCast(mask
->bld
->builder
, mask
->exec_mask
, reg_type
, ""),
321 LLVMConstNull(reg_type
), "");
323 endloop
= lp_build_insert_new_block(mask
->bld
->builder
, "endloop");
325 LLVMBuildCondBr(mask
->bld
->builder
,
326 i1cond
, mask
->loop_block
, endloop
);
328 LLVMPositionBuilderAtEnd(mask
->bld
->builder
, endloop
);
330 assert(mask
->loop_stack_size
);
331 --mask
->loop_stack_size
;
332 mask
->loop_block
= mask
->loop_stack
[mask
->loop_stack_size
].loop_block
;
333 mask
->cont_mask
= mask
->loop_stack
[mask
->loop_stack_size
].cont_mask
;
334 mask
->break_mask
= mask
->loop_stack
[mask
->loop_stack_size
].break_mask
;
335 mask
->break_var
= mask
->loop_stack
[mask
->loop_stack_size
].break_var
;
337 lp_exec_mask_update(mask
);
340 /* stores val into an address pointed to by dst.
341 * mask->exec_mask is used to figure out which bits of val
342 * should be stored into the address
343 * (0 means don't store this bit, 1 means do store).
345 static void lp_exec_mask_store(struct lp_exec_mask
*mask
,
350 /* Mix the predicate and execution mask */
351 if (mask
->has_mask
) {
353 pred
= LLVMBuildAnd(mask
->bld
->builder
, pred
, mask
->exec_mask
, "");
355 pred
= mask
->exec_mask
;
360 LLVMValueRef real_val
, dst_val
;
362 dst_val
= LLVMBuildLoad(mask
->bld
->builder
, dst
, "");
363 real_val
= lp_build_select(mask
->bld
,
367 LLVMBuildStore(mask
->bld
->builder
, real_val
, dst
);
369 LLVMBuildStore(mask
->bld
->builder
, val
, dst
);
372 static void lp_exec_mask_call(struct lp_exec_mask
*mask
,
376 assert(mask
->call_stack_size
< LP_MAX_TGSI_NESTING
);
377 mask
->call_stack
[mask
->call_stack_size
].pc
= *pc
;
378 mask
->call_stack
[mask
->call_stack_size
].ret_mask
= mask
->ret_mask
;
379 mask
->call_stack_size
++;
383 static void lp_exec_mask_ret(struct lp_exec_mask
*mask
, int *pc
)
385 LLVMValueRef exec_mask
;
387 if (mask
->call_stack_size
== 0) {
388 /* returning from main() */
392 exec_mask
= LLVMBuildNot(mask
->bld
->builder
,
396 mask
->ret_mask
= LLVMBuildAnd(mask
->bld
->builder
,
398 exec_mask
, "ret_full");
400 lp_exec_mask_update(mask
);
/**
 * TGSI BGNSUB: no mask work is needed on subroutine entry (CAL already set
 * everything up); kept for symmetry with lp_exec_mask_endsub().
 *
 * NOTE(review): the body lines are absent from the extraction; upstream's
 * body is empty -- confirm against the original file.
 */
static void lp_exec_mask_bgnsub(struct lp_exec_mask *mask)
{
}
407 static void lp_exec_mask_endsub(struct lp_exec_mask
*mask
, int *pc
)
409 assert(mask
->call_stack_size
);
410 mask
->call_stack_size
--;
411 *pc
= mask
->call_stack
[mask
->call_stack_size
].pc
;
412 mask
->ret_mask
= mask
->call_stack
[mask
->call_stack_size
].ret_mask
;
413 lp_exec_mask_update(mask
);
418 * Return pointer to a temporary register channel (src or dest).
419 * Note that indirect addressing cannot be handled here.
420 * \param index which temporary register
421 * \param chan which channel of the temp register.
424 get_temp_ptr(struct lp_build_tgsi_soa_context
*bld
,
429 if (bld
->indirect_files
& (1 << TGSI_FILE_TEMPORARY
)) {
430 LLVMValueRef lindex
= lp_build_const_int32(index
* 4 + chan
);
431 return LLVMBuildGEP(bld
->base
.builder
, bld
->temps_array
, &lindex
, 1, "");
434 return bld
->temps
[index
][chan
];
441 * XXX the lp_build_gather() function should be capable of doing this
442 * with a little work.
445 build_gather(struct lp_build_tgsi_soa_context
*bld
,
446 LLVMValueRef base_ptr
,
447 LLVMValueRef indexes
)
449 LLVMValueRef res
= bld
->base
.undef
;
453 * Loop over elements of index_vec, load scalar value, insert it into 'res'.
455 for (i
= 0; i
< bld
->base
.type
.length
; i
++) {
456 LLVMValueRef ii
= LLVMConstInt(LLVMInt32Type(), i
, 0);
457 LLVMValueRef index
= LLVMBuildExtractElement(bld
->base
.builder
,
459 LLVMValueRef scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, base_ptr
,
461 LLVMValueRef scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
463 res
= LLVMBuildInsertElement(bld
->base
.builder
, res
, scalar
, ii
, "");
471 * Scatter/store vector.
474 build_scatter(struct lp_build_tgsi_soa_context
*bld
,
475 LLVMValueRef base_ptr
,
476 LLVMValueRef indexes
,
479 LLVMBuilderRef builder
= bld
->base
.builder
;
483 * Loop over elements of index_vec, store scalar value.
485 for (i
= 0; i
< bld
->base
.type
.length
; i
++) {
486 LLVMValueRef ii
= LLVMConstInt(LLVMInt32Type(), i
, 0);
487 LLVMValueRef index
= LLVMBuildExtractElement(builder
, indexes
, ii
, "");
488 LLVMValueRef scalar_ptr
= LLVMBuildGEP(builder
, base_ptr
, &index
, 1, "scatter_ptr");
489 LLVMValueRef val
= LLVMBuildExtractElement(builder
, values
, ii
, "scatter_val");
491 LLVMBuildStore(builder
, val
, scalar_ptr
);
497 * Read the current value of the ADDR register, convert the floats to
498 * ints, add the base index and return the vector of offsets.
499 * The offsets will be used to index into the constant buffer or
500 * temporary register file.
503 get_indirect_index(struct lp_build_tgsi_soa_context
*bld
,
504 unsigned reg_file
, unsigned reg_index
,
505 const struct tgsi_src_register
*indirect_reg
)
507 struct lp_build_context
*uint_bld
= &bld
->uint_bld
;
508 /* always use X component of address register */
509 unsigned swizzle
= indirect_reg
->SwizzleX
;
512 LLVMValueRef max_index
;
515 assert(bld
->indirect_files
& (1 << reg_file
));
517 base
= lp_build_const_int_vec(uint_bld
->type
, reg_index
);
520 rel
= LLVMBuildLoad(bld
->base
.builder
,
521 bld
->addr
[indirect_reg
->Index
][swizzle
],
524 /* for indexing we want integers */
525 rel
= LLVMBuildFPToSI(bld
->base
.builder
,
527 uint_bld
->vec_type
, "");
529 index
= lp_build_add(uint_bld
, base
, rel
);
531 max_index
= lp_build_const_int_vec(uint_bld
->type
,
532 bld
->info
->file_max
[reg_file
]);
534 assert(!uint_bld
->type
.sign
);
535 index
= lp_build_min(uint_bld
, index
, max_index
);
546 struct lp_build_tgsi_soa_context
*bld
,
547 const struct tgsi_full_instruction
*inst
,
549 const unsigned chan_index
)
551 struct lp_build_context
*uint_bld
= &bld
->uint_bld
;
552 const struct tgsi_full_src_register
*reg
= &inst
->Src
[src_op
];
553 const unsigned swizzle
=
554 tgsi_util_get_full_src_register_swizzle(reg
, chan_index
);
556 LLVMValueRef indirect_index
= NULL
;
559 assert(0 && "invalid swizzle in emit_fetch()");
560 return bld
->base
.undef
;
563 if (reg
->Register
.Indirect
) {
564 indirect_index
= get_indirect_index(bld
,
569 assert(reg
->Register
.Index
<= bld
->info
->file_max
[reg
->Register
.File
]);
572 switch (reg
->Register
.File
) {
573 case TGSI_FILE_CONSTANT
:
574 if (reg
->Register
.Indirect
) {
575 LLVMValueRef swizzle_vec
=
576 lp_build_const_int_vec(uint_bld
->type
, swizzle
);
577 LLVMValueRef index_vec
; /* index into the const buffer */
579 /* index_vec = indirect_index * 4 + swizzle */
580 index_vec
= lp_build_shl_imm(uint_bld
, indirect_index
, 2);
581 index_vec
= lp_build_add(uint_bld
, index_vec
, swizzle_vec
);
583 /* Gather values from the constant buffer */
584 res
= build_gather(bld
, bld
->consts_ptr
, index_vec
);
587 LLVMValueRef index
; /* index into the const buffer */
588 LLVMValueRef scalar
, scalar_ptr
;
590 index
= lp_build_const_int32(reg
->Register
.Index
*4 + swizzle
);
592 scalar_ptr
= LLVMBuildGEP(bld
->base
.builder
, bld
->consts_ptr
,
594 scalar
= LLVMBuildLoad(bld
->base
.builder
, scalar_ptr
, "");
596 res
= lp_build_broadcast_scalar(&bld
->base
, scalar
);
600 case TGSI_FILE_IMMEDIATE
:
601 res
= bld
->immediates
[reg
->Register
.Index
][swizzle
];
605 case TGSI_FILE_INPUT
:
606 res
= bld
->inputs
[reg
->Register
.Index
][swizzle
];
610 case TGSI_FILE_TEMPORARY
:
611 if (reg
->Register
.Indirect
) {
612 LLVMValueRef swizzle_vec
=
613 lp_build_const_int_vec(uint_bld
->type
, swizzle
);
614 LLVMValueRef length_vec
=
615 lp_build_const_int_vec(uint_bld
->type
, bld
->base
.type
.length
);
616 LLVMValueRef index_vec
; /* index into the const buffer */
617 LLVMValueRef temps_array
;
618 LLVMTypeRef float4_ptr_type
;
620 /* index_vec = (indirect_index * 4 + swizzle) * length */
621 index_vec
= lp_build_shl_imm(uint_bld
, indirect_index
, 2);
622 index_vec
= lp_build_add(uint_bld
, index_vec
, swizzle_vec
);
623 index_vec
= lp_build_mul(uint_bld
, index_vec
, length_vec
);
625 /* cast temps_array pointer to float* */
626 float4_ptr_type
= LLVMPointerType(LLVMFloatType(), 0);
627 temps_array
= LLVMBuildBitCast(uint_bld
->builder
, bld
->temps_array
,
628 float4_ptr_type
, "");
630 /* Gather values from the temporary register array */
631 res
= build_gather(bld
, temps_array
, index_vec
);
634 LLVMValueRef temp_ptr
;
635 temp_ptr
= get_temp_ptr(bld
, reg
->Register
.Index
, swizzle
);
636 res
= LLVMBuildLoad(bld
->base
.builder
, temp_ptr
, "");
638 return bld
->base
.undef
;
643 assert(0 && "invalid src register in emit_fetch()");
644 return bld
->base
.undef
;
647 switch( tgsi_util_get_full_src_register_sign_mode( reg
, chan_index
) ) {
648 case TGSI_UTIL_SIGN_CLEAR
:
649 res
= lp_build_abs( &bld
->base
, res
);
652 case TGSI_UTIL_SIGN_SET
:
653 res
= lp_build_abs( &bld
->base
, res
);
655 case TGSI_UTIL_SIGN_TOGGLE
:
656 res
= lp_build_negate( &bld
->base
, res
);
659 case TGSI_UTIL_SIGN_KEEP
:
668 * Register fetch with derivatives.
672 struct lp_build_tgsi_soa_context
*bld
,
673 const struct tgsi_full_instruction
*inst
,
675 const unsigned chan_index
,
682 src
= emit_fetch(bld
, inst
, index
, chan_index
);
687 /* TODO: use interpolation coeffs for inputs */
690 *ddx
= lp_build_ddx(&bld
->base
, src
);
693 *ddy
= lp_build_ddy(&bld
->base
, src
);
701 emit_fetch_predicate(
702 struct lp_build_tgsi_soa_context
*bld
,
703 const struct tgsi_full_instruction
*inst
,
707 unsigned char swizzles
[4];
708 LLVMValueRef unswizzled
[4] = {NULL
, NULL
, NULL
, NULL
};
712 if (!inst
->Instruction
.Predicate
) {
713 FOR_EACH_CHANNEL( chan
) {
719 swizzles
[0] = inst
->Predicate
.SwizzleX
;
720 swizzles
[1] = inst
->Predicate
.SwizzleY
;
721 swizzles
[2] = inst
->Predicate
.SwizzleZ
;
722 swizzles
[3] = inst
->Predicate
.SwizzleW
;
724 index
= inst
->Predicate
.Index
;
725 assert(index
< LP_MAX_TGSI_PREDS
);
727 FOR_EACH_CHANNEL( chan
) {
728 unsigned swizzle
= swizzles
[chan
];
731 * Only fetch the predicate register channels that are actually listed
734 if (!unswizzled
[swizzle
]) {
735 value
= LLVMBuildLoad(bld
->base
.builder
,
736 bld
->preds
[index
][swizzle
], "");
739 * Convert the value to an integer mask.
741 * TODO: Short-circuit this comparison -- a D3D setp_xx instructions
742 * is needlessly causing two comparisons due to storing the intermediate
743 * result as float vector instead of an integer mask vector.
745 value
= lp_build_compare(bld
->base
.builder
,
750 if (inst
->Predicate
.Negate
) {
751 value
= LLVMBuildNot(bld
->base
.builder
, value
, "");
754 unswizzled
[swizzle
] = value
;
756 value
= unswizzled
[swizzle
];
769 struct lp_build_tgsi_soa_context
*bld
,
770 const struct tgsi_full_instruction
*inst
,
776 const struct tgsi_full_dst_register
*reg
= &inst
->Dst
[index
];
777 struct lp_build_context
*uint_bld
= &bld
->uint_bld
;
778 LLVMValueRef indirect_index
= NULL
;
780 switch( inst
->Instruction
.Saturate
) {
784 case TGSI_SAT_ZERO_ONE
:
785 value
= lp_build_max(&bld
->base
, value
, bld
->base
.zero
);
786 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
789 case TGSI_SAT_MINUS_PLUS_ONE
:
790 value
= lp_build_max(&bld
->base
, value
, lp_build_const_vec(bld
->base
.type
, -1.0));
791 value
= lp_build_min(&bld
->base
, value
, bld
->base
.one
);
798 if (reg
->Register
.Indirect
) {
799 indirect_index
= get_indirect_index(bld
,
804 assert(reg
->Register
.Index
<= bld
->info
->file_max
[reg
->Register
.File
]);
807 switch( reg
->Register
.File
) {
808 case TGSI_FILE_OUTPUT
:
809 lp_exec_mask_store(&bld
->exec_mask
, pred
, value
,
810 bld
->outputs
[reg
->Register
.Index
][chan_index
]);
813 case TGSI_FILE_TEMPORARY
:
814 if (reg
->Register
.Indirect
) {
815 LLVMValueRef chan_vec
=
816 lp_build_const_int_vec(uint_bld
->type
, chan_index
);
817 LLVMValueRef length_vec
=
818 lp_build_const_int_vec(uint_bld
->type
, bld
->base
.type
.length
);
819 LLVMValueRef index_vec
; /* indexes into the temp registers */
820 LLVMValueRef temps_array
;
821 LLVMTypeRef float_ptr_type
;
823 /* index_vec = (indirect_index * 4 + chan_index) * length */
824 index_vec
= lp_build_shl_imm(uint_bld
, indirect_index
, 2);
825 index_vec
= lp_build_add(uint_bld
, index_vec
, chan_vec
);
826 index_vec
= lp_build_mul(uint_bld
, index_vec
, length_vec
);
828 float_ptr_type
= LLVMPointerType(LLVMFloatType(), 0);
829 temps_array
= LLVMBuildBitCast(bld
->base
.builder
, bld
->temps_array
,
832 /* Scatter store values into temp registers */
833 build_scatter(bld
, temps_array
, index_vec
, value
);
836 LLVMValueRef temp_ptr
= get_temp_ptr(bld
, reg
->Register
.Index
,
838 lp_exec_mask_store(&bld
->exec_mask
, pred
, value
, temp_ptr
);
842 case TGSI_FILE_ADDRESS
:
843 lp_exec_mask_store(&bld
->exec_mask
, pred
, value
,
844 bld
->addr
[reg
->Indirect
.Index
][chan_index
]);
847 case TGSI_FILE_PREDICATE
:
848 lp_exec_mask_store(&bld
->exec_mask
, pred
, value
,
849 bld
->preds
[reg
->Register
.Index
][chan_index
]);
859 * High-level instruction translators.
863 emit_tex( struct lp_build_tgsi_soa_context
*bld
,
864 const struct tgsi_full_instruction
*inst
,
865 enum lp_build_tex_modifier modifier
,
869 LLVMValueRef lod_bias
, explicit_lod
;
870 LLVMValueRef oow
= NULL
;
871 LLVMValueRef coords
[3];
878 _debug_printf("warning: found texture instruction but no sampler generator supplied\n");
879 for (i
= 0; i
< 4; i
++) {
880 texel
[i
] = bld
->base
.undef
;
885 switch (inst
->Texture
.Texture
) {
886 case TGSI_TEXTURE_1D
:
889 case TGSI_TEXTURE_2D
:
890 case TGSI_TEXTURE_RECT
:
893 case TGSI_TEXTURE_SHADOW1D
:
894 case TGSI_TEXTURE_SHADOW2D
:
895 case TGSI_TEXTURE_SHADOWRECT
:
896 case TGSI_TEXTURE_3D
:
897 case TGSI_TEXTURE_CUBE
:
905 if (modifier
== LP_BLD_TEX_MODIFIER_LOD_BIAS
) {
906 lod_bias
= emit_fetch( bld
, inst
, 0, 3 );
909 else if (modifier
== LP_BLD_TEX_MODIFIER_EXPLICIT_LOD
) {
911 explicit_lod
= emit_fetch( bld
, inst
, 0, 3 );
918 if (modifier
== LP_BLD_TEX_MODIFIER_PROJECTED
) {
919 oow
= emit_fetch( bld
, inst
, 0, 3 );
920 oow
= lp_build_rcp(&bld
->base
, oow
);
923 for (i
= 0; i
< num_coords
; i
++) {
924 coords
[i
] = emit_fetch( bld
, inst
, 0, i
);
925 if (modifier
== LP_BLD_TEX_MODIFIER_PROJECTED
)
926 coords
[i
] = lp_build_mul(&bld
->base
, coords
[i
], oow
);
928 for (i
= num_coords
; i
< 3; i
++) {
929 coords
[i
] = bld
->base
.undef
;
932 if (modifier
== LP_BLD_TEX_MODIFIER_EXPLICIT_DERIV
) {
933 LLVMTypeRef i32t
= LLVMInt32Type();
934 LLVMValueRef index0
= LLVMConstInt(i32t
, 0, 0);
935 for (i
= 0; i
< num_coords
; i
++) {
936 LLVMValueRef src1
= emit_fetch( bld
, inst
, 1, i
);
937 LLVMValueRef src2
= emit_fetch( bld
, inst
, 2, i
);
938 ddx
[i
] = LLVMBuildExtractElement(bld
->base
.builder
, src1
, index0
, "");
939 ddy
[i
] = LLVMBuildExtractElement(bld
->base
.builder
, src2
, index0
, "");
941 unit
= inst
->Src
[3].Register
.Index
;
943 for (i
= 0; i
< num_coords
; i
++) {
944 ddx
[i
] = lp_build_scalar_ddx( &bld
->base
, coords
[i
] );
945 ddy
[i
] = lp_build_scalar_ddy( &bld
->base
, coords
[i
] );
947 unit
= inst
->Src
[1].Register
.Index
;
949 for (i
= num_coords
; i
< 3; i
++) {
950 ddx
[i
] = LLVMGetUndef(bld
->base
.elem_type
);
951 ddy
[i
] = LLVMGetUndef(bld
->base
.elem_type
);
954 bld
->sampler
->emit_fetch_texel(bld
->sampler
,
957 unit
, num_coords
, coords
,
959 lod_bias
, explicit_lod
,
964 near_end_of_shader(struct lp_build_tgsi_soa_context
*bld
,
969 for (i
= 0; i
< 5; i
++) {
972 if (pc
+ i
>= bld
->info
->num_instructions
)
975 opcode
= bld
->instructions
[pc
+ i
].Instruction
.Opcode
;
977 if (opcode
== TGSI_OPCODE_END
)
980 if (opcode
== TGSI_OPCODE_TEX
||
981 opcode
== TGSI_OPCODE_TXP
||
982 opcode
== TGSI_OPCODE_TXD
||
983 opcode
== TGSI_OPCODE_TXB
||
984 opcode
== TGSI_OPCODE_TXL
||
985 opcode
== TGSI_OPCODE_TXF
||
986 opcode
== TGSI_OPCODE_TXQ
||
987 opcode
== TGSI_OPCODE_CAL
||
988 opcode
== TGSI_OPCODE_CALLNZ
||
989 opcode
== TGSI_OPCODE_IF
||
990 opcode
== TGSI_OPCODE_IFC
||
991 opcode
== TGSI_OPCODE_BGNLOOP
||
992 opcode
== TGSI_OPCODE_SWITCH
)
1002 * Kill fragment if any of the src register values are negative.
1006 struct lp_build_tgsi_soa_context
*bld
,
1007 const struct tgsi_full_instruction
*inst
,
1010 const struct tgsi_full_src_register
*reg
= &inst
->Src
[0];
1011 LLVMValueRef terms
[NUM_CHANNELS
];
1013 unsigned chan_index
;
1015 memset(&terms
, 0, sizeof terms
);
1017 FOR_EACH_CHANNEL( chan_index
) {
1020 /* Unswizzle channel */
1021 swizzle
= tgsi_util_get_full_src_register_swizzle( reg
, chan_index
);
1023 /* Check if the component has not been already tested. */
1024 assert(swizzle
< NUM_CHANNELS
);
1025 if( !terms
[swizzle
] )
1026 /* TODO: change the comparison operator instead of setting the sign */
1027 terms
[swizzle
] = emit_fetch(bld
, inst
, 0, chan_index
);
1031 FOR_EACH_CHANNEL( chan_index
) {
1032 if(terms
[chan_index
]) {
1033 LLVMValueRef chan_mask
;
1036 * If term < 0 then mask = 0 else mask = ~0.
1038 chan_mask
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GEQUAL
, terms
[chan_index
], bld
->base
.zero
);
1041 mask
= LLVMBuildAnd(bld
->base
.builder
, mask
, chan_mask
, "");
1048 lp_build_mask_update(bld
->mask
, mask
);
1050 if (!near_end_of_shader(bld
, pc
))
1051 lp_build_mask_check(bld
->mask
);
1057 * Predicated fragment kill.
1058 * XXX Actually, we do an unconditional kill (as in tgsi_exec.c).
1059 * The only predication is the execution mask which will apply if
1060 * we're inside a loop or conditional.
1063 emit_kilp(struct lp_build_tgsi_soa_context
*bld
,
1064 const struct tgsi_full_instruction
*inst
,
1069 /* For those channels which are "alive", disable fragment shader
1072 if (bld
->exec_mask
.has_mask
) {
1073 mask
= LLVMBuildNot(bld
->base
.builder
, bld
->exec_mask
.exec_mask
, "kilp");
1076 LLVMValueRef zero
= LLVMConstNull(bld
->base
.int_vec_type
);
1080 lp_build_mask_update(bld
->mask
, mask
);
1082 if (!near_end_of_shader(bld
, pc
))
1083 lp_build_mask_check(bld
->mask
);
1088 struct lp_build_tgsi_soa_context
*bld
,
1089 const struct tgsi_full_declaration
*decl
)
1091 LLVMTypeRef vec_type
= bld
->base
.vec_type
;
1093 unsigned first
= decl
->Range
.First
;
1094 unsigned last
= decl
->Range
.Last
;
1097 for (idx
= first
; idx
<= last
; ++idx
) {
1098 assert(last
<= bld
->info
->file_max
[decl
->Declaration
.File
]);
1099 switch (decl
->Declaration
.File
) {
1100 case TGSI_FILE_TEMPORARY
:
1101 assert(idx
< LP_MAX_TGSI_TEMPS
);
1102 if (bld
->indirect_files
& (1 << TGSI_FILE_TEMPORARY
)) {
1103 LLVMValueRef array_size
= LLVMConstInt(LLVMInt32Type(),
1105 bld
->temps_array
= lp_build_array_alloca(bld
->base
.builder
,
1106 vec_type
, array_size
, "");
1108 for (i
= 0; i
< NUM_CHANNELS
; i
++)
1109 bld
->temps
[idx
][i
] = lp_build_alloca(bld
->base
.builder
,
1114 case TGSI_FILE_OUTPUT
:
1115 for (i
= 0; i
< NUM_CHANNELS
; i
++)
1116 bld
->outputs
[idx
][i
] = lp_build_alloca(bld
->base
.builder
,
1120 case TGSI_FILE_ADDRESS
:
1121 assert(idx
< LP_MAX_TGSI_ADDRS
);
1122 for (i
= 0; i
< NUM_CHANNELS
; i
++)
1123 bld
->addr
[idx
][i
] = lp_build_alloca(bld
->base
.builder
,
1127 case TGSI_FILE_PREDICATE
:
1128 assert(idx
< LP_MAX_TGSI_PREDS
);
1129 for (i
= 0; i
< NUM_CHANNELS
; i
++)
1130 bld
->preds
[idx
][i
] = lp_build_alloca(bld
->base
.builder
,
1135 /* don't need to declare other vars */
1143 * Emit LLVM for one TGSI instruction.
1144 * \param return TRUE for success, FALSE otherwise
1148 struct lp_build_tgsi_soa_context
*bld
,
1149 const struct tgsi_full_instruction
*inst
,
1150 const struct tgsi_opcode_info
*info
,
1153 unsigned chan_index
;
1154 LLVMValueRef src0
, src1
, src2
;
1155 LLVMValueRef tmp0
, tmp1
, tmp2
;
1156 LLVMValueRef tmp3
= NULL
;
1157 LLVMValueRef tmp4
= NULL
;
1158 LLVMValueRef tmp5
= NULL
;
1159 LLVMValueRef tmp6
= NULL
;
1160 LLVMValueRef tmp7
= NULL
;
1162 LLVMValueRef dst0
[NUM_CHANNELS
];
1165 * Stores and write masks are handled in a general fashion after the long
1166 * instruction opcode switch statement.
1168 * Although not stricitly necessary, we avoid generating instructions for
1169 * channels which won't be stored, in cases where's that easy. For some
1170 * complex instructions, like texture sampling, it is more convenient to
1171 * assume a full writemask and then let LLVM optimization passes eliminate
1177 assert(info
->num_dst
<= 1);
1178 if (info
->num_dst
) {
1179 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1180 dst0
[chan_index
] = bld
->base
.undef
;
1184 switch (inst
->Instruction
.Opcode
) {
1185 case TGSI_OPCODE_ARL
:
1186 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1187 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1188 tmp0
= lp_build_floor(&bld
->base
, tmp0
);
1189 dst0
[chan_index
] = tmp0
;
1193 case TGSI_OPCODE_MOV
:
1194 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1195 dst0
[chan_index
] = emit_fetch( bld
, inst
, 0, chan_index
);
1199 case TGSI_OPCODE_LIT
:
1200 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ) {
1201 dst0
[CHAN_X
] = bld
->base
.one
;
1203 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1204 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1205 dst0
[CHAN_Y
] = lp_build_max( &bld
->base
, src0
, bld
->base
.zero
);
1207 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1208 /* XMM[1] = SrcReg[0].yyyy */
1209 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1210 /* XMM[1] = max(XMM[1], 0) */
1211 tmp1
= lp_build_max( &bld
->base
, tmp1
, bld
->base
.zero
);
1212 /* XMM[2] = SrcReg[0].wwww */
1213 tmp2
= emit_fetch( bld
, inst
, 0, CHAN_W
);
1214 tmp1
= lp_build_pow( &bld
->base
, tmp1
, tmp2
);
1215 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1216 tmp2
= lp_build_cmp(&bld
->base
, PIPE_FUNC_GREATER
, tmp0
, bld
->base
.zero
);
1217 dst0
[CHAN_Z
] = lp_build_select(&bld
->base
, tmp2
, tmp1
, bld
->base
.zero
);
1219 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) ) {
1220 dst0
[CHAN_W
] = bld
->base
.one
;
1224 case TGSI_OPCODE_RCP
:
1225 /* TGSI_OPCODE_RECIP */
1226 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1227 res
= lp_build_rcp(&bld
->base
, src0
);
1228 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1229 dst0
[chan_index
] = res
;
1233 case TGSI_OPCODE_RSQ
:
1234 /* TGSI_OPCODE_RECIPSQRT */
1235 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1236 src0
= lp_build_abs(&bld
->base
, src0
);
1237 res
= lp_build_rsqrt(&bld
->base
, src0
);
1238 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1239 dst0
[chan_index
] = res
;
1243 case TGSI_OPCODE_EXP
:
1244 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1245 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1246 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
1247 LLVMValueRef
*p_exp2_int_part
= NULL
;
1248 LLVMValueRef
*p_frac_part
= NULL
;
1249 LLVMValueRef
*p_exp2
= NULL
;
1251 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1253 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
1254 p_exp2_int_part
= &tmp0
;
1255 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
1256 p_frac_part
= &tmp1
;
1257 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
1260 lp_build_exp2_approx(&bld
->base
, src0
, p_exp2_int_part
, p_frac_part
, p_exp2
);
1262 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
1263 dst0
[CHAN_X
] = tmp0
;
1264 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
1265 dst0
[CHAN_Y
] = tmp1
;
1266 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
1267 dst0
[CHAN_Z
] = tmp2
;
1270 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
1271 dst0
[CHAN_W
] = bld
->base
.one
;
1275 case TGSI_OPCODE_LOG
:
1276 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1277 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1278 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
)) {
1279 LLVMValueRef
*p_floor_log2
= NULL
;
1280 LLVMValueRef
*p_exp
= NULL
;
1281 LLVMValueRef
*p_log2
= NULL
;
1283 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1284 src0
= lp_build_abs( &bld
->base
, src0
);
1286 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
1287 p_floor_log2
= &tmp0
;
1288 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
))
1290 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
1293 lp_build_log2_approx(&bld
->base
, src0
, p_exp
, p_floor_log2
, p_log2
);
1295 /* dst.x = floor(lg2(abs(src.x))) */
1296 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
))
1297 dst0
[CHAN_X
] = tmp0
;
1298 /* dst.y = abs(src)/ex2(floor(lg2(abs(src.x)))) */
1299 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
)) {
1300 dst0
[CHAN_Y
] = lp_build_div( &bld
->base
, src0
, tmp1
);
1302 /* dst.z = lg2(abs(src.x)) */
1303 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
))
1304 dst0
[CHAN_Z
] = tmp2
;
1307 if (IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
)) {
1308 dst0
[CHAN_W
] = bld
->base
.one
;
1312 case TGSI_OPCODE_MUL
:
1313 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1314 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1315 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1316 dst0
[chan_index
] = lp_build_mul(&bld
->base
, src0
, src1
);
1320 case TGSI_OPCODE_ADD
:
1321 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1322 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1323 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1324 dst0
[chan_index
] = lp_build_add(&bld
->base
, src0
, src1
);
1328 case TGSI_OPCODE_DP3
:
1329 /* TGSI_OPCODE_DOT3 */
1330 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1331 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1332 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1333 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1334 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1335 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1336 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1337 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1338 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1339 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1340 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1341 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1342 dst0
[chan_index
] = tmp0
;
1346 case TGSI_OPCODE_DP4
:
1347 /* TGSI_OPCODE_DOT4 */
1348 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1349 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1350 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1351 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1352 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1353 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1354 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1355 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1356 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1357 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1358 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1359 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_W
);
1360 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1361 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1362 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1363 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1364 dst0
[chan_index
] = tmp0
;
1368 case TGSI_OPCODE_DST
:
1369 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1370 dst0
[CHAN_X
] = bld
->base
.one
;
1372 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1373 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1374 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1375 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1377 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1378 dst0
[CHAN_Z
] = emit_fetch( bld
, inst
, 0, CHAN_Z
);
1380 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1381 dst0
[CHAN_W
] = emit_fetch( bld
, inst
, 1, CHAN_W
);
1385 case TGSI_OPCODE_MIN
:
1386 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1387 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1388 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1389 dst0
[chan_index
] = lp_build_min( &bld
->base
, src0
, src1
);
1393 case TGSI_OPCODE_MAX
:
1394 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1395 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1396 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1397 dst0
[chan_index
] = lp_build_max( &bld
->base
, src0
, src1
);
1401 case TGSI_OPCODE_SLT
:
1402 /* TGSI_OPCODE_SETLT */
1403 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1404 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1405 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1406 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, src1
);
1407 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1411 case TGSI_OPCODE_SGE
:
1412 /* TGSI_OPCODE_SETGE */
1413 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1414 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1415 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1416 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GEQUAL
, src0
, src1
);
1417 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1421 case TGSI_OPCODE_MAD
:
1422 /* TGSI_OPCODE_MADD */
1423 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1424 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1425 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1426 tmp2
= emit_fetch( bld
, inst
, 2, chan_index
);
1427 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1428 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp2
);
1429 dst0
[chan_index
] = tmp0
;
1433 case TGSI_OPCODE_SUB
:
1434 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1435 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1436 tmp1
= emit_fetch( bld
, inst
, 1, chan_index
);
1437 dst0
[chan_index
] = lp_build_sub( &bld
->base
, tmp0
, tmp1
);
1441 case TGSI_OPCODE_LRP
:
1442 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1443 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1444 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1445 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1446 tmp0
= lp_build_sub( &bld
->base
, src1
, src2
);
1447 tmp0
= lp_build_mul( &bld
->base
, src0
, tmp0
);
1448 dst0
[chan_index
] = lp_build_add( &bld
->base
, tmp0
, src2
);
1452 case TGSI_OPCODE_CND
:
1453 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1454 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1455 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1456 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1457 tmp1
= lp_build_const_vec(bld
->base
.type
, 0.5);
1458 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src2
, tmp1
);
1459 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src0
, src1
);
1463 case TGSI_OPCODE_DP2A
:
1464 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1465 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1466 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1467 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1468 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1469 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1470 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1471 tmp1
= emit_fetch( bld
, inst
, 2, CHAN_X
); /* xmm1 = src[2].x */
1472 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1473 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1474 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1478 case TGSI_OPCODE_FRC
:
1479 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1480 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1481 tmp0
= lp_build_floor(&bld
->base
, src0
);
1482 tmp0
= lp_build_sub(&bld
->base
, src0
, tmp0
);
1483 dst0
[chan_index
] = tmp0
;
1487 case TGSI_OPCODE_CLAMP
:
1488 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1489 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1490 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1491 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1492 tmp0
= lp_build_max(&bld
->base
, tmp0
, src1
);
1493 tmp0
= lp_build_min(&bld
->base
, tmp0
, src2
);
1494 dst0
[chan_index
] = tmp0
;
1498 case TGSI_OPCODE_FLR
:
1499 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1500 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1501 dst0
[chan_index
] = lp_build_floor(&bld
->base
, tmp0
);
1505 case TGSI_OPCODE_ROUND
:
1506 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1507 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1508 dst0
[chan_index
] = lp_build_round(&bld
->base
, tmp0
);
1512 case TGSI_OPCODE_EX2
: {
1513 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1514 tmp0
= lp_build_exp2( &bld
->base
, tmp0
);
1515 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1516 dst0
[chan_index
] = tmp0
;
1521 case TGSI_OPCODE_LG2
:
1522 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1523 tmp0
= lp_build_log2( &bld
->base
, tmp0
);
1524 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1525 dst0
[chan_index
] = tmp0
;
1529 case TGSI_OPCODE_POW
:
1530 src0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1531 src1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1532 res
= lp_build_pow( &bld
->base
, src0
, src1
);
1533 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1534 dst0
[chan_index
] = res
;
1538 case TGSI_OPCODE_XPD
:
1539 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1540 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ) {
1541 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1542 tmp3
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1544 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) ||
1545 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1546 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1547 tmp4
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1549 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1551 tmp2
= lp_build_mul( &bld
->base
, tmp2
, tmp1
);
1553 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1554 tmp2
= lp_build_sub( &bld
->base
, tmp2
, tmp5
);
1555 dst0
[CHAN_X
] = tmp2
;
1557 if( IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) ||
1558 IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) ) {
1559 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1560 tmp5
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1562 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1563 tmp3
= lp_build_mul( &bld
->base
, tmp3
, tmp2
);
1564 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp5
);
1565 tmp3
= lp_build_sub( &bld
->base
, tmp3
, tmp1
);
1566 dst0
[CHAN_Y
] = tmp3
;
1568 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1569 tmp5
= lp_build_mul( &bld
->base
, tmp5
, tmp4
);
1570 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp2
);
1571 tmp5
= lp_build_sub( &bld
->base
, tmp5
, tmp0
);
1572 dst0
[CHAN_Z
] = tmp5
;
1574 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1575 dst0
[CHAN_W
] = bld
->base
.one
;
1579 case TGSI_OPCODE_ABS
:
1580 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1581 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1582 dst0
[chan_index
] = lp_build_abs( &bld
->base
, tmp0
);
1586 case TGSI_OPCODE_RCC
:
1591 case TGSI_OPCODE_DPH
:
1592 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1593 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
);
1594 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
);
1595 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
);
1596 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
);
1597 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1598 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1599 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Z
);
1600 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Z
);
1601 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
);
1602 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1603 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_W
);
1604 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1605 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1606 dst0
[chan_index
] = tmp0
;
1610 case TGSI_OPCODE_COS
:
1611 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1612 tmp0
= lp_build_cos( &bld
->base
, tmp0
);
1613 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1614 dst0
[chan_index
] = tmp0
;
1618 case TGSI_OPCODE_DDX
:
1619 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1620 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, &dst0
[chan_index
], NULL
);
1624 case TGSI_OPCODE_DDY
:
1625 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1626 emit_fetch_deriv( bld
, inst
, 0, chan_index
, NULL
, NULL
, &dst0
[chan_index
]);
1630 case TGSI_OPCODE_KILP
:
1631 /* predicated kill */
1632 emit_kilp( bld
, inst
, (*pc
)-1 );
1635 case TGSI_OPCODE_KIL
:
1636 /* conditional kill */
1637 emit_kil( bld
, inst
, (*pc
)-1 );
1640 case TGSI_OPCODE_PK2H
:
1644 case TGSI_OPCODE_PK2US
:
1648 case TGSI_OPCODE_PK4B
:
1652 case TGSI_OPCODE_PK4UB
:
1656 case TGSI_OPCODE_RFL
:
1660 case TGSI_OPCODE_SEQ
:
1661 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1662 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1663 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1664 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_EQUAL
, src0
, src1
);
1665 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1669 case TGSI_OPCODE_SFL
:
1670 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1671 dst0
[chan_index
] = bld
->base
.zero
;
1675 case TGSI_OPCODE_SGT
:
1676 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1677 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1678 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1679 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_GREATER
, src0
, src1
);
1680 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1684 case TGSI_OPCODE_SIN
:
1685 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1686 tmp0
= lp_build_sin( &bld
->base
, tmp0
);
1687 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1688 dst0
[chan_index
] = tmp0
;
1692 case TGSI_OPCODE_SLE
:
1693 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1694 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1695 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1696 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LEQUAL
, src0
, src1
);
1697 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1701 case TGSI_OPCODE_SNE
:
1702 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1703 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1704 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1705 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_NOTEQUAL
, src0
, src1
);
1706 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, bld
->base
.one
, bld
->base
.zero
);
1710 case TGSI_OPCODE_STR
:
1711 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1712 dst0
[chan_index
] = bld
->base
.one
;
1716 case TGSI_OPCODE_TEX
:
1717 emit_tex( bld
, inst
, LP_BLD_TEX_MODIFIER_NONE
, dst0
);
1720 case TGSI_OPCODE_TXD
:
1721 emit_tex( bld
, inst
, LP_BLD_TEX_MODIFIER_EXPLICIT_DERIV
, dst0
);
1724 case TGSI_OPCODE_UP2H
:
1730 case TGSI_OPCODE_UP2US
:
1736 case TGSI_OPCODE_UP4B
:
1742 case TGSI_OPCODE_UP4UB
:
1748 case TGSI_OPCODE_X2D
:
1754 case TGSI_OPCODE_ARA
:
1760 case TGSI_OPCODE_ARR
:
1761 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1762 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1763 tmp0
= lp_build_round(&bld
->base
, tmp0
);
1764 dst0
[chan_index
] = tmp0
;
1768 case TGSI_OPCODE_BRA
:
1774 case TGSI_OPCODE_CAL
:
1775 lp_exec_mask_call(&bld
->exec_mask
,
1781 case TGSI_OPCODE_RET
:
1782 lp_exec_mask_ret(&bld
->exec_mask
, pc
);
1785 case TGSI_OPCODE_END
:
1789 case TGSI_OPCODE_SSG
:
1790 /* TGSI_OPCODE_SGN */
1791 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1792 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1793 dst0
[chan_index
] = lp_build_sgn( &bld
->base
, tmp0
);
1797 case TGSI_OPCODE_CMP
:
1798 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1799 src0
= emit_fetch( bld
, inst
, 0, chan_index
);
1800 src1
= emit_fetch( bld
, inst
, 1, chan_index
);
1801 src2
= emit_fetch( bld
, inst
, 2, chan_index
);
1802 tmp0
= lp_build_cmp( &bld
->base
, PIPE_FUNC_LESS
, src0
, bld
->base
.zero
);
1803 dst0
[chan_index
] = lp_build_select( &bld
->base
, tmp0
, src1
, src2
);
1807 case TGSI_OPCODE_SCS
:
1808 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_X
) {
1809 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1810 dst0
[CHAN_X
] = lp_build_cos( &bld
->base
, tmp0
);
1812 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Y
) {
1813 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
);
1814 dst0
[CHAN_Y
] = lp_build_sin( &bld
->base
, tmp0
);
1816 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_Z
) {
1817 dst0
[CHAN_Z
] = bld
->base
.zero
;
1819 IF_IS_DST0_CHANNEL_ENABLED( inst
, CHAN_W
) {
1820 dst0
[CHAN_W
] = bld
->base
.one
;
1824 case TGSI_OPCODE_TXB
:
1825 emit_tex( bld
, inst
, LP_BLD_TEX_MODIFIER_LOD_BIAS
, dst0
);
1828 case TGSI_OPCODE_NRM
:
1830 case TGSI_OPCODE_NRM4
:
1831 /* 3 or 4-component normalization */
1833 uint dims
= (inst
->Instruction
.Opcode
== TGSI_OPCODE_NRM
) ? 3 : 4;
1835 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) ||
1836 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
) ||
1837 IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
) ||
1838 (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 4)) {
1840 /* NOTE: Cannot use xmm regs 2/3 here (see emit_rsqrt() above). */
1843 /* xmm0 = src.x * src.x */
1844 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1845 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1848 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp0
);
1851 /* xmm0 = xmm0 + src.y * src.y */
1852 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Y
);
1853 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1856 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1857 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1860 /* xmm0 = xmm0 + src.z * src.z */
1861 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_Z
);
1862 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1865 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1866 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1870 /* xmm0 = xmm0 + src.w * src.w */
1871 tmp1
= emit_fetch(bld
, inst
, 0, CHAN_W
);
1872 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
)) {
1875 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp1
);
1876 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
);
1879 /* xmm1 = 1 / sqrt(xmm0) */
1880 tmp1
= lp_build_rsqrt( &bld
->base
, tmp0
);
1882 /* dst.x = xmm1 * src.x */
1883 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
)) {
1884 dst0
[CHAN_X
] = lp_build_mul( &bld
->base
, tmp4
, tmp1
);
1887 /* dst.y = xmm1 * src.y */
1888 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Y
)) {
1889 dst0
[CHAN_Y
] = lp_build_mul( &bld
->base
, tmp5
, tmp1
);
1892 /* dst.z = xmm1 * src.z */
1893 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_Z
)) {
1894 dst0
[CHAN_Z
] = lp_build_mul( &bld
->base
, tmp6
, tmp1
);
1897 /* dst.w = xmm1 * src.w */
1898 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_X
) && dims
== 4) {
1899 dst0
[CHAN_W
] = lp_build_mul( &bld
->base
, tmp7
, tmp1
);
1904 if (IS_DST0_CHANNEL_ENABLED(inst
, CHAN_W
) && dims
== 3) {
1905 dst0
[CHAN_W
] = bld
->base
.one
;
1910 case TGSI_OPCODE_DIV
:
1916 case TGSI_OPCODE_DP2
:
1917 tmp0
= emit_fetch( bld
, inst
, 0, CHAN_X
); /* xmm0 = src[0].x */
1918 tmp1
= emit_fetch( bld
, inst
, 1, CHAN_X
); /* xmm1 = src[1].x */
1919 tmp0
= lp_build_mul( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 * xmm1 */
1920 tmp1
= emit_fetch( bld
, inst
, 0, CHAN_Y
); /* xmm1 = src[0].y */
1921 tmp2
= emit_fetch( bld
, inst
, 1, CHAN_Y
); /* xmm2 = src[1].y */
1922 tmp1
= lp_build_mul( &bld
->base
, tmp1
, tmp2
); /* xmm1 = xmm1 * xmm2 */
1923 tmp0
= lp_build_add( &bld
->base
, tmp0
, tmp1
); /* xmm0 = xmm0 + xmm1 */
1924 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1925 dst0
[chan_index
] = tmp0
; /* dest[ch] = xmm0 */
1929 case TGSI_OPCODE_TXL
:
1930 emit_tex( bld
, inst
, LP_BLD_TEX_MODIFIER_EXPLICIT_LOD
, dst0
);
1933 case TGSI_OPCODE_TXP
:
1934 emit_tex( bld
, inst
, LP_BLD_TEX_MODIFIER_PROJECTED
, dst0
);
1937 case TGSI_OPCODE_BRK
:
1938 lp_exec_break(&bld
->exec_mask
);
1941 case TGSI_OPCODE_IF
:
1942 tmp0
= emit_fetch(bld
, inst
, 0, CHAN_X
);
1943 tmp0
= lp_build_cmp(&bld
->base
, PIPE_FUNC_NOTEQUAL
,
1944 tmp0
, bld
->base
.zero
);
1945 lp_exec_mask_cond_push(&bld
->exec_mask
, tmp0
);
1948 case TGSI_OPCODE_BGNLOOP
:
1949 lp_exec_bgnloop(&bld
->exec_mask
);
1952 case TGSI_OPCODE_BGNSUB
:
1953 lp_exec_mask_bgnsub(&bld
->exec_mask
);
1956 case TGSI_OPCODE_ELSE
:
1957 lp_exec_mask_cond_invert(&bld
->exec_mask
);
1960 case TGSI_OPCODE_ENDIF
:
1961 lp_exec_mask_cond_pop(&bld
->exec_mask
);
1964 case TGSI_OPCODE_ENDLOOP
:
1965 lp_exec_endloop(&bld
->exec_mask
);
1968 case TGSI_OPCODE_ENDSUB
:
1969 lp_exec_mask_endsub(&bld
->exec_mask
, pc
);
1972 case TGSI_OPCODE_PUSHA
:
1978 case TGSI_OPCODE_POPA
:
1984 case TGSI_OPCODE_CEIL
:
1985 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
1986 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
1987 dst0
[chan_index
] = lp_build_ceil(&bld
->base
, tmp0
);
1991 case TGSI_OPCODE_I2F
:
1997 case TGSI_OPCODE_NOT
:
2003 case TGSI_OPCODE_TRUNC
:
2004 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
2005 tmp0
= emit_fetch( bld
, inst
, 0, chan_index
);
2006 dst0
[chan_index
] = lp_build_trunc(&bld
->base
, tmp0
);
2010 case TGSI_OPCODE_SHL
:
2016 case TGSI_OPCODE_ISHR
:
2022 case TGSI_OPCODE_AND
:
2028 case TGSI_OPCODE_OR
:
2034 case TGSI_OPCODE_MOD
:
2040 case TGSI_OPCODE_XOR
:
2046 case TGSI_OPCODE_SAD
:
2052 case TGSI_OPCODE_TXF
:
2058 case TGSI_OPCODE_TXQ
:
2064 case TGSI_OPCODE_CONT
:
2065 lp_exec_continue(&bld
->exec_mask
);
2068 case TGSI_OPCODE_EMIT
:
2072 case TGSI_OPCODE_ENDPRIM
:
2076 case TGSI_OPCODE_NOP
:
2084 LLVMValueRef pred
[NUM_CHANNELS
];
2086 emit_fetch_predicate( bld
, inst
, pred
);
2088 FOR_EACH_DST0_ENABLED_CHANNEL( inst
, chan_index
) {
2089 emit_store( bld
, inst
, 0, chan_index
, pred
[chan_index
], dst0
[chan_index
]);
2098 lp_build_tgsi_soa(LLVMBuilderRef builder
,
2099 const struct tgsi_token
*tokens
,
2100 struct lp_type type
,
2101 struct lp_build_mask_context
*mask
,
2102 LLVMValueRef consts_ptr
,
2103 const LLVMValueRef
*pos
,
2104 const LLVMValueRef (*inputs
)[NUM_CHANNELS
],
2105 LLVMValueRef (*outputs
)[NUM_CHANNELS
],
2106 struct lp_build_sampler_soa
*sampler
,
2107 const struct tgsi_shader_info
*info
)
2109 struct lp_build_tgsi_soa_context bld
;
2110 struct tgsi_parse_context parse
;
2111 uint num_immediates
= 0;
2112 uint num_instructions
= 0;
2116 struct lp_type res_type
;
2118 assert(type
.length
<= LP_MAX_VECTOR_LENGTH
);
2119 memset(&res_type
, 0, sizeof res_type
);
2120 res_type
.width
= type
.width
;
2121 res_type
.length
= type
.length
;
2124 /* Setup build context */
2125 memset(&bld
, 0, sizeof bld
);
2126 lp_build_context_init(&bld
.base
, builder
, type
);
2127 lp_build_context_init(&bld
.uint_bld
, builder
, lp_uint_type(type
));
2130 bld
.inputs
= inputs
;
2131 bld
.outputs
= outputs
;
2132 bld
.consts_ptr
= consts_ptr
;
2133 bld
.sampler
= sampler
;
2135 bld
.indirect_files
= info
->indirect_files
;
2136 bld
.instructions
= (struct tgsi_full_instruction
*)
2137 MALLOC( LP_MAX_INSTRUCTIONS
* sizeof(struct tgsi_full_instruction
) );
2138 bld
.max_instructions
= LP_MAX_INSTRUCTIONS
;
2140 if (!bld
.instructions
) {
2144 lp_exec_mask_init(&bld
.exec_mask
, &bld
.base
);
2146 tgsi_parse_init( &parse
, tokens
);
2148 while( !tgsi_parse_end_of_tokens( &parse
) ) {
2149 tgsi_parse_token( &parse
);
2151 switch( parse
.FullToken
.Token
.Type
) {
2152 case TGSI_TOKEN_TYPE_DECLARATION
:
2153 /* Inputs already interpolated */
2154 emit_declaration( &bld
, &parse
.FullToken
.FullDeclaration
);
2157 case TGSI_TOKEN_TYPE_INSTRUCTION
:
2159 /* save expanded instruction */
2160 if (num_instructions
== bld
.max_instructions
) {
2161 struct tgsi_full_instruction
*instructions
;
2162 instructions
= REALLOC(bld
.instructions
,
2163 bld
.max_instructions
2164 * sizeof(struct tgsi_full_instruction
),
2165 (bld
.max_instructions
+ LP_MAX_INSTRUCTIONS
)
2166 * sizeof(struct tgsi_full_instruction
));
2167 if (!instructions
) {
2170 bld
.instructions
= instructions
;
2171 bld
.max_instructions
+= LP_MAX_INSTRUCTIONS
;
2174 memcpy(bld
.instructions
+ num_instructions
,
2175 &parse
.FullToken
.FullInstruction
,
2176 sizeof(bld
.instructions
[0]));
2183 case TGSI_TOKEN_TYPE_IMMEDIATE
:
2184 /* simply copy the immediate values into the next immediates[] slot */
2186 const uint size
= parse
.FullToken
.FullImmediate
.Immediate
.NrTokens
- 1;
2188 assert(num_immediates
< LP_MAX_TGSI_IMMEDIATES
);
2189 for( i
= 0; i
< size
; ++i
)
2190 bld
.immediates
[num_immediates
][i
] =
2191 lp_build_const_vec(type
, parse
.FullToken
.FullImmediate
.u
[i
].Float
);
2192 for( i
= size
; i
< 4; ++i
)
2193 bld
.immediates
[num_immediates
][i
] = bld
.base
.undef
;
2198 case TGSI_TOKEN_TYPE_PROPERTY
:
2207 struct tgsi_full_instruction
*instr
= bld
.instructions
+ pc
;
2208 const struct tgsi_opcode_info
*opcode_info
=
2209 tgsi_get_opcode_info(instr
->Instruction
.Opcode
);
2210 if (!emit_instruction( &bld
, instr
, opcode_info
, &pc
))
2211 _debug_printf("warning: failed to translate tgsi opcode %s to LLVM\n",
2212 opcode_info
->mnemonic
);
2216 LLVMBasicBlockRef block
= LLVMGetInsertBlock(builder
);
2217 LLVMValueRef function
= LLVMGetBasicBlockParent(block
);
2218 debug_printf("11111111111111111111111111111 \n");
2219 tgsi_dump(tokens
, 0);
2220 lp_debug_dump_value(function
);
2221 debug_printf("2222222222222222222222222222 \n");
2223 tgsi_parse_free( &parse
);
2226 LLVMModuleRef module
= LLVMGetGlobalParent(
2227 LLVMGetBasicBlockParent(LLVMGetInsertBlock(bld
.base
.builder
)));
2228 LLVMDumpModule(module
);
2232 FREE( bld
.instructions
);