{
if (ir->callee->is_intrinsic) {
nir_intrinsic_op op;
- if (strcmp(ir->callee_name(), "__intrinsic_atomic_read") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_read);
+
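+ /* Select the NIR intrinsic from the GLSL IR intrinsic ID instead of comparing callee name strings. */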
+ switch (ir->callee->intrinsic_id) {
+ case ir_intrinsic_atomic_counter_read:
op = nir_intrinsic_atomic_counter_read_var;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_increment") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_increment);
+ break;
+ case ir_intrinsic_atomic_counter_increment:
op = nir_intrinsic_atomic_counter_inc_var;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_predecrement") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_predecrement);
+ break;
+ case ir_intrinsic_atomic_counter_predecrement:
op = nir_intrinsic_atomic_counter_dec_var;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_load") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_load);
+ break;
+ case ir_intrinsic_image_load:
op = nir_intrinsic_image_load;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_store") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_store);
+ break;
+ case ir_intrinsic_image_store:
op = nir_intrinsic_image_store;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_add") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_add);
+ break;
+ case ir_intrinsic_image_atomic_add:
op = nir_intrinsic_image_atomic_add;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_min") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_min);
+ break;
+ case ir_intrinsic_image_atomic_min:
op = nir_intrinsic_image_atomic_min;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_max") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_max);
+ break;
+ case ir_intrinsic_image_atomic_max:
op = nir_intrinsic_image_atomic_max;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_and") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_and);
+ break;
+ case ir_intrinsic_image_atomic_and:
op = nir_intrinsic_image_atomic_and;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_or") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_or);
+ break;
+ case ir_intrinsic_image_atomic_or:
op = nir_intrinsic_image_atomic_or;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_xor") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_xor);
+ break;
+ case ir_intrinsic_image_atomic_xor:
op = nir_intrinsic_image_atomic_xor;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_exchange") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_exchange);
+ break;
+ case ir_intrinsic_image_atomic_exchange:
op = nir_intrinsic_image_atomic_exchange;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_comp_swap") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_comp_swap);
+ break;
+ case ir_intrinsic_image_atomic_comp_swap:
op = nir_intrinsic_image_atomic_comp_swap;
- } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier);
+ break;
+ case ir_intrinsic_memory_barrier:
op = nir_intrinsic_memory_barrier;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_size") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_size);
+ break;
+ case ir_intrinsic_image_size:
op = nir_intrinsic_image_size;
- } else if (strcmp(ir->callee_name(), "__intrinsic_image_samples") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_image_samples);
+ break;
+ case ir_intrinsic_image_samples:
op = nir_intrinsic_image_samples;
- } else if (strcmp(ir->callee_name(), "__intrinsic_store_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_store);
+ break;
+ case ir_intrinsic_ssbo_store:
op = nir_intrinsic_store_ssbo;
- } else if (strcmp(ir->callee_name(), "__intrinsic_load_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_load);
+ break;
+ case ir_intrinsic_ssbo_load:
op = nir_intrinsic_load_ssbo;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_add);
+ break;
+ case ir_intrinsic_ssbo_atomic_add:
op = nir_intrinsic_ssbo_atomic_add;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_and);
+ break;
+ case ir_intrinsic_ssbo_atomic_and:
op = nir_intrinsic_ssbo_atomic_and;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_or);
+ break;
+ case ir_intrinsic_ssbo_atomic_or:
op = nir_intrinsic_ssbo_atomic_or;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_xor);
+ break;
+ case ir_intrinsic_ssbo_atomic_xor:
op = nir_intrinsic_ssbo_atomic_xor;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_min);
+ break;
+ case ir_intrinsic_ssbo_atomic_min:
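+ /* Atomic min/max pick the signed or unsigned NIR variant based on the return type. */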
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_ssbo_atomic_imin;
else if (ir->return_deref->type == glsl_type::uint_type)
op = nir_intrinsic_ssbo_atomic_umin;
else
unreachable("Invalid type");
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_max);
+ break;
+ case ir_intrinsic_ssbo_atomic_max:
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_ssbo_atomic_imax;
else if (ir->return_deref->type == glsl_type::uint_type)
op = nir_intrinsic_ssbo_atomic_umax;
else
unreachable("Invalid type");
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_exchange);
+ break;
+ case ir_intrinsic_ssbo_atomic_exchange:
op = nir_intrinsic_ssbo_atomic_exchange;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_ssbo") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_comp_swap);
+ break;
+ case ir_intrinsic_ssbo_atomic_comp_swap:
op = nir_intrinsic_ssbo_atomic_comp_swap;
- } else if (strcmp(ir->callee_name(), "__intrinsic_shader_clock") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shader_clock);
+ break;
+ case ir_intrinsic_shader_clock:
op = nir_intrinsic_shader_clock;
- } else if (strcmp(ir->callee_name(), "__intrinsic_group_memory_barrier") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_group_memory_barrier);
+ break;
+ case ir_intrinsic_group_memory_barrier:
op = nir_intrinsic_group_memory_barrier;
- } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_atomic_counter") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_atomic_counter);
+ break;
+ case ir_intrinsic_memory_barrier_atomic_counter:
op = nir_intrinsic_memory_barrier_atomic_counter;
- } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_buffer") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_buffer);
+ break;
+ case ir_intrinsic_memory_barrier_buffer:
op = nir_intrinsic_memory_barrier_buffer;
- } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_image") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_image);
+ break;
+ case ir_intrinsic_memory_barrier_image:
op = nir_intrinsic_memory_barrier_image;
- } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_shared);
+ break;
+ case ir_intrinsic_memory_barrier_shared:
op = nir_intrinsic_memory_barrier_shared;
- } else if (strcmp(ir->callee_name(), "__intrinsic_load_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_load);
+ break;
+ case ir_intrinsic_shared_load:
op = nir_intrinsic_load_shared;
- } else if (strcmp(ir->callee_name(), "__intrinsic_store_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_store);
+ break;
+ case ir_intrinsic_shared_store:
op = nir_intrinsic_store_shared;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_add);
+ break;
+ case ir_intrinsic_shared_atomic_add:
op = nir_intrinsic_shared_atomic_add;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_and);
+ break;
+ case ir_intrinsic_shared_atomic_and:
op = nir_intrinsic_shared_atomic_and;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_or);
+ break;
+ case ir_intrinsic_shared_atomic_or:
op = nir_intrinsic_shared_atomic_or;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_xor);
+ break;
+ case ir_intrinsic_shared_atomic_xor:
op = nir_intrinsic_shared_atomic_xor;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_min);
+ break;
+ case ir_intrinsic_shared_atomic_min:
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_shared_atomic_imin;
else if (ir->return_deref->type == glsl_type::uint_type)
op = nir_intrinsic_shared_atomic_umin;
else
unreachable("Invalid type");
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_max);
+ break;
+ case ir_intrinsic_shared_atomic_max:
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_shared_atomic_imax;
else if (ir->return_deref->type == glsl_type::uint_type)
op = nir_intrinsic_shared_atomic_umax;
else
unreachable("Invalid type");
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_exchange);
+ break;
+ case ir_intrinsic_shared_atomic_exchange:
op = nir_intrinsic_shared_atomic_exchange;
- } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_shared") == 0) {
- assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_comp_swap);
+ break;
+ case ir_intrinsic_shared_atomic_comp_swap:
op = nir_intrinsic_shared_atomic_comp_swap;
- } else {
+ break;
+ default:
unreachable("not reached");
}
void
glsl_to_tgsi_visitor::visit_atomic_counter_intrinsic(ir_call *ir)
{
- const char *callee = ir->callee->function_name();
exec_node *param = ir->actual_parameters.get_head();
ir_dereference *deref = static_cast<ir_dereference *>(param);
ir_variable *location = deref->variable_referenced();
glsl_to_tgsi_instruction *inst;
- if (!strcmp("__intrinsic_atomic_read", callee)) {
+ if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_read) {
inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, offset);
- } else if (!strcmp("__intrinsic_atomic_increment", callee)) {
+ } else if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_increment) {
inst = emit_asm(ir, TGSI_OPCODE_ATOMUADD, dst, offset,
st_src_reg_for_int(1));
- } else if (!strcmp("__intrinsic_atomic_predecrement", callee)) {
+ } else if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_predecrement) {
inst = emit_asm(ir, TGSI_OPCODE_ATOMUADD, dst, offset,
st_src_reg_for_int(-1));
emit_asm(ir, TGSI_OPCODE_ADD, dst, this->result, st_src_reg_for_int(-1));
st_src_reg data = this->result, data2 = undef_src;
unsigned opcode;
- if (!strcmp("__intrinsic_atomic_add", callee))
+ switch (ir->callee->intrinsic_id) {
+ case ir_intrinsic_atomic_counter_add:
opcode = TGSI_OPCODE_ATOMUADD;
- else if (!strcmp("__intrinsic_atomic_min", callee))
+ break;
+ case ir_intrinsic_atomic_counter_min:
opcode = TGSI_OPCODE_ATOMIMIN;
- else if (!strcmp("__intrinsic_atomic_max", callee))
+ break;
+ case ir_intrinsic_atomic_counter_max:
opcode = TGSI_OPCODE_ATOMIMAX;
- else if (!strcmp("__intrinsic_atomic_and", callee))
+ break;
+ case ir_intrinsic_atomic_counter_and:
opcode = TGSI_OPCODE_ATOMAND;
- else if (!strcmp("__intrinsic_atomic_or", callee))
+ break;
+ case ir_intrinsic_atomic_counter_or:
opcode = TGSI_OPCODE_ATOMOR;
- else if (!strcmp("__intrinsic_atomic_xor", callee))
+ break;
+ case ir_intrinsic_atomic_counter_xor:
opcode = TGSI_OPCODE_ATOMXOR;
- else if (!strcmp("__intrinsic_atomic_exchange", callee))
+ break;
+ case ir_intrinsic_atomic_counter_exchange:
opcode = TGSI_OPCODE_ATOMXCHG;
- else if (!strcmp("__intrinsic_atomic_comp_swap", callee)) {
+ break;
+ case ir_intrinsic_atomic_counter_comp_swap: {
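+ /* comp_swap consumes an extra source operand taken from the next parameter. */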
opcode = TGSI_OPCODE_ATOMCAS;
param = param->get_next();
val = ((ir_instruction *)param)->as_rvalue();
val->accept(this);
data2 = this->result;
- } else if (!strcmp("__intrinsic_atomic_sub", callee)) {
+ break;
+ }
+ case ir_intrinsic_atomic_counter_sub: {
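+ /* Subtraction is emitted as ATOMUADD of the negated value. */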
opcode = TGSI_OPCODE_ATOMUADD;
st_src_reg res = get_temp(glsl_type::uvec4_type);
st_dst_reg dstres = st_dst_reg(res);
dstres.writemask = dst.writemask;
emit_asm(ir, TGSI_OPCODE_INEG, dstres, data);
data = res;
- } else {
+ break;
+ }
+ default:
assert(!"Unexpected intrinsic");
return;
}
void
glsl_to_tgsi_visitor::visit_ssbo_intrinsic(ir_call *ir)
{
- const char *callee = ir->callee->function_name();
exec_node *param = ir->actual_parameters.get_head();
ir_rvalue *block = ((ir_instruction *)param)->as_rvalue();
glsl_to_tgsi_instruction *inst;
- if (!strcmp("__intrinsic_load_ssbo", callee)) {
+ if (ir->callee->intrinsic_id == ir_intrinsic_ssbo_load) {
inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, off);
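+ /* Booleans loaded from an SSBO are normalized to 0 / ~0 with USNE. */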
if (dst.type == GLSL_TYPE_BOOL)
emit_asm(ir, TGSI_OPCODE_USNE, dst, st_src_reg(dst), st_src_reg_for_int(0));
- } else if (!strcmp("__intrinsic_store_ssbo", callee)) {
+ } else if (ir->callee->intrinsic_id == ir_intrinsic_ssbo_store) {
param = param->get_next();
ir_rvalue *val = ((ir_instruction *)param)->as_rvalue();
val->accept(this);
st_src_reg data = this->result, data2 = undef_src;
unsigned opcode;
- if (!strcmp("__intrinsic_atomic_add_ssbo", callee))
+ switch (ir->callee->intrinsic_id) {
+ case ir_intrinsic_ssbo_atomic_add:
opcode = TGSI_OPCODE_ATOMUADD;
- else if (!strcmp("__intrinsic_atomic_min_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_min:
opcode = TGSI_OPCODE_ATOMIMIN;
- else if (!strcmp("__intrinsic_atomic_max_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_max:
opcode = TGSI_OPCODE_ATOMIMAX;
- else if (!strcmp("__intrinsic_atomic_and_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_and:
opcode = TGSI_OPCODE_ATOMAND;
- else if (!strcmp("__intrinsic_atomic_or_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_or:
opcode = TGSI_OPCODE_ATOMOR;
- else if (!strcmp("__intrinsic_atomic_xor_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_xor:
opcode = TGSI_OPCODE_ATOMXOR;
- else if (!strcmp("__intrinsic_atomic_exchange_ssbo", callee))
+ break;
+ case ir_intrinsic_ssbo_atomic_exchange:
opcode = TGSI_OPCODE_ATOMXCHG;
- else if (!strcmp("__intrinsic_atomic_comp_swap_ssbo", callee)) {
+ break;
+ case ir_intrinsic_ssbo_atomic_comp_swap:
opcode = TGSI_OPCODE_ATOMCAS;
param = param->get_next();
val = ((ir_instruction *)param)->as_rvalue();
val->accept(this);
data2 = this->result;
- } else {
+ break;
+ default:
assert(!"Unexpected intrinsic");
return;
}
void
glsl_to_tgsi_visitor::visit_membar_intrinsic(ir_call *ir)
{
- const char *callee = ir->callee->function_name();
-
- if (!strcmp("__intrinsic_memory_barrier", callee))
+ switch (ir->callee->intrinsic_id) {
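+ /* Each barrier intrinsic maps to a MEMBAR with the matching TGSI_MEMBAR_* bits. */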
+ case ir_intrinsic_memory_barrier:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER |
TGSI_MEMBAR_ATOMIC_BUFFER |
TGSI_MEMBAR_SHADER_IMAGE |
TGSI_MEMBAR_SHARED));
- else if (!strcmp("__intrinsic_memory_barrier_atomic_counter", callee))
+ break;
+ case ir_intrinsic_memory_barrier_atomic_counter:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_ATOMIC_BUFFER));
- else if (!strcmp("__intrinsic_memory_barrier_buffer", callee))
+ break;
+ case ir_intrinsic_memory_barrier_buffer:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER));
- else if (!strcmp("__intrinsic_memory_barrier_image", callee))
+ break;
+ case ir_intrinsic_memory_barrier_image:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_SHADER_IMAGE));
- else if (!strcmp("__intrinsic_memory_barrier_shared", callee))
+ break;
+ case ir_intrinsic_memory_barrier_shared:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_SHARED));
- else if (!strcmp("__intrinsic_group_memory_barrier", callee))
+ break;
+ case ir_intrinsic_group_memory_barrier:
emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst,
st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER |
TGSI_MEMBAR_ATOMIC_BUFFER |
TGSI_MEMBAR_SHADER_IMAGE |
TGSI_MEMBAR_SHARED |
TGSI_MEMBAR_THREAD_GROUP));
- else
+ break;
+ default:
assert(!"Unexpected memory barrier intrinsic");
+ }
}
void
glsl_to_tgsi_visitor::visit_shared_intrinsic(ir_call *ir)
{
- const char *callee = ir->callee->function_name();
exec_node *param = ir->actual_parameters.get_head();
ir_rvalue *offset = ((ir_instruction *)param)->as_rvalue();
glsl_to_tgsi_instruction *inst;
- if (!strcmp("__intrinsic_load_shared", callee)) {
+ if (ir->callee->intrinsic_id == ir_intrinsic_shared_load) {
inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, off);
inst->buffer = buffer;
- } else if (!strcmp("__intrinsic_store_shared", callee)) {
+ } else if (ir->callee->intrinsic_id == ir_intrinsic_shared_store) {
param = param->get_next();
ir_rvalue *val = ((ir_instruction *)param)->as_rvalue();
val->accept(this);
st_src_reg data = this->result, data2 = undef_src;
unsigned opcode;
- if (!strcmp("__intrinsic_atomic_add_shared", callee))
+ switch (ir->callee->intrinsic_id) {
+ case ir_intrinsic_shared_atomic_add:
opcode = TGSI_OPCODE_ATOMUADD;
- else if (!strcmp("__intrinsic_atomic_min_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_min:
opcode = TGSI_OPCODE_ATOMIMIN;
- else if (!strcmp("__intrinsic_atomic_max_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_max:
opcode = TGSI_OPCODE_ATOMIMAX;
- else if (!strcmp("__intrinsic_atomic_and_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_and:
opcode = TGSI_OPCODE_ATOMAND;
- else if (!strcmp("__intrinsic_atomic_or_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_or:
opcode = TGSI_OPCODE_ATOMOR;
- else if (!strcmp("__intrinsic_atomic_xor_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_xor:
opcode = TGSI_OPCODE_ATOMXOR;
- else if (!strcmp("__intrinsic_atomic_exchange_shared", callee))
+ break;
+ case ir_intrinsic_shared_atomic_exchange:
opcode = TGSI_OPCODE_ATOMXCHG;
- else if (!strcmp("__intrinsic_atomic_comp_swap_shared", callee)) {
+ break;
+ case ir_intrinsic_shared_atomic_comp_swap:
opcode = TGSI_OPCODE_ATOMCAS;
param = param->get_next();
val = ((ir_instruction *)param)->as_rvalue();
val->accept(this);
data2 = this->result;
- } else {
+ break;
+ default:
assert(!"Unexpected intrinsic");
return;
}
void
glsl_to_tgsi_visitor::visit_image_intrinsic(ir_call *ir)
{
- const char *callee = ir->callee->function_name();
exec_node *param = ir->actual_parameters.get_head();
ir_dereference *img = (ir_dereference *)param;
glsl_to_tgsi_instruction *inst;
- if (!strcmp("__intrinsic_image_size", callee)) {
+ if (ir->callee->intrinsic_id == ir_intrinsic_image_size) {
dst.writemask = WRITEMASK_XYZ;
inst = emit_asm(ir, TGSI_OPCODE_RESQ, dst);
- } else if (!strcmp("__intrinsic_image_samples", callee)) {
+ } else if (ir->callee->intrinsic_id == ir_intrinsic_image_samples) {
st_src_reg res = get_temp(glsl_type::ivec4_type);
st_dst_reg dstres = st_dst_reg(res);
dstres.writemask = WRITEMASK_W;
assert(param->is_tail_sentinel());
unsigned opcode;
- if (!strcmp("__intrinsic_image_load", callee))
+ switch (ir->callee->intrinsic_id) {
+ case ir_intrinsic_image_load:
opcode = TGSI_OPCODE_LOAD;
- else if (!strcmp("__intrinsic_image_store", callee))
+ break;
+ case ir_intrinsic_image_store:
opcode = TGSI_OPCODE_STORE;
- else if (!strcmp("__intrinsic_image_atomic_add", callee))
+ break;
+ case ir_intrinsic_image_atomic_add:
opcode = TGSI_OPCODE_ATOMUADD;
- else if (!strcmp("__intrinsic_image_atomic_min", callee))
+ break;
+ case ir_intrinsic_image_atomic_min:
opcode = TGSI_OPCODE_ATOMIMIN;
- else if (!strcmp("__intrinsic_image_atomic_max", callee))
+ break;
+ case ir_intrinsic_image_atomic_max:
opcode = TGSI_OPCODE_ATOMIMAX;
- else if (!strcmp("__intrinsic_image_atomic_and", callee))
+ break;
+ case ir_intrinsic_image_atomic_and:
opcode = TGSI_OPCODE_ATOMAND;
- else if (!strcmp("__intrinsic_image_atomic_or", callee))
+ break;
+ case ir_intrinsic_image_atomic_or:
opcode = TGSI_OPCODE_ATOMOR;
- else if (!strcmp("__intrinsic_image_atomic_xor", callee))
+ break;
+ case ir_intrinsic_image_atomic_xor:
opcode = TGSI_OPCODE_ATOMXOR;
- else if (!strcmp("__intrinsic_image_atomic_exchange", callee))
+ break;
+ case ir_intrinsic_image_atomic_exchange:
opcode = TGSI_OPCODE_ATOMXCHG;
- else if (!strcmp("__intrinsic_image_atomic_comp_swap", callee))
+ break;
+ case ir_intrinsic_image_atomic_comp_swap:
opcode = TGSI_OPCODE_ATOMCAS;
- else {
+ break;
+ default:
assert(!"Unexpected intrinsic");
return;
}
{
glsl_to_tgsi_instruction *call_inst;
ir_function_signature *sig = ir->callee;
- const char *callee = sig->function_name();
function_entry *entry;
int i;
/* Filter out intrinsics */
- if (!strcmp("__intrinsic_atomic_read", callee) ||
- !strcmp("__intrinsic_atomic_increment", callee) ||
- !strcmp("__intrinsic_atomic_predecrement", callee) ||
- !strcmp("__intrinsic_atomic_add", callee) ||
- !strcmp("__intrinsic_atomic_sub", callee) ||
- !strcmp("__intrinsic_atomic_min", callee) ||
- !strcmp("__intrinsic_atomic_max", callee) ||
- !strcmp("__intrinsic_atomic_and", callee) ||
- !strcmp("__intrinsic_atomic_or", callee) ||
- !strcmp("__intrinsic_atomic_xor", callee) ||
- !strcmp("__intrinsic_atomic_exchange", callee) ||
- !strcmp("__intrinsic_atomic_comp_swap", callee)) {
+ switch (sig->intrinsic_id) {
+ case ir_intrinsic_invalid:
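+ /* Not an intrinsic: handled as a regular function call below. */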
+ break;
+
+ case ir_intrinsic_atomic_counter_read:
+ case ir_intrinsic_atomic_counter_increment:
+ case ir_intrinsic_atomic_counter_predecrement:
+ case ir_intrinsic_atomic_counter_add:
+ case ir_intrinsic_atomic_counter_sub:
+ case ir_intrinsic_atomic_counter_min:
+ case ir_intrinsic_atomic_counter_max:
+ case ir_intrinsic_atomic_counter_and:
+ case ir_intrinsic_atomic_counter_or:
+ case ir_intrinsic_atomic_counter_xor:
+ case ir_intrinsic_atomic_counter_exchange:
+ case ir_intrinsic_atomic_counter_comp_swap:
visit_atomic_counter_intrinsic(ir);
return;
- }
- if (!strcmp("__intrinsic_load_ssbo", callee) ||
- !strcmp("__intrinsic_store_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_add_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_min_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_max_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_and_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_or_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_xor_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_exchange_ssbo", callee) ||
- !strcmp("__intrinsic_atomic_comp_swap_ssbo", callee)) {
+ case ir_intrinsic_ssbo_load:
+ case ir_intrinsic_ssbo_store:
+ case ir_intrinsic_ssbo_atomic_add:
+ case ir_intrinsic_ssbo_atomic_min:
+ case ir_intrinsic_ssbo_atomic_max:
+ case ir_intrinsic_ssbo_atomic_and:
+ case ir_intrinsic_ssbo_atomic_or:
+ case ir_intrinsic_ssbo_atomic_xor:
+ case ir_intrinsic_ssbo_atomic_exchange:
+ case ir_intrinsic_ssbo_atomic_comp_swap:
visit_ssbo_intrinsic(ir);
return;
- }
- if (!strcmp("__intrinsic_memory_barrier", callee) ||
- !strcmp("__intrinsic_memory_barrier_atomic_counter", callee) ||
- !strcmp("__intrinsic_memory_barrier_buffer", callee) ||
- !strcmp("__intrinsic_memory_barrier_image", callee) ||
- !strcmp("__intrinsic_memory_barrier_shared", callee) ||
- !strcmp("__intrinsic_group_memory_barrier", callee)) {
+ case ir_intrinsic_memory_barrier:
+ case ir_intrinsic_memory_barrier_atomic_counter:
+ case ir_intrinsic_memory_barrier_buffer:
+ case ir_intrinsic_memory_barrier_image:
+ case ir_intrinsic_memory_barrier_shared:
+ case ir_intrinsic_group_memory_barrier:
visit_membar_intrinsic(ir);
return;
- }
- if (!strcmp("__intrinsic_load_shared", callee) ||
- !strcmp("__intrinsic_store_shared", callee) ||
- !strcmp("__intrinsic_atomic_add_shared", callee) ||
- !strcmp("__intrinsic_atomic_min_shared", callee) ||
- !strcmp("__intrinsic_atomic_max_shared", callee) ||
- !strcmp("__intrinsic_atomic_and_shared", callee) ||
- !strcmp("__intrinsic_atomic_or_shared", callee) ||
- !strcmp("__intrinsic_atomic_xor_shared", callee) ||
- !strcmp("__intrinsic_atomic_exchange_shared", callee) ||
- !strcmp("__intrinsic_atomic_comp_swap_shared", callee)) {
+ case ir_intrinsic_shared_load:
+ case ir_intrinsic_shared_store:
+ case ir_intrinsic_shared_atomic_add:
+ case ir_intrinsic_shared_atomic_min:
+ case ir_intrinsic_shared_atomic_max:
+ case ir_intrinsic_shared_atomic_and:
+ case ir_intrinsic_shared_atomic_or:
+ case ir_intrinsic_shared_atomic_xor:
+ case ir_intrinsic_shared_atomic_exchange:
+ case ir_intrinsic_shared_atomic_comp_swap:
visit_shared_intrinsic(ir);
return;
- }
- if (!strcmp("__intrinsic_image_load", callee) ||
- !strcmp("__intrinsic_image_store", callee) ||
- !strcmp("__intrinsic_image_atomic_add", callee) ||
- !strcmp("__intrinsic_image_atomic_min", callee) ||
- !strcmp("__intrinsic_image_atomic_max", callee) ||
- !strcmp("__intrinsic_image_atomic_and", callee) ||
- !strcmp("__intrinsic_image_atomic_or", callee) ||
- !strcmp("__intrinsic_image_atomic_xor", callee) ||
- !strcmp("__intrinsic_image_atomic_exchange", callee) ||
- !strcmp("__intrinsic_image_atomic_comp_swap", callee) ||
- !strcmp("__intrinsic_image_size", callee) ||
- !strcmp("__intrinsic_image_samples", callee)) {
+ case ir_intrinsic_image_load:
+ case ir_intrinsic_image_store:
+ case ir_intrinsic_image_atomic_add:
+ case ir_intrinsic_image_atomic_min:
+ case ir_intrinsic_image_atomic_max:
+ case ir_intrinsic_image_atomic_and:
+ case ir_intrinsic_image_atomic_or:
+ case ir_intrinsic_image_atomic_xor:
+ case ir_intrinsic_image_atomic_exchange:
+ case ir_intrinsic_image_atomic_comp_swap:
+ case ir_intrinsic_image_size:
+ case ir_intrinsic_image_samples:
visit_image_intrinsic(ir);
return;
+
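+ /* The remaining intrinsic IDs are not expected to reach this point. */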
+ case ir_intrinsic_generic_load:
+ case ir_intrinsic_generic_store:
+ case ir_intrinsic_generic_atomic_add:
+ case ir_intrinsic_generic_atomic_and:
+ case ir_intrinsic_generic_atomic_or:
+ case ir_intrinsic_generic_atomic_xor:
+ case ir_intrinsic_generic_atomic_min:
+ case ir_intrinsic_generic_atomic_max:
+ case ir_intrinsic_generic_atomic_exchange:
+ case ir_intrinsic_generic_atomic_comp_swap:
+ case ir_intrinsic_shader_clock:
+ unreachable("Invalid intrinsic");
}
entry = get_function_signature(sig);