*location = SYSTEM_VALUE_GLOBAL_INVOCATION_INDEX;
set_mode_system_value(b, mode);
break;
+ case SpvBuiltInGlobalOffset:
+ *location = SYSTEM_VALUE_BASE_GLOBAL_INVOCATION_ID;
+ set_mode_system_value(b, mode);
+ break;
case SpvBuiltInBaseVertex:
/* OpenGL gl_BaseVertex (SYSTEM_VALUE_BASE_VERTEX) is not the same
* semantic as Vulkan BaseVertex (SYSTEM_VALUE_FIRST_VERTEX).
return nir_shrink_zero_pad_vec(b, val, num_components);
}
+/* Parse the optional SPIR-V Memory Operands that trail OpLoad/OpStore/
+ * OpCopyMemory, starting at word w[*idx].
+ *
+ * On return, *access holds the SpvMemoryAccessMask (0 if no operands were
+ * present), *alignment holds the Aligned literal (0 if absent), and *idx is
+ * advanced past all consumed words.  MakePointerAvailable writes its scope
+ * into *dest_scope and MakePointerVisible into *src_scope; callers pass NULL
+ * for the scope slot that cannot occur for their opcode, which the
+ * vtn_asserts below enforce.
+ *
+ * Returns false when no memory-operand words remain (i.e. *idx >= count);
+ * *access and *alignment are still zeroed so callers may test them blindly.
+ */
+static bool
+vtn_get_mem_operands(struct vtn_builder *b, const uint32_t *w, unsigned count,
+ unsigned *idx, SpvMemoryAccessMask *access, unsigned *alignment,
+ SpvScope *dest_scope, SpvScope *src_scope)
+{
+ *access = 0;
+ *alignment = 0;
+ if (*idx >= count)
+ return false;
+
+ *access = w[(*idx)++];
+ if (*access & SpvMemoryAccessAlignedMask) {
+ vtn_assert(*idx < count);
+ *alignment = w[(*idx)++];
+ }
+
+ if (*access & SpvMemoryAccessMakePointerAvailableMask) {
+ vtn_assert(*idx < count);
+ vtn_assert(dest_scope);
+ /* Scope is an <id> of a constant, not a literal word. */
+ *dest_scope = vtn_constant_uint(b, w[(*idx)++]);
+ }
+
+ if (*access & SpvMemoryAccessMakePointerVisibleMask) {
+ vtn_assert(*idx < count);
+ vtn_assert(src_scope);
+ *src_scope = vtn_constant_uint(b, w[(*idx)++]);
+ }
+
+ return true;
+}
+
+/* Map a vtn variable mode to the SPIR-V memory-semantics storage-class bit
+ * used when emitting availability/visibility barriers for accesses to that
+ * storage.  Modes with no corresponding semantics bit (e.g. function-local
+ * or input variables) map to MaskNone, meaning no barrier bit is needed.
+ */
+SpvMemorySemanticsMask
+vtn_mode_to_memory_semantics(enum vtn_variable_mode mode)
+{
+ switch (mode) {
+ case vtn_variable_mode_ssbo:
+ case vtn_variable_mode_phys_ssbo:
+ return SpvMemorySemanticsUniformMemoryMask;
+ case vtn_variable_mode_workgroup:
+ return SpvMemorySemanticsWorkgroupMemoryMask;
+ case vtn_variable_mode_cross_workgroup:
+ return SpvMemorySemanticsCrossWorkgroupMemoryMask;
+ case vtn_variable_mode_atomic_counter:
+ return SpvMemorySemanticsAtomicCounterMemoryMask;
+ case vtn_variable_mode_image:
+ return SpvMemorySemanticsImageMemoryMask;
+ case vtn_variable_mode_output:
+ return SpvMemorySemanticsOutputMemoryMask;
+ default:
+ return SpvMemorySemanticsMaskNone;
+ }
+}
+
void
vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
const uint32_t *w, unsigned count)
break;
}
+ case SpvOpConstantSampler: {
+ /* Synthesize a pointer-to-sampler type, create a variable of that type,
+ * and give the variable a constant initializer with the sampler params */
+ struct vtn_type *sampler_type = vtn_value(b, w[1], vtn_value_type_type)->type;
+ struct vtn_value *val = vtn_push_value(b, w[2], vtn_value_type_pointer);
+
+ struct vtn_type *ptr_type = rzalloc(b, struct vtn_type);
+ ptr_type->base_type = vtn_base_type_pointer;
+ ptr_type->deref = sampler_type;
+ ptr_type->storage_class = SpvStorageClassUniform;
+
+ ptr_type->type = nir_address_format_to_glsl_type(
+ vtn_mode_to_address_format(b, vtn_variable_mode_function));
+
+ vtn_create_variable(b, val, ptr_type, ptr_type->storage_class, NULL, NULL);
+
+ /* Stash the literal operands (addressing mode, normalized coords,
+ * filter mode from w[3..5]) on the NIR variable so the backend can
+ * materialize the inline sampler. */
+ nir_variable *nir_var = val->pointer->var->var;
+ nir_var->data.sampler.is_inline_sampler = true;
+ nir_var->data.sampler.addressing_mode = w[3];
+ nir_var->data.sampler.normalized_coordinates = w[4];
+ nir_var->data.sampler.filter_mode = w[5];
+
+ break;
+ }
+
+
case SpvOpAccessChain:
case SpvOpPtrAccessChain:
case SpvOpInBoundsAccessChain:
vtn_assert_types_equal(b, opcode, res_type, src_val->type->deref);
- if (count > 4) {
- unsigned idx = 5;
- SpvMemoryAccessMask access = w[4];
- if (access & SpvMemoryAccessAlignedMask)
- idx++;
-
- if (access & SpvMemoryAccessMakePointerVisibleMask) {
- SpvMemorySemanticsMask semantics =
- SpvMemorySemanticsMakeVisibleMask |
- vtn_storage_class_to_memory_semantics(src->ptr_type->storage_class);
-
- SpvScope scope = vtn_constant_uint(b, w[idx]);
- vtn_emit_memory_barrier(b, scope, semantics);
- }
+ unsigned idx = 4, alignment;
+ SpvMemoryAccessMask access;
+ SpvScope scope;
+ vtn_get_mem_operands(b, w, count, &idx, &access, &alignment, NULL, &scope);
+ if (access & SpvMemoryAccessMakePointerVisibleMask) {
+ SpvMemorySemanticsMask semantics =
+ SpvMemorySemanticsMakeVisibleMask |
+ vtn_mode_to_memory_semantics(src->mode);
+ vtn_emit_memory_barrier(b, scope, semantics);
}
vtn_push_ssa_value(b, w[2], vtn_variable_load(b, src));
vtn_assert_types_equal(b, opcode, dest_val->type->deref, src_val->type);
+ unsigned idx = 3, alignment;
+ SpvMemoryAccessMask access;
+ SpvScope scope;
+ vtn_get_mem_operands(b, w, count, &idx, &access, &alignment, &scope, NULL);
+
struct vtn_ssa_value *src = vtn_ssa_value(b, w[2]);
vtn_variable_store(b, src, dest);
- if (count > 3) {
- unsigned idx = 4;
- SpvMemoryAccessMask access = w[3];
-
- if (access & SpvMemoryAccessAlignedMask)
- idx++;
-
- if (access & SpvMemoryAccessMakePointerAvailableMask) {
- SpvMemorySemanticsMask semantics =
- SpvMemorySemanticsMakeAvailableMask |
- vtn_storage_class_to_memory_semantics(dest->ptr_type->storage_class);
- SpvScope scope = vtn_constant_uint(b, w[idx]);
- vtn_emit_memory_barrier(b, scope, semantics);
- }
+ if (access & SpvMemoryAccessMakePointerAvailableMask) {
+ SpvMemorySemanticsMask semantics =
+ SpvMemorySemanticsMakeAvailableMask |
+ vtn_mode_to_memory_semantics(dest->mode);
+ vtn_emit_memory_barrier(b, scope, semantics);
}
break;
}