anv_execbuf_add_bo_set(execbuf, cmd_buffer->surface_relocs.deps, 0,
&cmd_buffer->device->alloc);
+ /* Add the BOs for all the pinned buffers. These have to be resident in
+ * every submission because a shader may load or store through their
+ * addresses at any time via VK_EXT_buffer_device_address.
+ */
+ if (cmd_buffer->device->pinned_buffers != NULL &&
+ cmd_buffer->device->pinned_buffers->entries) {
+ struct set *pinned_bos = _mesa_pointer_set_create(NULL);
+ if (pinned_bos == NULL)
+ return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
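+ /* Several VkBuffers may share one BO, so dedup through a temporary
+ * pointer set before handing them to the execbuf.
+ */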
+ set_foreach(cmd_buffer->device->pinned_buffers, entry) {
+ const struct anv_buffer *buffer = entry->key;
+ _mesa_set_add(pinned_bos, buffer->address.bo);
+ }
+ anv_execbuf_add_bo_set(execbuf, pinned_bos, 0,
+ &cmd_buffer->device->alloc);
+ _mesa_set_destroy(pinned_bos, NULL);
+ }
+
struct anv_block_pool *pool;
pool = &cmd_buffer->device->dynamic_state_pool.block_pool;
anv_block_pool_foreach_bo(bo, pool) {
VkPhysicalDevice physicalDevice,
VkPhysicalDeviceFeatures2* pFeatures)
{
+ ANV_FROM_HANDLE(anv_physical_device, pdevice, physicalDevice);
anv_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
vk_foreach_struct(ext, pFeatures->pNext) {
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
VkPhysicalDevice16BitStorageFeatures *features =
(VkPhysicalDevice16BitStorageFeatures *)ext;
- ANV_FROM_HANDLE(anv_physical_device, pdevice, physicalDevice);
-
features->storageBuffer16BitAccess = pdevice->info.gen >= 8;
features->uniformAndStorageBuffer16BitAccess = pdevice->info.gen >= 8;
features->storagePushConstant16 = pdevice->info.gen >= 8;
break;
}
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT: {
+ VkPhysicalDeviceBufferAddressFeaturesEXT *features = (void *)ext;
+ features->bufferDeviceAddress = pdevice->use_softpin &&
+ pdevice->info.gen >= 8;
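+ /* Capture/replay and multi-device addressing are not supported. */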
+ features->bufferDeviceAddressCaptureReplay = false;
+ features->bufferDeviceAddressMultiDevice = false;
+ break;
+ }
+
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
(VkPhysicalDeviceConditionalRenderingFeaturesEXT*)ext;
if (device->info.gen >= 10)
anv_device_init_hiz_clear_value_bo(device);
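+ /* VK_EXT_buffer_device_address is only exposed with softpin, so the
+ * pinned-buffer set is only needed there.
+ */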
+ if (physical_device->use_softpin)
+ device->pinned_buffers = _mesa_pointer_set_create(NULL);
+
anv_scratch_pool_init(device, &device->scratch_pool);
anv_queue_init(device, &device->queue);
anv_queue_finish(&device->queue);
+ if (physical_device->use_softpin)
+ _mesa_set_destroy(device->pinned_buffers, NULL);
+
#ifdef HAVE_VALGRIND
/* We only need to free these to prevent valgrind errors. The backing
* BO will go away in a couple of lines so we don't actually leak.
buffer->usage = pCreateInfo->usage;
buffer->address = ANV_NULL_ADDRESS;
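+ /* Track buffers that can be accessed by raw GPU address so their BOs
+ * can be made resident in every execbuf.
+ */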
+ if (buffer->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) {
+ pthread_mutex_lock(&device->mutex);
+ _mesa_set_add(device->pinned_buffers, buffer);
+ pthread_mutex_unlock(&device->mutex);
+ }
+
*pBuffer = anv_buffer_to_handle(buffer);
return VK_SUCCESS;
if (!buffer)
return;
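+ /* Stop tracking the buffer so future submissions no longer force its
+ * BO resident.
+ */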
+ if (buffer->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) {
+ pthread_mutex_lock(&device->mutex);
+ _mesa_set_remove_key(device->pinned_buffers, buffer);
+ pthread_mutex_unlock(&device->mutex);
+ }
+
vk_free2(&device->alloc, pAllocator, buffer);
}
+VkDeviceAddress anv_GetBufferDeviceAddressEXT(
+ VkDevice device,
+ const VkBufferDeviceAddressInfoEXT* pInfo)
+{
+ ANV_FROM_HANDLE(anv_buffer, buffer, pInfo->buffer);
+
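+ /* Only pinned BOs have a GPU virtual address that is fixed for the
+ * lifetime of the buffer, which is what makes the returned address
+ * stable.
+ */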
+ assert(buffer->address.bo->flags & EXEC_OBJECT_PINNED);
+
+ return anv_address_physical(buffer->address);
+}
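+/* A minimal usage sketch (illustrative, not part of this patch): with the
+ * extension enabled, an application created my_buffer (hypothetical name)
+ * with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT and queries its GPU
+ * address like so:
+ *
+ *    VkBufferDeviceAddressInfoEXT info = {
+ *       .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+ *       .buffer = my_buffer,
+ *    };
+ *    VkDeviceAddress addr = vkGetBufferDeviceAddressEXT(device, &info);
+ */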
+
void
anv_fill_buffer_surface_state(struct anv_device *device, struct anv_state state,
enum isl_format format,
Extension('VK_KHR_xcb_surface', 6, 'VK_USE_PLATFORM_XCB_KHR'),
Extension('VK_KHR_xlib_surface', 6, 'VK_USE_PLATFORM_XLIB_KHR'),
Extension('VK_EXT_acquire_xlib_display', 1, 'VK_USE_PLATFORM_XLIB_XRANDR_EXT'),
+ Extension('VK_EXT_buffer_device_address', 1,
+ 'device->use_softpin && device->info.gen >= 8'),
Extension('VK_EXT_calibrated_timestamps', 1, True),
Extension('VK_EXT_conditional_rendering', 1, 'device->info.gen >= 8 || device->info.is_haswell'),
Extension('VK_EXT_debug_report', 8, True),
.int64 = pdevice->info.gen >= 8,
.min_lod = true,
.multiview = true,
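+ /* Physical storage buffer addresses are only stable when the kernel
+ * never moves our BOs, hence the softpin requirement.
+ */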
+ .physical_storage_buffer_address = pdevice->info.gen >= 8 &&
+ pdevice->use_softpin,
.post_depth_coverage = pdevice->info.gen >= 9,
.shader_viewport_index_layer = true,
.stencil_export = pdevice->info.gen >= 9,
},
.ubo_ptr_type = glsl_vector_type(GLSL_TYPE_UINT, 2),
.ssbo_ptr_type = glsl_vector_type(GLSL_TYPE_UINT, 2),
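+ /* Physical storage buffer pointers are raw 64-bit GPU addresses (a
+ * single uint64), matching nir_address_format_64bit_global below.
+ */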
+ .phys_ssbo_ptr_type = glsl_vector_type(GLSL_TYPE_UINT64, 1),
.push_const_ptr_type = glsl_uint_type(),
.shared_ptr_type = glsl_uint_type(),
};
NIR_PASS_V(nir, nir_lower_explicit_io, nir_var_mem_ubo | nir_var_mem_ssbo,
nir_address_format_vk_index_offset);
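+ /* Global (physical storage buffer) access lowers straight to 64-bit
+ * addresses instead of descriptor index/offset pairs.
+ */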
+ NIR_PASS_V(nir, nir_lower_explicit_io, nir_var_mem_global,
+ nir_address_format_64bit_global);
+
NIR_PASS_V(nir, nir_propagate_invariant);
NIR_PASS_V(nir, nir_lower_io_to_temporaries,
entry_point->impl, true, false);
struct anv_bo trivial_batch_bo;
struct anv_bo hiz_clear_bo;
+ /* Set of pointers to anv_buffer objects for all pinned buffers. Pinned
+ * buffers are always resident because they could be used at any time via
+ * VK_EXT_buffer_device_address.
+ */
+ struct set *pinned_buffers;
+
struct anv_pipeline_cache default_pipeline_cache;
struct blorp_context blorp;