vk/0.210.0: Switch to the new-style handle declarations
[mesa.git] / src / vulkan / anv_device.c
index eb38adfa4268dd64eedd811c76ae84403580e557..aca082ac1b51b26e90896afdcdd91242c065c052 100644
@@ -31,6 +31,8 @@
 #include "mesa/main/git_sha1.h"
 #include "util/strtod.h"
 
+#include "gen7_pack.h"
+
 struct anv_dispatch_table dtable;
 
 static void
@@ -81,11 +83,14 @@ anv_physical_device_init(struct anv_physical_device *device,
       goto fail;
    }
 
-   if (device->info->gen == 7 &&
-       !device->info->is_haswell && !device->info->is_baytrail) {
-      fprintf(stderr, "WARNING: Ivy Bridge Vulkan support is incomplete");
+   if (device->info->is_haswell) {
+      fprintf(stderr, "WARNING: Haswell Vulkan support is incomplete\n");
+   } else if (device->info->gen == 7 && !device->info->is_baytrail) {
+      fprintf(stderr, "WARNING: Ivy Bridge Vulkan support is incomplete\n");
+   } else if (device->info->gen == 9) {
+      fprintf(stderr, "WARNING: Skylake Vulkan support is incomplete\n");
    } else if (device->info->gen == 8 && !device->info->is_cherryview) {
-      /* Briadwell is as fully supported as anything */
+      /* Broadwell is as fully supported as anything */
    } else {
       result = vk_errorf(VK_UNSUPPORTED,
                          "Vulkan not yet supported on %s", device->name);
@@ -115,7 +120,7 @@ anv_physical_device_init(struct anv_physical_device *device,
                          "non-llc gpu");
       goto fail;
    }
-   
+
    close(fd);
 
    brw_process_intel_debug_variable();
@@ -128,8 +133,10 @@ anv_physical_device_init(struct anv_physical_device *device,
    device->compiler->shader_debug_log = compiler_debug_log;
    device->compiler->shader_perf_log = compiler_perf_log;
 
+   isl_device_init(&device->isl_dev, device->info);
+
    return VK_SUCCESS;
-   
+
 fail:
    close(fd);
    return result;
@@ -633,7 +640,7 @@ VkResult anv_CreateDevice(
          return vk_error(VK_ERROR_EXTENSION_NOT_PRESENT);
    }
 
-   anv_set_dispatch_gen(physical_device->info->gen);
+   anv_set_dispatch_devinfo(physical_device->info);
 
    device = anv_instance_alloc(instance, sizeof(*device), 8,
                                VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
@@ -647,7 +654,7 @@ VkResult anv_CreateDevice(
    device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
    if (device->fd == -1)
       goto fail_device;
-      
+
    device->context_id = anv_gem_create_context(device);
    if (device->context_id == -1)
       goto fail_fd;
@@ -661,15 +668,18 @@ VkResult anv_CreateDevice(
    anv_state_pool_init(&device->dynamic_state_pool,
                        &device->dynamic_state_block_pool);
 
-   anv_block_pool_init(&device->instruction_block_pool, device, 2048);
+   anv_block_pool_init(&device->instruction_block_pool, device, 4096);
    anv_block_pool_init(&device->surface_state_block_pool, device, 4096);
 
    anv_state_pool_init(&device->surface_state_pool,
                        &device->surface_state_block_pool);
 
+   anv_bo_init_new(&device->workaround_bo, device, 1024);
+
    anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);
 
    device->info = *physical_device->info;
+   device->isl_dev = physical_device->isl_dev;
 
    anv_queue_init(device, &device->queue);
 
@@ -705,6 +715,9 @@ void anv_DestroyDevice(
    anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
 #endif
 
+   anv_gem_munmap(device->workaround_bo.map, device->workaround_bo.size);
+   anv_gem_close(device, device->workaround_bo.gem_handle);
+
    anv_bo_pool_finish(&device->batch_bo_pool);
    anv_state_pool_finish(&device->dynamic_state_pool);
    anv_block_pool_finish(&device->dynamic_state_block_pool);
@@ -1015,7 +1028,7 @@ VkResult anv_MapMemory(
    mem->map_size = size;
 
    *ppData = mem->map;
-   
+
    return VK_SUCCESS;
 }
 
@@ -1286,14 +1299,24 @@ VkResult anv_WaitForFences(
     uint64_t                                    timeout)
 {
    ANV_FROM_HANDLE(anv_device, device, _device);
+
+   /* DRM_IOCTL_I915_GEM_WAIT uses a signed 64-bit timeout and is supposed
+    * to block indefinitely for timeouts <= 0.  Unfortunately, this was broken
+    * for a couple of kernel releases.  Since there's no way to know
+    * whether or not the kernel we're using is one of the broken ones, the
+    * best we can do is to clamp the timeout to INT64_MAX.  This limits the
+    * maximum timeout from 584 years to 292 years - likely not a big deal.
+    */
+   if (timeout > INT64_MAX)
+      timeout = INT64_MAX;
+
    int64_t t = timeout;
-   int ret;
 
    /* FIXME: handle !waitAll */
 
    for (uint32_t i = 0; i < fenceCount; i++) {
       ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
-      ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
+      int ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
       if (ret == -1 && errno == ETIME) {
          return VK_TIMEOUT;
       } else if (ret == -1) {
@@ -1313,7 +1336,7 @@ VkResult anv_CreateSemaphore(
     const VkSemaphoreCreateInfo*                pCreateInfo,
     VkSemaphore*                                pSemaphore)
 {
-   pSemaphore->handle = 1;
+   *pSemaphore = (VkSemaphore)1;
    stub_return(VK_SUCCESS);
 }
 
@@ -1415,58 +1438,39 @@ void anv_DestroyBuffer(
 void
 anv_fill_buffer_surface_state(struct anv_device *device, void *state,
                               const struct anv_format *format,
-                              uint32_t offset, uint32_t range)
+                              uint32_t offset, uint32_t range, uint32_t stride)
 {
    switch (device->info.gen) {
    case 7:
-      gen7_fill_buffer_surface_state(state, format, offset, range);
+      if (device->info.is_haswell)
+         gen75_fill_buffer_surface_state(state, format, offset, range, stride);
+      else
+         gen7_fill_buffer_surface_state(state, format, offset, range, stride);
       break;
    case 8:
-      gen8_fill_buffer_surface_state(state, format, offset, range);
+      gen8_fill_buffer_surface_state(state, format, offset, range, stride);
+      break;
+   case 9:
+      gen9_fill_buffer_surface_state(state, format, offset, range, stride);
       break;
    default:
       unreachable("unsupported gen\n");
    }
 }
 
-VkResult
-anv_buffer_view_create(
-   struct anv_device *                          device,
-   const VkBufferViewCreateInfo*                pCreateInfo,
-   struct anv_buffer_view **                    bview_out)
+VkResult anv_CreateBufferView(
+    VkDevice                                    _device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    VkBufferView*                               pView)
 {
-   ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
-   struct anv_buffer_view *bview;
-
-   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);
-
-   bview = anv_device_alloc(device, sizeof(*bview), 8,
-                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
-   if (bview == NULL)
-      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
-
-   *bview = (struct anv_buffer_view) {
-      .bo = buffer->bo,
-      .offset = buffer->offset + pCreateInfo->offset,
-      .surface_state = anv_state_pool_alloc(&device->surface_state_pool, 64, 64),
-      .format = anv_format_for_vk_format(pCreateInfo->format),
-      .range = pCreateInfo->range,
-   };
-
-   *bview_out = bview;
-
-   return VK_SUCCESS;
+   stub_return(VK_UNSUPPORTED);
 }
 
 void anv_DestroyBufferView(
     VkDevice                                    _device,
     VkBufferView                                _bview)
 {
-   ANV_FROM_HANDLE(anv_device, device, _device);
-   ANV_FROM_HANDLE(anv_buffer_view, bview, _bview);
-
-   anv_state_pool_free(&device->surface_state_pool, bview->surface_state);
-   anv_device_free(device, bview);
+   stub();
 }
 
 void anv_DestroySampler(
@@ -1479,356 +1483,6 @@ void anv_DestroySampler(
    anv_device_free(device, sampler);
 }
 
-// Descriptor set functions
-
-VkResult anv_CreateDescriptorSetLayout(
-    VkDevice                                    _device,
-    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
-    VkDescriptorSetLayout*                      pSetLayout)
-{
-   ANV_FROM_HANDLE(anv_device, device, _device);
-   struct anv_descriptor_set_layout *set_layout;
-   uint32_t s;
-
-   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
-
-   uint32_t immutable_sampler_count = 0;
-   for (uint32_t b = 0; b < pCreateInfo->count; b++) {
-      if (pCreateInfo->pBinding[b].pImmutableSamplers)
-         immutable_sampler_count += pCreateInfo->pBinding[b].arraySize;
-   }
-
-   size_t size = sizeof(struct anv_descriptor_set_layout) +
-                 pCreateInfo->count * sizeof(set_layout->binding[0]) +
-                 immutable_sampler_count * sizeof(struct anv_sampler *);
-
-   set_layout = anv_device_alloc(device, size, 8,
-                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
-   if (!set_layout)
-      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
-
-   /* We just allocate all the samplers at the end of the struct */
-   struct anv_sampler **samplers =
-      (struct anv_sampler **)&set_layout->binding[pCreateInfo->count];
-
-   set_layout->binding_count = pCreateInfo->count;
-   set_layout->shader_stages = 0;
-   set_layout->size = 0;
-
-   /* Initialize all binding_layout entries to -1 */
-   memset(set_layout->binding, -1,
-          pCreateInfo->count * sizeof(set_layout->binding[0]));
-
-   /* Initialize all samplers to 0 */
-   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));
-
-   uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
-   uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
-   uint32_t dynamic_offset_count = 0;
-
-   for (uint32_t b = 0; b < pCreateInfo->count; b++) {
-      uint32_t array_size = MAX2(1, pCreateInfo->pBinding[b].arraySize);
-      set_layout->binding[b].array_size = array_size;
-      set_layout->binding[b].descriptor_index = set_layout->size;
-      set_layout->size += array_size;
-
-      switch (pCreateInfo->pBinding[b].descriptorType) {
-      case VK_DESCRIPTOR_TYPE_SAMPLER:
-      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-         for_each_bit(s, pCreateInfo->pBinding[b].stageFlags) {
-            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
-            sampler_count[s] += array_size;
-         }
-         break;
-      default:
-         break;
-      }
-
-      switch (pCreateInfo->pBinding[b].descriptorType) {
-      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
-      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
-      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
-      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
-      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
-      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
-      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
-      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
-         for_each_bit(s, pCreateInfo->pBinding[b].stageFlags) {
-            set_layout->binding[b].stage[s].surface_index = surface_count[s];
-            surface_count[s] += array_size;
-         }
-         break;
-      default:
-         break;
-      }
-
-      switch (pCreateInfo->pBinding[b].descriptorType) {
-      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
-      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
-         dynamic_offset_count += array_size;
-         break;
-      default:
-         break;
-      }
-
-      if (pCreateInfo->pBinding[b].pImmutableSamplers) {
-         set_layout->binding[b].immutable_samplers = samplers;
-         samplers += array_size;
-
-         for (uint32_t i = 0; i < array_size; i++)
-            set_layout->binding[b].immutable_samplers[i] =
-               anv_sampler_from_handle(pCreateInfo->pBinding[b].pImmutableSamplers[i]);
-      } else {
-         set_layout->binding[b].immutable_samplers = NULL;
-      }
-
-      set_layout->shader_stages |= pCreateInfo->pBinding[b].stageFlags;
-   }
-
-   set_layout->dynamic_offset_count = dynamic_offset_count;
-
-   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);
-
-   return VK_SUCCESS;
-}
-
-void anv_DestroyDescriptorSetLayout(
-    VkDevice                                    _device,
-    VkDescriptorSetLayout                       _set_layout)
-{
-   ANV_FROM_HANDLE(anv_device, device, _device);
-   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
-
-   anv_device_free(device, set_layout);
-}
-
-VkResult anv_CreateDescriptorPool(
-    VkDevice                                    device,
-    const VkDescriptorPoolCreateInfo*           pCreateInfo,
-    VkDescriptorPool*                           pDescriptorPool)
-{
-   anv_finishme("VkDescriptorPool is a stub");
-   pDescriptorPool->handle = 1;
-   return VK_SUCCESS;
-}
-
-void anv_DestroyDescriptorPool(
-    VkDevice                                    _device,
-    VkDescriptorPool                            _pool)
-{
-   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
-}
-
-VkResult anv_ResetDescriptorPool(
-    VkDevice                                    device,
-    VkDescriptorPool                            descriptorPool)
-{
-   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
-   return VK_SUCCESS;
-}
-
-VkResult
-anv_descriptor_set_create(struct anv_device *device,
-                          const struct anv_descriptor_set_layout *layout,
-                          struct anv_descriptor_set **out_set)
-{
-   struct anv_descriptor_set *set;
-   size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);
-
-   set = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
-   if (!set)
-      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
-
-   /* A descriptor set may not be 100% filled. Clear the set so we can can
-    * later detect holes in it.
-    */
-   memset(set, 0, size);
-
-   set->layout = layout;
-
-   /* Go through and fill out immutable samplers if we have any */
-   struct anv_descriptor *desc = set->descriptors;
-   for (uint32_t b = 0; b < layout->binding_count; b++) {
-      if (layout->binding[b].immutable_samplers) {
-         for (uint32_t i = 0; i < layout->binding[b].array_size; i++)
-            desc[i].sampler = layout->binding[b].immutable_samplers[i];
-      }
-      desc += layout->binding[b].array_size;
-   }
-
-   *out_set = set;
-
-   return VK_SUCCESS;
-}
-
-void
-anv_descriptor_set_destroy(struct anv_device *device,
-                           struct anv_descriptor_set *set)
-{
-   anv_device_free(device, set);
-}
-
-VkResult anv_AllocDescriptorSets(
-    VkDevice                                    _device,
-    VkDescriptorPool                            descriptorPool,
-    VkDescriptorSetUsage                        setUsage,
-    uint32_t                                    count,
-    const VkDescriptorSetLayout*                pSetLayouts,
-    VkDescriptorSet*                            pDescriptorSets)
-{
-   ANV_FROM_HANDLE(anv_device, device, _device);
-
-   VkResult result = VK_SUCCESS;
-   struct anv_descriptor_set *set;
-   uint32_t i;
-
-   for (i = 0; i < count; i++) {
-      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);
-
-      result = anv_descriptor_set_create(device, layout, &set);
-      if (result != VK_SUCCESS)
-         break;
-
-      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
-   }
-
-   if (result != VK_SUCCESS)
-      anv_FreeDescriptorSets(_device, descriptorPool, i, pDescriptorSets);
-
-   return result;
-}
-
-VkResult anv_FreeDescriptorSets(
-    VkDevice                                    _device,
-    VkDescriptorPool                            descriptorPool,
-    uint32_t                                    count,
-    const VkDescriptorSet*                      pDescriptorSets)
-{
-   ANV_FROM_HANDLE(anv_device, device, _device);
-
-   for (uint32_t i = 0; i < count; i++) {
-      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
-
-      anv_descriptor_set_destroy(device, set);
-   }
-
-   return VK_SUCCESS;
-}
-
-void anv_UpdateDescriptorSets(
-    VkDevice                                    device,
-    uint32_t                                    writeCount,
-    const VkWriteDescriptorSet*                 pDescriptorWrites,
-    uint32_t                                    copyCount,
-    const VkCopyDescriptorSet*                  pDescriptorCopies)
-{
-   for (uint32_t i = 0; i < writeCount; i++) {
-      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
-      ANV_FROM_HANDLE(anv_descriptor_set, set, write->destSet);
-      const struct anv_descriptor_set_binding_layout *bind_layout =
-         &set->layout->binding[write->destBinding];
-      struct anv_descriptor *desc =
-         &set->descriptors[bind_layout->descriptor_index];
-
-      switch (write->descriptorType) {
-      case VK_DESCRIPTOR_TYPE_SAMPLER:
-         for (uint32_t j = 0; j < write->count; j++) {
-            ANV_FROM_HANDLE(anv_sampler, sampler,
-                            write->pDescriptors[j].sampler);
-
-            desc[j] = (struct anv_descriptor) {
-               .type = ANV_DESCRIPTOR_TYPE_SAMPLER,
-               .sampler = sampler,
-            };
-         }
-         break;
-
-      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-         for (uint32_t j = 0; j < write->count; j++) {
-            ANV_FROM_HANDLE(anv_image_view, iview,
-                            write->pDescriptors[j].imageView);
-            ANV_FROM_HANDLE(anv_sampler, sampler,
-                            write->pDescriptors[j].sampler);
-
-            desc[j].type = ANV_DESCRIPTOR_TYPE_IMAGE_VIEW_AND_SAMPLER;
-            desc[j].image_view = iview;
-
-            /* If this descriptor has an immutable sampler, we don't want
-             * to stomp on it.
-             */
-            if (sampler)
-               desc->sampler = sampler;
-         }
-         break;
-
-      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
-      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
-         for (uint32_t j = 0; j < write->count; j++) {
-            ANV_FROM_HANDLE(anv_image_view, iview,
-                            write->pDescriptors[j].imageView);
-
-            desc[j] = (struct anv_descriptor) {
-               .type = ANV_DESCRIPTOR_TYPE_IMAGE_VIEW,
-               .image_view = iview,
-            };
-         }
-         break;
-
-      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
-      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
-         anv_finishme("texel buffers not implemented");
-         break;
-
-      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
-         anv_finishme("input attachments not implemented");
-         break;
-
-      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
-      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
-      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
-      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-         for (uint32_t j = 0; j < write->count; j++) {
-            if (write->pDescriptors[j].bufferView.handle) {
-               ANV_FROM_HANDLE(anv_buffer_view, bview,
-                               write->pDescriptors[j].bufferView);
-
-               desc[j] = (struct anv_descriptor) {
-                  .type = ANV_DESCRIPTOR_TYPE_BUFFER_VIEW,
-                  .buffer_view = bview,
-               };
-            } else {
-               ANV_FROM_HANDLE(anv_buffer, buffer,
-                               write->pDescriptors[j].bufferInfo.buffer);
-               assert(buffer);
-
-               desc[j] = (struct anv_descriptor) {
-                  .type = ANV_DESCRIPTOR_TYPE_BUFFER_AND_OFFSET,
-                  .buffer = buffer,
-                  .offset = write->pDescriptors[j].bufferInfo.offset,
-                  .range = write->pDescriptors[j].bufferInfo.range,
-               };
-            }
-         }
-
-      default:
-         break;
-      }
-   }
-
-   for (uint32_t i = 0; i < copyCount; i++) {
-      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
-      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->destSet);
-      ANV_FROM_HANDLE(anv_descriptor_set, dest, copy->destSet);
-      for (uint32_t j = 0; j < copy->count; j++) {
-         dest->descriptors[copy->destBinding + j] =
-            src->descriptors[copy->srcBinding + j];
-      }
-   }
-}
-
 VkResult anv_CreateFramebuffer(
     VkDevice                                    _device,
     const VkFramebufferCreateInfo*              pCreateInfo,