2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
30 #include "anv_private.h"
31 #include "mesa/main/git_sha1.h"
32 #include "util/strtod.h"
/* Read an integer from environment variable NAME.
 *
 * Returns 0 when the variable is unset; otherwise parses it with
 * strtol() base 0, so decimal, octal (0...) and hex (0x...) all work.
 */
static int
anv_env_get_int(const char *name)
{
   const char *val = getenv(name);

   if (!val)
      return 0;

   return strtol(val, NULL, 0);
}
46 anv_physical_device_init(struct anv_physical_device
*device
,
47 struct anv_instance
*instance
,
52 fd
= open(path
, O_RDWR
| O_CLOEXEC
);
54 return vk_error(VK_ERROR_UNAVAILABLE
);
56 device
->instance
= instance
;
59 device
->chipset_id
= anv_env_get_int("INTEL_DEVID_OVERRIDE");
60 device
->no_hw
= false;
61 if (device
->chipset_id
) {
62 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
65 device
->chipset_id
= anv_gem_get_param(fd
, I915_PARAM_CHIPSET_ID
);
67 if (!device
->chipset_id
)
70 device
->name
= brw_get_device_name(device
->chipset_id
);
71 device
->info
= brw_get_device_info(device
->chipset_id
, -1);
75 if (anv_gem_get_aperture(fd
, &device
->aperture_size
) == -1)
78 if (!anv_gem_get_param(fd
, I915_PARAM_HAS_WAIT_TIMEOUT
))
81 if (!anv_gem_get_param(fd
, I915_PARAM_HAS_EXECBUF2
))
84 if (!anv_gem_get_param(fd
, I915_PARAM_HAS_LLC
))
87 if (!anv_gem_get_param(fd
, I915_PARAM_HAS_EXEC_CONSTANTS
))
96 return vk_error(VK_ERROR_UNAVAILABLE
);
99 static void *default_alloc(
103 VkSystemAllocType allocType
)
108 static void default_free(
115 static const VkAllocCallbacks default_alloc_callbacks
= {
117 .pfnAlloc
= default_alloc
,
118 .pfnFree
= default_free
121 VkResult
anv_CreateInstance(
122 const VkInstanceCreateInfo
* pCreateInfo
,
123 VkInstance
* pInstance
)
125 struct anv_instance
*instance
;
126 const VkAllocCallbacks
*alloc_callbacks
= &default_alloc_callbacks
;
127 void *user_data
= NULL
;
129 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
);
131 if (pCreateInfo
->pAllocCb
) {
132 alloc_callbacks
= pCreateInfo
->pAllocCb
;
133 user_data
= pCreateInfo
->pAllocCb
->pUserData
;
135 instance
= alloc_callbacks
->pfnAlloc(user_data
, sizeof(*instance
), 8,
136 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
138 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
140 instance
->pAllocUserData
= alloc_callbacks
->pUserData
;
141 instance
->pfnAlloc
= alloc_callbacks
->pfnAlloc
;
142 instance
->pfnFree
= alloc_callbacks
->pfnFree
;
143 instance
->apiVersion
= pCreateInfo
->pAppInfo
->apiVersion
;
144 instance
->physicalDeviceCount
= 0;
148 VG(VALGRIND_CREATE_MEMPOOL(instance
, 0, false));
150 *pInstance
= anv_instance_to_handle(instance
);
155 VkResult
anv_DestroyInstance(
156 VkInstance _instance
)
158 ANV_FROM_HANDLE(anv_instance
, instance
, _instance
);
160 VG(VALGRIND_DESTROY_MEMPOOL(instance
));
164 instance
->pfnFree(instance
->pAllocUserData
, instance
);
170 anv_instance_alloc(struct anv_instance
*instance
, size_t size
,
171 size_t alignment
, VkSystemAllocType allocType
)
173 void *mem
= instance
->pfnAlloc(instance
->pAllocUserData
,
174 size
, alignment
, allocType
);
176 VALGRIND_MEMPOOL_ALLOC(instance
, mem
, size
);
177 VALGRIND_MAKE_MEM_UNDEFINED(mem
, size
);
183 anv_instance_free(struct anv_instance
*instance
, void *mem
)
188 VALGRIND_MEMPOOL_FREE(instance
, mem
);
190 instance
->pfnFree(instance
->pAllocUserData
, mem
);
193 VkResult
anv_EnumeratePhysicalDevices(
194 VkInstance _instance
,
195 uint32_t* pPhysicalDeviceCount
,
196 VkPhysicalDevice
* pPhysicalDevices
)
198 ANV_FROM_HANDLE(anv_instance
, instance
, _instance
);
201 if (instance
->physicalDeviceCount
== 0) {
202 result
= anv_physical_device_init(&instance
->physicalDevice
,
203 instance
, "/dev/dri/renderD128");
204 if (result
!= VK_SUCCESS
)
207 instance
->physicalDeviceCount
= 1;
210 /* pPhysicalDeviceCount is an out parameter if pPhysicalDevices is NULL;
211 * otherwise it's an inout parameter.
213 * The Vulkan spec (git aaed022) says:
215 * pPhysicalDeviceCount is a pointer to an unsigned integer variable
216 * that is initialized with the number of devices the application is
217 * prepared to receive handles to. pname:pPhysicalDevices is pointer to
218 * an array of at least this many VkPhysicalDevice handles [...].
220 * Upon success, if pPhysicalDevices is NULL, vkEnumeratePhysicalDevices
221 * overwrites the contents of the variable pointed to by
222 * pPhysicalDeviceCount with the number of physical devices in in the
223 * instance; otherwise, vkEnumeratePhysicalDevices overwrites
224 * pPhysicalDeviceCount with the number of physical handles written to
227 if (!pPhysicalDevices
) {
228 *pPhysicalDeviceCount
= instance
->physicalDeviceCount
;
229 } else if (*pPhysicalDeviceCount
>= 1) {
230 pPhysicalDevices
[0] = anv_physical_device_to_handle(&instance
->physicalDevice
);
231 *pPhysicalDeviceCount
= 1;
233 *pPhysicalDeviceCount
= 0;
239 VkResult
anv_GetPhysicalDeviceFeatures(
240 VkPhysicalDevice physicalDevice
,
241 VkPhysicalDeviceFeatures
* pFeatures
)
243 anv_finishme("Get correct values for PhysicalDeviceFeatures");
245 *pFeatures
= (VkPhysicalDeviceFeatures
) {
246 .robustBufferAccess
= false,
247 .fullDrawIndexUint32
= false,
248 .imageCubeArray
= false,
249 .independentBlend
= false,
250 .geometryShader
= true,
251 .tessellationShader
= false,
252 .sampleRateShading
= false,
253 .dualSourceBlend
= true,
255 .instancedDrawIndirect
= true,
257 .depthBiasClamp
= false,
258 .fillModeNonSolid
= true,
259 .depthBounds
= false,
262 .textureCompressionETC2
= true,
263 .textureCompressionASTC_LDR
= true,
264 .textureCompressionBC
= true,
265 .pipelineStatisticsQuery
= true,
266 .vertexSideEffects
= false,
267 .tessellationSideEffects
= false,
268 .geometrySideEffects
= false,
269 .fragmentSideEffects
= false,
270 .shaderTessellationPointSize
= false,
271 .shaderGeometryPointSize
= true,
272 .shaderTextureGatherExtended
= true,
273 .shaderStorageImageExtendedFormats
= false,
274 .shaderStorageImageMultisample
= false,
275 .shaderStorageBufferArrayConstantIndexing
= false,
276 .shaderStorageImageArrayConstantIndexing
= false,
277 .shaderUniformBufferArrayDynamicIndexing
= true,
278 .shaderSampledImageArrayDynamicIndexing
= false,
279 .shaderStorageBufferArrayDynamicIndexing
= false,
280 .shaderStorageImageArrayDynamicIndexing
= false,
281 .shaderClipDistance
= false,
282 .shaderCullDistance
= false,
283 .shaderFloat64
= false,
284 .shaderInt64
= false,
285 .shaderFloat16
= false,
286 .shaderInt16
= false,
292 VkResult
anv_GetPhysicalDeviceLimits(
293 VkPhysicalDevice physicalDevice
,
294 VkPhysicalDeviceLimits
* pLimits
)
296 ANV_FROM_HANDLE(anv_physical_device
, physical_device
, physicalDevice
);
297 const struct brw_device_info
*devinfo
= physical_device
->info
;
299 anv_finishme("Get correct values for PhysicalDeviceLimits");
301 *pLimits
= (VkPhysicalDeviceLimits
) {
302 .maxImageDimension1D
= (1 << 14),
303 .maxImageDimension2D
= (1 << 14),
304 .maxImageDimension3D
= (1 << 10),
305 .maxImageDimensionCube
= (1 << 14),
306 .maxImageArrayLayers
= (1 << 10),
307 .maxTexelBufferSize
= (1 << 14),
308 .maxUniformBufferSize
= UINT32_MAX
,
309 .maxStorageBufferSize
= UINT32_MAX
,
310 .maxPushConstantsSize
= 128,
311 .maxMemoryAllocationCount
= UINT32_MAX
,
312 .bufferImageGranularity
= 64, /* A cache line */
313 .maxBoundDescriptorSets
= MAX_SETS
,
314 .maxDescriptorSets
= UINT32_MAX
,
315 .maxPerStageDescriptorSamplers
= 64,
316 .maxPerStageDescriptorUniformBuffers
= 64,
317 .maxPerStageDescriptorStorageBuffers
= 64,
318 .maxPerStageDescriptorSampledImages
= 64,
319 .maxPerStageDescriptorStorageImages
= 64,
320 .maxDescriptorSetSamplers
= 256,
321 .maxDescriptorSetUniformBuffers
= 256,
322 .maxDescriptorSetStorageBuffers
= 256,
323 .maxDescriptorSetSampledImages
= 256,
324 .maxDescriptorSetStorageImages
= 256,
325 .maxVertexInputAttributes
= 32,
326 .maxVertexInputAttributeOffset
= 256,
327 .maxVertexInputBindingStride
= 256,
328 .maxVertexOutputComponents
= 32,
329 .maxTessGenLevel
= 0,
330 .maxTessPatchSize
= 0,
331 .maxTessControlPerVertexInputComponents
= 0,
332 .maxTessControlPerVertexOutputComponents
= 0,
333 .maxTessControlPerPatchOutputComponents
= 0,
334 .maxTessControlTotalOutputComponents
= 0,
335 .maxTessEvaluationInputComponents
= 0,
336 .maxTessEvaluationOutputComponents
= 0,
337 .maxGeometryShaderInvocations
= 6,
338 .maxGeometryInputComponents
= 16,
339 .maxGeometryOutputComponents
= 16,
340 .maxGeometryOutputVertices
= 16,
341 .maxGeometryTotalOutputComponents
= 16,
342 .maxFragmentInputComponents
= 16,
343 .maxFragmentOutputBuffers
= 8,
344 .maxFragmentDualSourceBuffers
= 2,
345 .maxFragmentCombinedOutputResources
= 8,
346 .maxComputeSharedMemorySize
= 1024,
347 .maxComputeWorkGroupCount
= {
348 16 * devinfo
->max_cs_threads
,
349 16 * devinfo
->max_cs_threads
,
350 16 * devinfo
->max_cs_threads
,
352 .maxComputeWorkGroupInvocations
= 16 * devinfo
->max_cs_threads
,
353 .maxComputeWorkGroupSize
= {
354 16 * devinfo
->max_cs_threads
,
355 16 * devinfo
->max_cs_threads
,
356 16 * devinfo
->max_cs_threads
,
358 .subPixelPrecisionBits
= 4 /* FIXME */,
359 .subTexelPrecisionBits
= 4 /* FIXME */,
360 .mipmapPrecisionBits
= 4 /* FIXME */,
361 .maxDrawIndexedIndexValue
= UINT32_MAX
,
362 .maxDrawIndirectInstanceCount
= UINT32_MAX
,
363 .primitiveRestartForPatches
= UINT32_MAX
,
364 .maxSamplerLodBias
= 16,
365 .maxSamplerAnisotropy
= 16,
367 .maxDynamicViewportStates
= UINT32_MAX
,
368 .maxViewportDimensions
= { (1 << 14), (1 << 14) },
369 .viewportBoundsRange
= { -1.0, 1.0 }, /* FIXME */
370 .viewportSubPixelBits
= 13, /* We take a float? */
371 .minMemoryMapAlignment
= 64, /* A cache line */
372 .minTexelBufferOffsetAlignment
= 1,
373 .minUniformBufferOffsetAlignment
= 1,
374 .minStorageBufferOffsetAlignment
= 1,
375 .minTexelOffset
= 0, /* FIXME */
376 .maxTexelOffset
= 0, /* FIXME */
377 .minTexelGatherOffset
= 0, /* FIXME */
378 .maxTexelGatherOffset
= 0, /* FIXME */
379 .minInterpolationOffset
= 0, /* FIXME */
380 .maxInterpolationOffset
= 0, /* FIXME */
381 .subPixelInterpolationOffsetBits
= 0, /* FIXME */
382 .maxFramebufferWidth
= (1 << 14),
383 .maxFramebufferHeight
= (1 << 14),
384 .maxFramebufferLayers
= (1 << 10),
385 .maxFramebufferColorSamples
= 8,
386 .maxFramebufferDepthSamples
= 8,
387 .maxFramebufferStencilSamples
= 8,
388 .maxColorAttachments
= MAX_RTS
,
389 .maxSampledImageColorSamples
= 8,
390 .maxSampledImageDepthSamples
= 8,
391 .maxSampledImageIntegerSamples
= 1,
392 .maxStorageImageSamples
= 1,
393 .maxSampleMaskWords
= 1,
394 .timestampFrequency
= 1000 * 1000 * 1000 / 80,
395 .maxClipDistances
= 0 /* FIXME */,
396 .maxCullDistances
= 0 /* FIXME */,
397 .maxCombinedClipAndCullDistances
= 0 /* FIXME */,
398 .pointSizeRange
= { 0.125, 255.875 },
399 .lineWidthRange
= { 0.0, 7.9921875 },
400 .pointSizeGranularity
= (1.0 / 8.0),
401 .lineWidthGranularity
= (1.0 / 128.0),
407 VkResult
anv_GetPhysicalDeviceProperties(
408 VkPhysicalDevice physicalDevice
,
409 VkPhysicalDeviceProperties
* pProperties
)
411 ANV_FROM_HANDLE(anv_physical_device
, pdevice
, physicalDevice
);
413 *pProperties
= (VkPhysicalDeviceProperties
) {
414 .apiVersion
= VK_MAKE_VERSION(0, 138, 1),
417 .deviceId
= pdevice
->chipset_id
,
418 .deviceType
= VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
,
421 strcpy(pProperties
->deviceName
, pdevice
->name
);
422 snprintf((char *)pProperties
->pipelineCacheUUID
, VK_UUID_LENGTH
,
423 "anv-%s", MESA_GIT_SHA1
+ 4);
428 VkResult
anv_GetPhysicalDeviceQueueCount(
429 VkPhysicalDevice physicalDevice
,
437 VkResult
anv_GetPhysicalDeviceQueueProperties(
438 VkPhysicalDevice physicalDevice
,
440 VkPhysicalDeviceQueueProperties
* pQueueProperties
)
444 *pQueueProperties
= (VkPhysicalDeviceQueueProperties
) {
445 .queueFlags
= VK_QUEUE_GRAPHICS_BIT
|
446 VK_QUEUE_COMPUTE_BIT
|
449 .supportsTimestamps
= true,
455 VkResult
anv_GetPhysicalDeviceMemoryProperties(
456 VkPhysicalDevice physicalDevice
,
457 VkPhysicalDeviceMemoryProperties
* pMemoryProperties
)
459 ANV_FROM_HANDLE(anv_physical_device
, physical_device
, physicalDevice
);
460 VkDeviceSize heap_size
;
462 /* Reserve some wiggle room for the driver by exposing only 75% of the
463 * aperture to the heap.
465 heap_size
= 3 * physical_device
->aperture_size
/ 4;
467 /* The property flags below are valid only for llc platforms. */
468 pMemoryProperties
->memoryTypeCount
= 1;
469 pMemoryProperties
->memoryTypes
[0] = (VkMemoryType
) {
470 .propertyFlags
= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
,
474 pMemoryProperties
->memoryHeapCount
= 1;
475 pMemoryProperties
->memoryHeaps
[0] = (VkMemoryHeap
) {
477 .flags
= VK_MEMORY_HEAP_HOST_LOCAL
,
483 PFN_vkVoidFunction
anv_GetInstanceProcAddr(
487 return anv_lookup_entrypoint(pName
);
490 PFN_vkVoidFunction
anv_GetDeviceProcAddr(
494 return anv_lookup_entrypoint(pName
);
498 parse_debug_flags(struct anv_device
*device
)
500 const char *debug
, *p
, *end
;
502 debug
= getenv("INTEL_DEBUG");
503 device
->dump_aub
= false;
505 for (p
= debug
; *p
; p
= end
+ 1) {
506 end
= strchrnul(p
, ',');
507 if (end
- p
== 3 && memcmp(p
, "aub", 3) == 0)
508 device
->dump_aub
= true;
509 if (end
- p
== 5 && memcmp(p
, "no_hw", 5) == 0)
510 device
->no_hw
= true;
518 anv_queue_init(struct anv_device
*device
, struct anv_queue
*queue
)
520 queue
->device
= device
;
521 queue
->pool
= &device
->surface_state_pool
;
523 queue
->completed_serial
= anv_state_pool_alloc(queue
->pool
, 4, 4);
524 if (queue
->completed_serial
.map
== NULL
)
525 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY
);
527 *(uint32_t *)queue
->completed_serial
.map
= 0;
528 queue
->next_serial
= 1;
/* Release queue resources.  The completed-serial state is freed only
 * under valgrind to keep its leak checker quiet; otherwise the backing
 * pool is destroyed with the device anyway.
 */
static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}
545 anv_device_init_border_colors(struct anv_device
*device
)
547 static const VkClearColorValue border_colors
[] = {
548 [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK
] = { .f32
= { 0.0, 0.0, 0.0, 0.0 } },
549 [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK
] = { .f32
= { 0.0, 0.0, 0.0, 1.0 } },
550 [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE
] = { .f32
= { 1.0, 1.0, 1.0, 1.0 } },
551 [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK
] = { .u32
= { 0, 0, 0, 0 } },
552 [VK_BORDER_COLOR_INT_OPAQUE_BLACK
] = { .u32
= { 0, 0, 0, 1 } },
553 [VK_BORDER_COLOR_INT_OPAQUE_WHITE
] = { .u32
= { 1, 1, 1, 1 } },
556 device
->border_colors
=
557 anv_state_pool_alloc(&device
->dynamic_state_pool
,
558 sizeof(border_colors
), 32);
559 memcpy(device
->border_colors
.map
, border_colors
, sizeof(border_colors
));
562 VkResult
anv_CreateDevice(
563 VkPhysicalDevice physicalDevice
,
564 const VkDeviceCreateInfo
* pCreateInfo
,
567 ANV_FROM_HANDLE(anv_physical_device
, physical_device
, physicalDevice
);
568 struct anv_instance
*instance
= physical_device
->instance
;
569 struct anv_device
*device
;
571 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO
);
573 device
= anv_instance_alloc(instance
, sizeof(*device
), 8,
574 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
576 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
578 device
->no_hw
= physical_device
->no_hw
;
579 parse_debug_flags(device
);
581 device
->instance
= physical_device
->instance
;
583 /* XXX(chadv): Can we dup() physicalDevice->fd here? */
584 device
->fd
= open(physical_device
->path
, O_RDWR
| O_CLOEXEC
);
585 if (device
->fd
== -1)
588 device
->context_id
= anv_gem_create_context(device
);
589 if (device
->context_id
== -1)
592 anv_bo_pool_init(&device
->batch_bo_pool
, device
, ANV_CMD_BUFFER_BATCH_SIZE
);
594 anv_block_pool_init(&device
->dynamic_state_block_pool
, device
, 2048);
596 anv_state_pool_init(&device
->dynamic_state_pool
,
597 &device
->dynamic_state_block_pool
);
599 anv_block_pool_init(&device
->instruction_block_pool
, device
, 2048);
600 anv_block_pool_init(&device
->surface_state_block_pool
, device
, 2048);
602 anv_state_pool_init(&device
->surface_state_pool
,
603 &device
->surface_state_block_pool
);
605 anv_block_pool_init(&device
->scratch_block_pool
, device
, 0x10000);
607 device
->info
= *physical_device
->info
;
609 device
->compiler
= anv_compiler_create(device
);
610 device
->aub_writer
= NULL
;
612 pthread_mutex_init(&device
->mutex
, NULL
);
614 anv_queue_init(device
, &device
->queue
);
616 anv_device_init_meta(device
);
618 anv_device_init_border_colors(device
);
620 *pDevice
= anv_device_to_handle(device
);
627 anv_device_free(device
, device
);
629 return vk_error(VK_ERROR_UNAVAILABLE
);
632 VkResult
anv_DestroyDevice(
635 ANV_FROM_HANDLE(anv_device
, device
, _device
);
637 anv_compiler_destroy(device
->compiler
);
639 anv_queue_finish(&device
->queue
);
641 anv_device_finish_meta(device
);
644 /* We only need to free these to prevent valgrind errors. The backing
645 * BO will go away in a couple of lines so we don't actually leak.
647 anv_state_pool_free(&device
->dynamic_state_pool
, device
->border_colors
);
650 anv_bo_pool_finish(&device
->batch_bo_pool
);
651 anv_state_pool_finish(&device
->dynamic_state_pool
);
652 anv_block_pool_finish(&device
->dynamic_state_block_pool
);
653 anv_block_pool_finish(&device
->instruction_block_pool
);
654 anv_state_pool_finish(&device
->surface_state_pool
);
655 anv_block_pool_finish(&device
->surface_state_block_pool
);
656 anv_block_pool_finish(&device
->scratch_block_pool
);
660 if (device
->aub_writer
)
661 anv_aub_writer_destroy(device
->aub_writer
);
663 anv_instance_free(device
->instance
, device
);
668 static const VkExtensionProperties global_extensions
[] = {
670 .extName
= "VK_WSI_LunarG",
675 VkResult
anv_GetGlobalExtensionProperties(
676 const char* pLayerName
,
678 VkExtensionProperties
* pProperties
)
680 if (pProperties
== NULL
) {
681 *pCount
= ARRAY_SIZE(global_extensions
);
685 assert(*pCount
< ARRAY_SIZE(global_extensions
));
687 *pCount
= ARRAY_SIZE(global_extensions
);
688 memcpy(pProperties
, global_extensions
, sizeof(global_extensions
));
693 VkResult
anv_GetPhysicalDeviceExtensionProperties(
694 VkPhysicalDevice physicalDevice
,
695 const char* pLayerName
,
697 VkExtensionProperties
* pProperties
)
699 if (pProperties
== NULL
) {
704 /* None supported at this time */
705 return vk_error(VK_ERROR_INVALID_EXTENSION
);
708 VkResult
anv_GetGlobalLayerProperties(
710 VkLayerProperties
* pProperties
)
712 if (pProperties
== NULL
) {
717 /* None supported at this time */
718 return vk_error(VK_ERROR_INVALID_LAYER
);
721 VkResult
anv_GetPhysicalDeviceLayerProperties(
722 VkPhysicalDevice physicalDevice
,
724 VkLayerProperties
* pProperties
)
726 if (pProperties
== NULL
) {
731 /* None supported at this time */
732 return vk_error(VK_ERROR_INVALID_LAYER
);
735 VkResult
anv_GetDeviceQueue(
737 uint32_t queueNodeIndex
,
741 ANV_FROM_HANDLE(anv_device
, device
, _device
);
743 assert(queueIndex
== 0);
745 *pQueue
= anv_queue_to_handle(&device
->queue
);
750 VkResult
anv_QueueSubmit(
752 uint32_t cmdBufferCount
,
753 const VkCmdBuffer
* pCmdBuffers
,
756 ANV_FROM_HANDLE(anv_queue
, queue
, _queue
);
757 ANV_FROM_HANDLE(anv_fence
, fence
, _fence
);
758 struct anv_device
*device
= queue
->device
;
761 for (uint32_t i
= 0; i
< cmdBufferCount
; i
++) {
762 ANV_FROM_HANDLE(anv_cmd_buffer
, cmd_buffer
, pCmdBuffers
[i
]);
764 assert(cmd_buffer
->level
== VK_CMD_BUFFER_LEVEL_PRIMARY
);
766 if (device
->dump_aub
)
767 anv_cmd_buffer_dump(cmd_buffer
);
769 if (!device
->no_hw
) {
770 ret
= anv_gem_execbuffer(device
, &cmd_buffer
->execbuf2
.execbuf
);
772 return vk_error(VK_ERROR_UNKNOWN
);
775 ret
= anv_gem_execbuffer(device
, &fence
->execbuf
);
777 return vk_error(VK_ERROR_UNKNOWN
);
780 for (uint32_t i
= 0; i
< cmd_buffer
->execbuf2
.bo_count
; i
++)
781 cmd_buffer
->execbuf2
.bos
[i
]->offset
= cmd_buffer
->execbuf2
.objects
[i
].offset
;
783 *(uint32_t *)queue
->completed_serial
.map
= cmd_buffer
->serial
;
790 VkResult
anv_QueueWaitIdle(
793 ANV_FROM_HANDLE(anv_queue
, queue
, _queue
);
795 return vkDeviceWaitIdle(anv_device_to_handle(queue
->device
));
798 VkResult
anv_DeviceWaitIdle(
801 ANV_FROM_HANDLE(anv_device
, device
, _device
);
802 struct anv_state state
;
803 struct anv_batch batch
;
804 struct drm_i915_gem_execbuffer2 execbuf
;
805 struct drm_i915_gem_exec_object2 exec2_objects
[1];
806 struct anv_bo
*bo
= NULL
;
811 state
= anv_state_pool_alloc(&device
->dynamic_state_pool
, 32, 32);
812 bo
= &device
->dynamic_state_pool
.block_pool
->bo
;
813 batch
.start
= batch
.next
= state
.map
;
814 batch
.end
= state
.map
+ 32;
815 anv_batch_emit(&batch
, GEN8_MI_BATCH_BUFFER_END
);
816 anv_batch_emit(&batch
, GEN8_MI_NOOP
);
818 exec2_objects
[0].handle
= bo
->gem_handle
;
819 exec2_objects
[0].relocation_count
= 0;
820 exec2_objects
[0].relocs_ptr
= 0;
821 exec2_objects
[0].alignment
= 0;
822 exec2_objects
[0].offset
= bo
->offset
;
823 exec2_objects
[0].flags
= 0;
824 exec2_objects
[0].rsvd1
= 0;
825 exec2_objects
[0].rsvd2
= 0;
827 execbuf
.buffers_ptr
= (uintptr_t) exec2_objects
;
828 execbuf
.buffer_count
= 1;
829 execbuf
.batch_start_offset
= state
.offset
;
830 execbuf
.batch_len
= batch
.next
- state
.map
;
831 execbuf
.cliprects_ptr
= 0;
832 execbuf
.num_cliprects
= 0;
837 I915_EXEC_HANDLE_LUT
| I915_EXEC_NO_RELOC
| I915_EXEC_RENDER
;
838 execbuf
.rsvd1
= device
->context_id
;
841 if (!device
->no_hw
) {
842 ret
= anv_gem_execbuffer(device
, &execbuf
);
844 result
= vk_error(VK_ERROR_UNKNOWN
);
849 ret
= anv_gem_wait(device
, bo
->gem_handle
, &timeout
);
851 result
= vk_error(VK_ERROR_UNKNOWN
);
856 anv_state_pool_free(&device
->dynamic_state_pool
, state
);
861 anv_state_pool_free(&device
->dynamic_state_pool
, state
);
867 anv_device_alloc(struct anv_device
* device
,
870 VkSystemAllocType allocType
)
872 return anv_instance_alloc(device
->instance
, size
, alignment
, allocType
);
876 anv_device_free(struct anv_device
* device
,
879 anv_instance_free(device
->instance
, mem
);
883 anv_bo_init_new(struct anv_bo
*bo
, struct anv_device
*device
, uint64_t size
)
885 bo
->gem_handle
= anv_gem_create(device
, size
);
887 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY
);
897 VkResult
anv_AllocMemory(
899 const VkMemoryAllocInfo
* pAllocInfo
,
900 VkDeviceMemory
* pMem
)
902 ANV_FROM_HANDLE(anv_device
, device
, _device
);
903 struct anv_device_memory
*mem
;
906 assert(pAllocInfo
->sType
== VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO
);
908 if (pAllocInfo
->memoryTypeIndex
!= 0) {
909 /* We support exactly one memory heap. */
910 return vk_error(VK_ERROR_INVALID_VALUE
);
913 /* FINISHME: Fail if allocation request exceeds heap size. */
915 mem
= anv_device_alloc(device
, sizeof(*mem
), 8,
916 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
918 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
920 result
= anv_bo_init_new(&mem
->bo
, device
, pAllocInfo
->allocationSize
);
921 if (result
!= VK_SUCCESS
)
924 *pMem
= anv_device_memory_to_handle(mem
);
929 anv_device_free(device
, mem
);
934 VkResult
anv_FreeMemory(
938 ANV_FROM_HANDLE(anv_device
, device
, _device
);
939 ANV_FROM_HANDLE(anv_device_memory
, mem
, _mem
);
942 anv_gem_munmap(mem
->bo
.map
, mem
->bo
.size
);
944 if (mem
->bo
.gem_handle
!= 0)
945 anv_gem_close(device
, mem
->bo
.gem_handle
);
947 anv_device_free(device
, mem
);
952 VkResult
anv_MapMemory(
957 VkMemoryMapFlags flags
,
960 ANV_FROM_HANDLE(anv_device
, device
, _device
);
961 ANV_FROM_HANDLE(anv_device_memory
, mem
, _mem
);
963 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
964 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
965 * at a time is valid. We could just mmap up front and return an offset
966 * pointer here, but that may exhaust virtual memory on 32 bit
969 mem
->map
= anv_gem_mmap(device
, mem
->bo
.gem_handle
, offset
, size
);
970 mem
->map_size
= size
;
977 VkResult
anv_UnmapMemory(
981 ANV_FROM_HANDLE(anv_device_memory
, mem
, _mem
);
983 anv_gem_munmap(mem
->map
, mem
->map_size
);
988 VkResult
anv_FlushMappedMemoryRanges(
990 uint32_t memRangeCount
,
991 const VkMappedMemoryRange
* pMemRanges
)
993 /* clflush here for !llc platforms */
998 VkResult
anv_InvalidateMappedMemoryRanges(
1000 uint32_t memRangeCount
,
1001 const VkMappedMemoryRange
* pMemRanges
)
1003 return anv_FlushMappedMemoryRanges(device
, memRangeCount
, pMemRanges
);
1006 VkResult
anv_GetBufferMemoryRequirements(
1009 VkMemoryRequirements
* pMemoryRequirements
)
1011 ANV_FROM_HANDLE(anv_buffer
, buffer
, _buffer
);
1013 /* The Vulkan spec (git aaed022) says:
1015 * memoryTypeBits is a bitfield and contains one bit set for every
1016 * supported memory type for the resource. The bit `1<<i` is set if and
1017 * only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1018 * structure for the physical device is supported.
1020 * We support exactly one memory type.
1022 pMemoryRequirements
->memoryTypeBits
= 1;
1024 pMemoryRequirements
->size
= buffer
->size
;
1025 pMemoryRequirements
->alignment
= 16;
1030 VkResult
anv_GetImageMemoryRequirements(
1033 VkMemoryRequirements
* pMemoryRequirements
)
1035 ANV_FROM_HANDLE(anv_image
, image
, _image
);
1037 /* The Vulkan spec (git aaed022) says:
1039 * memoryTypeBits is a bitfield and contains one bit set for every
1040 * supported memory type for the resource. The bit `1<<i` is set if and
1041 * only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1042 * structure for the physical device is supported.
1044 * We support exactly one memory type.
1046 pMemoryRequirements
->memoryTypeBits
= 1;
1048 pMemoryRequirements
->size
= image
->size
;
1049 pMemoryRequirements
->alignment
= image
->alignment
;
1054 VkResult
anv_GetImageSparseMemoryRequirements(
1057 uint32_t* pNumRequirements
,
1058 VkSparseImageMemoryRequirements
* pSparseMemoryRequirements
)
1060 return vk_error(VK_UNSUPPORTED
);
1063 VkResult
anv_GetDeviceMemoryCommitment(
1065 VkDeviceMemory memory
,
1066 VkDeviceSize
* pCommittedMemoryInBytes
)
1068 *pCommittedMemoryInBytes
= 0;
1069 stub_return(VK_SUCCESS
);
1072 VkResult
anv_BindBufferMemory(
1075 VkDeviceMemory _mem
,
1076 VkDeviceSize memOffset
)
1078 ANV_FROM_HANDLE(anv_device_memory
, mem
, _mem
);
1079 ANV_FROM_HANDLE(anv_buffer
, buffer
, _buffer
);
1081 buffer
->bo
= &mem
->bo
;
1082 buffer
->offset
= memOffset
;
1087 VkResult
anv_BindImageMemory(
1090 VkDeviceMemory _mem
,
1091 VkDeviceSize memOffset
)
1093 ANV_FROM_HANDLE(anv_device_memory
, mem
, _mem
);
1094 ANV_FROM_HANDLE(anv_image
, image
, _image
);
1096 image
->bo
= &mem
->bo
;
1097 image
->offset
= memOffset
;
1102 VkResult
anv_QueueBindSparseBufferMemory(
1105 uint32_t numBindings
,
1106 const VkSparseMemoryBindInfo
* pBindInfo
)
1108 stub_return(VK_UNSUPPORTED
);
1111 VkResult
anv_QueueBindSparseImageOpaqueMemory(
1114 uint32_t numBindings
,
1115 const VkSparseMemoryBindInfo
* pBindInfo
)
1117 stub_return(VK_UNSUPPORTED
);
1120 VkResult
anv_QueueBindSparseImageMemory(
1123 uint32_t numBindings
,
1124 const VkSparseImageMemoryBindInfo
* pBindInfo
)
1126 stub_return(VK_UNSUPPORTED
);
1129 VkResult
anv_CreateFence(
1131 const VkFenceCreateInfo
* pCreateInfo
,
1134 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1135 struct anv_fence
*fence
;
1136 struct anv_batch batch
;
1139 const uint32_t fence_size
= 128;
1141 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_FENCE_CREATE_INFO
);
1143 fence
= anv_device_alloc(device
, sizeof(*fence
), 8,
1144 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1146 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1148 result
= anv_bo_init_new(&fence
->bo
, device
, fence_size
);
1149 if (result
!= VK_SUCCESS
)
1153 anv_gem_mmap(device
, fence
->bo
.gem_handle
, 0, fence
->bo
.size
);
1154 batch
.next
= batch
.start
= fence
->bo
.map
;
1155 batch
.end
= fence
->bo
.map
+ fence
->bo
.size
;
1156 anv_batch_emit(&batch
, GEN8_MI_BATCH_BUFFER_END
);
1157 anv_batch_emit(&batch
, GEN8_MI_NOOP
);
1159 fence
->exec2_objects
[0].handle
= fence
->bo
.gem_handle
;
1160 fence
->exec2_objects
[0].relocation_count
= 0;
1161 fence
->exec2_objects
[0].relocs_ptr
= 0;
1162 fence
->exec2_objects
[0].alignment
= 0;
1163 fence
->exec2_objects
[0].offset
= fence
->bo
.offset
;
1164 fence
->exec2_objects
[0].flags
= 0;
1165 fence
->exec2_objects
[0].rsvd1
= 0;
1166 fence
->exec2_objects
[0].rsvd2
= 0;
1168 fence
->execbuf
.buffers_ptr
= (uintptr_t) fence
->exec2_objects
;
1169 fence
->execbuf
.buffer_count
= 1;
1170 fence
->execbuf
.batch_start_offset
= 0;
1171 fence
->execbuf
.batch_len
= batch
.next
- fence
->bo
.map
;
1172 fence
->execbuf
.cliprects_ptr
= 0;
1173 fence
->execbuf
.num_cliprects
= 0;
1174 fence
->execbuf
.DR1
= 0;
1175 fence
->execbuf
.DR4
= 0;
1177 fence
->execbuf
.flags
=
1178 I915_EXEC_HANDLE_LUT
| I915_EXEC_NO_RELOC
| I915_EXEC_RENDER
;
1179 fence
->execbuf
.rsvd1
= device
->context_id
;
1180 fence
->execbuf
.rsvd2
= 0;
1182 *pFence
= anv_fence_to_handle(fence
);
1187 anv_device_free(device
, fence
);
1192 VkResult
anv_DestroyFence(
1196 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1197 ANV_FROM_HANDLE(anv_fence
, fence
, _fence
);
1199 anv_gem_munmap(fence
->bo
.map
, fence
->bo
.size
);
1200 anv_gem_close(device
, fence
->bo
.gem_handle
);
1201 anv_device_free(device
, fence
);
1206 VkResult
anv_ResetFences(
1208 uint32_t fenceCount
,
1209 const VkFence
* pFences
)
1211 for (uint32_t i
= 0; i
< fenceCount
; i
++) {
1212 ANV_FROM_HANDLE(anv_fence
, fence
, pFences
[i
]);
1213 fence
->ready
= false;
1219 VkResult
anv_GetFenceStatus(
1223 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1224 ANV_FROM_HANDLE(anv_fence
, fence
, _fence
);
1231 ret
= anv_gem_wait(device
, fence
->bo
.gem_handle
, &t
);
1233 fence
->ready
= true;
1237 return VK_NOT_READY
;
1240 VkResult
anv_WaitForFences(
1242 uint32_t fenceCount
,
1243 const VkFence
* pFences
,
1247 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1248 int64_t t
= timeout
;
1251 /* FIXME: handle !waitAll */
1253 for (uint32_t i
= 0; i
< fenceCount
; i
++) {
1254 ANV_FROM_HANDLE(anv_fence
, fence
, pFences
[i
]);
1255 ret
= anv_gem_wait(device
, fence
->bo
.gem_handle
, &t
);
1256 if (ret
== -1 && errno
== ETIME
)
1259 return vk_error(VK_ERROR_UNKNOWN
);
1265 // Queue semaphore functions
1267 VkResult
anv_CreateSemaphore(
1269 const VkSemaphoreCreateInfo
* pCreateInfo
,
1270 VkSemaphore
* pSemaphore
)
1272 stub_return(VK_UNSUPPORTED
);
1275 VkResult
anv_DestroySemaphore(
1277 VkSemaphore semaphore
)
1279 stub_return(VK_UNSUPPORTED
);
1282 VkResult
anv_QueueSignalSemaphore(
1284 VkSemaphore semaphore
)
1286 stub_return(VK_UNSUPPORTED
);
1289 VkResult
anv_QueueWaitSemaphore(
1291 VkSemaphore semaphore
)
1293 stub_return(VK_UNSUPPORTED
);
1298 VkResult
anv_CreateEvent(
1300 const VkEventCreateInfo
* pCreateInfo
,
1303 stub_return(VK_UNSUPPORTED
);
1306 VkResult
anv_DestroyEvent(
1310 stub_return(VK_UNSUPPORTED
);
1313 VkResult
anv_GetEventStatus(
1317 stub_return(VK_UNSUPPORTED
);
1320 VkResult
anv_SetEvent(
1324 stub_return(VK_UNSUPPORTED
);
1327 VkResult
anv_ResetEvent(
1331 stub_return(VK_UNSUPPORTED
);
1336 VkResult
anv_CreateBuffer(
1338 const VkBufferCreateInfo
* pCreateInfo
,
1341 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1342 struct anv_buffer
*buffer
;
1344 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO
);
1346 buffer
= anv_device_alloc(device
, sizeof(*buffer
), 8,
1347 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1349 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1351 buffer
->size
= pCreateInfo
->size
;
1355 *pBuffer
= anv_buffer_to_handle(buffer
);
1360 VkResult
anv_DestroyBuffer(
1364 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1365 ANV_FROM_HANDLE(anv_buffer
, buffer
, _buffer
);
1367 anv_device_free(device
, buffer
);
1372 // Buffer view functions
1375 anv_fill_buffer_surface_state(void *state
, VkFormat format
,
1376 uint32_t offset
, uint32_t range
)
1378 const struct anv_format
*info
;
1380 info
= anv_format_for_vk_format(format
);
1381 /* This assumes RGBA float format. */
1382 uint32_t stride
= 4;
1383 uint32_t num_elements
= range
/ stride
;
1385 struct GEN8_RENDER_SURFACE_STATE surface_state
= {
1386 .SurfaceType
= SURFTYPE_BUFFER
,
1387 .SurfaceArray
= false,
1388 .SurfaceFormat
= info
->surface_format
,
1389 .SurfaceVerticalAlignment
= VALIGN4
,
1390 .SurfaceHorizontalAlignment
= HALIGN4
,
1392 .VerticalLineStride
= 0,
1393 .VerticalLineStrideOffset
= 0,
1394 .SamplerL2BypassModeDisable
= true,
1395 .RenderCacheReadWriteMode
= WriteOnlyCache
,
1396 .MemoryObjectControlState
= GEN8_MOCS
,
1397 .BaseMipLevel
= 0.0,
1399 .Height
= (num_elements
>> 7) & 0x3fff,
1400 .Width
= num_elements
& 0x7f,
1401 .Depth
= (num_elements
>> 21) & 0x3f,
1402 .SurfacePitch
= stride
- 1,
1403 .MinimumArrayElement
= 0,
1404 .NumberofMultisamples
= MULTISAMPLECOUNT_1
,
1409 .AuxiliarySurfaceMode
= AUX_NONE
,
1411 .GreenClearColor
= 0,
1412 .BlueClearColor
= 0,
1413 .AlphaClearColor
= 0,
1414 .ShaderChannelSelectRed
= SCS_RED
,
1415 .ShaderChannelSelectGreen
= SCS_GREEN
,
1416 .ShaderChannelSelectBlue
= SCS_BLUE
,
1417 .ShaderChannelSelectAlpha
= SCS_ALPHA
,
1418 .ResourceMinLOD
= 0.0,
1419 /* FIXME: We assume that the image must be bound at this time. */
1420 .SurfaceBaseAddress
= { NULL
, offset
},
1423 GEN8_RENDER_SURFACE_STATE_pack(NULL
, state
, &surface_state
);
1426 VkResult
anv_CreateBufferView(
1428 const VkBufferViewCreateInfo
* pCreateInfo
,
1429 VkBufferView
* pView
)
1431 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1432 ANV_FROM_HANDLE(anv_buffer
, buffer
, pCreateInfo
->buffer
);
1433 struct anv_buffer_view
*bview
;
1434 struct anv_surface_view
*view
;
1436 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
);
1438 bview
= anv_device_alloc(device
, sizeof(*view
), 8,
1439 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1441 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1443 view
= &bview
->view
;
1444 view
->bo
= buffer
->bo
;
1445 view
->offset
= buffer
->offset
+ pCreateInfo
->offset
;
1446 view
->surface_state
=
1447 anv_state_pool_alloc(&device
->surface_state_pool
, 64, 64);
1448 view
->format
= pCreateInfo
->format
;
1449 view
->range
= pCreateInfo
->range
;
1451 anv_fill_buffer_surface_state(view
->surface_state
.map
,
1452 pCreateInfo
->format
,
1453 view
->offset
, pCreateInfo
->range
);
1455 *pView
= anv_buffer_view_to_handle(bview
);
1460 VkResult
anv_DestroyBufferView(
1462 VkBufferView _bview
)
1464 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1465 ANV_FROM_HANDLE(anv_buffer_view
, bview
, _bview
);
1467 anv_surface_view_fini(device
, &bview
->view
);
1468 anv_device_free(device
, bview
);
1473 // Sampler functions
1475 VkResult
anv_CreateSampler(
1477 const VkSamplerCreateInfo
* pCreateInfo
,
1478 VkSampler
* pSampler
)
1480 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1481 struct anv_sampler
*sampler
;
1482 uint32_t mag_filter
, min_filter
, max_anisotropy
;
1484 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
);
1486 sampler
= anv_device_alloc(device
, sizeof(*sampler
), 8,
1487 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1489 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1491 static const uint32_t vk_to_gen_tex_filter
[] = {
1492 [VK_TEX_FILTER_NEAREST
] = MAPFILTER_NEAREST
,
1493 [VK_TEX_FILTER_LINEAR
] = MAPFILTER_LINEAR
1496 static const uint32_t vk_to_gen_mipmap_mode
[] = {
1497 [VK_TEX_MIPMAP_MODE_BASE
] = MIPFILTER_NONE
,
1498 [VK_TEX_MIPMAP_MODE_NEAREST
] = MIPFILTER_NEAREST
,
1499 [VK_TEX_MIPMAP_MODE_LINEAR
] = MIPFILTER_LINEAR
1502 static const uint32_t vk_to_gen_tex_address
[] = {
1503 [VK_TEX_ADDRESS_WRAP
] = TCM_WRAP
,
1504 [VK_TEX_ADDRESS_MIRROR
] = TCM_MIRROR
,
1505 [VK_TEX_ADDRESS_CLAMP
] = TCM_CLAMP
,
1506 [VK_TEX_ADDRESS_MIRROR_ONCE
] = TCM_MIRROR_ONCE
,
1507 [VK_TEX_ADDRESS_CLAMP_BORDER
] = TCM_CLAMP_BORDER
,
1510 static const uint32_t vk_to_gen_compare_op
[] = {
1511 [VK_COMPARE_OP_NEVER
] = PREFILTEROPNEVER
,
1512 [VK_COMPARE_OP_LESS
] = PREFILTEROPLESS
,
1513 [VK_COMPARE_OP_EQUAL
] = PREFILTEROPEQUAL
,
1514 [VK_COMPARE_OP_LESS_EQUAL
] = PREFILTEROPLEQUAL
,
1515 [VK_COMPARE_OP_GREATER
] = PREFILTEROPGREATER
,
1516 [VK_COMPARE_OP_NOT_EQUAL
] = PREFILTEROPNOTEQUAL
,
1517 [VK_COMPARE_OP_GREATER_EQUAL
] = PREFILTEROPGEQUAL
,
1518 [VK_COMPARE_OP_ALWAYS
] = PREFILTEROPALWAYS
,
1521 if (pCreateInfo
->maxAnisotropy
> 1) {
1522 mag_filter
= MAPFILTER_ANISOTROPIC
;
1523 min_filter
= MAPFILTER_ANISOTROPIC
;
1524 max_anisotropy
= (pCreateInfo
->maxAnisotropy
- 2) / 2;
1526 mag_filter
= vk_to_gen_tex_filter
[pCreateInfo
->magFilter
];
1527 min_filter
= vk_to_gen_tex_filter
[pCreateInfo
->minFilter
];
1528 max_anisotropy
= RATIO21
;
1531 struct GEN8_SAMPLER_STATE sampler_state
= {
1532 .SamplerDisable
= false,
1533 .TextureBorderColorMode
= DX10OGL
,
1534 .LODPreClampMode
= 0,
1535 .BaseMipLevel
= 0.0,
1536 .MipModeFilter
= vk_to_gen_mipmap_mode
[pCreateInfo
->mipMode
],
1537 .MagModeFilter
= mag_filter
,
1538 .MinModeFilter
= min_filter
,
1539 .TextureLODBias
= pCreateInfo
->mipLodBias
* 256,
1540 .AnisotropicAlgorithm
= EWAApproximation
,
1541 .MinLOD
= pCreateInfo
->minLod
,
1542 .MaxLOD
= pCreateInfo
->maxLod
,
1543 .ChromaKeyEnable
= 0,
1544 .ChromaKeyIndex
= 0,
1546 .ShadowFunction
= vk_to_gen_compare_op
[pCreateInfo
->compareOp
],
1547 .CubeSurfaceControlMode
= 0,
1549 .IndirectStatePointer
=
1550 device
->border_colors
.offset
+
1551 pCreateInfo
->borderColor
* sizeof(float) * 4,
1553 .LODClampMagnificationMode
= MIPNONE
,
1554 .MaximumAnisotropy
= max_anisotropy
,
1555 .RAddressMinFilterRoundingEnable
= 0,
1556 .RAddressMagFilterRoundingEnable
= 0,
1557 .VAddressMinFilterRoundingEnable
= 0,
1558 .VAddressMagFilterRoundingEnable
= 0,
1559 .UAddressMinFilterRoundingEnable
= 0,
1560 .UAddressMagFilterRoundingEnable
= 0,
1561 .TrilinearFilterQuality
= 0,
1562 .NonnormalizedCoordinateEnable
= 0,
1563 .TCXAddressControlMode
= vk_to_gen_tex_address
[pCreateInfo
->addressU
],
1564 .TCYAddressControlMode
= vk_to_gen_tex_address
[pCreateInfo
->addressV
],
1565 .TCZAddressControlMode
= vk_to_gen_tex_address
[pCreateInfo
->addressW
],
1568 GEN8_SAMPLER_STATE_pack(NULL
, sampler
->state
, &sampler_state
);
1570 *pSampler
= anv_sampler_to_handle(sampler
);
1575 VkResult
anv_DestroySampler(
1579 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1580 ANV_FROM_HANDLE(anv_sampler
, sampler
, _sampler
);
1582 anv_device_free(device
, sampler
);
1587 // Descriptor set functions
1589 VkResult
anv_CreateDescriptorSetLayout(
1591 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
1592 VkDescriptorSetLayout
* pSetLayout
)
1594 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1595 struct anv_descriptor_set_layout
*set_layout
;
1597 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
1599 uint32_t sampler_count
[VK_SHADER_STAGE_NUM
] = { 0, };
1600 uint32_t surface_count
[VK_SHADER_STAGE_NUM
] = { 0, };
1601 uint32_t num_dynamic_buffers
= 0;
1603 uint32_t stages
= 0;
1606 for (uint32_t i
= 0; i
< pCreateInfo
->count
; i
++) {
1607 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1608 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1609 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1610 for_each_bit(s
, pCreateInfo
->pBinding
[i
].stageFlags
)
1611 sampler_count
[s
] += pCreateInfo
->pBinding
[i
].arraySize
;
1617 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1618 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1619 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1620 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1621 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
1622 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
1623 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
1624 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
1625 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1626 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1627 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1628 for_each_bit(s
, pCreateInfo
->pBinding
[i
].stageFlags
)
1629 surface_count
[s
] += pCreateInfo
->pBinding
[i
].arraySize
;
1635 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1636 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1637 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1638 num_dynamic_buffers
+= pCreateInfo
->pBinding
[i
].arraySize
;
1644 stages
|= pCreateInfo
->pBinding
[i
].stageFlags
;
1645 count
+= pCreateInfo
->pBinding
[i
].arraySize
;
1648 uint32_t sampler_total
= 0;
1649 uint32_t surface_total
= 0;
1650 for (uint32_t s
= 0; s
< VK_SHADER_STAGE_NUM
; s
++) {
1651 sampler_total
+= sampler_count
[s
];
1652 surface_total
+= surface_count
[s
];
1655 size_t size
= sizeof(*set_layout
) +
1656 (sampler_total
+ surface_total
) * sizeof(set_layout
->entries
[0]);
1657 set_layout
= anv_device_alloc(device
, size
, 8,
1658 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1660 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1662 set_layout
->num_dynamic_buffers
= num_dynamic_buffers
;
1663 set_layout
->count
= count
;
1664 set_layout
->shader_stages
= stages
;
1666 struct anv_descriptor_slot
*p
= set_layout
->entries
;
1667 struct anv_descriptor_slot
*sampler
[VK_SHADER_STAGE_NUM
];
1668 struct anv_descriptor_slot
*surface
[VK_SHADER_STAGE_NUM
];
1669 for (uint32_t s
= 0; s
< VK_SHADER_STAGE_NUM
; s
++) {
1670 set_layout
->stage
[s
].surface_count
= surface_count
[s
];
1671 set_layout
->stage
[s
].surface_start
= surface
[s
] = p
;
1672 p
+= surface_count
[s
];
1673 set_layout
->stage
[s
].sampler_count
= sampler_count
[s
];
1674 set_layout
->stage
[s
].sampler_start
= sampler
[s
] = p
;
1675 p
+= sampler_count
[s
];
1678 uint32_t descriptor
= 0;
1679 int8_t dynamic_slot
= 0;
1681 for (uint32_t i
= 0; i
< pCreateInfo
->count
; i
++) {
1682 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1683 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1684 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1685 for_each_bit(s
, pCreateInfo
->pBinding
[i
].stageFlags
)
1686 for (uint32_t j
= 0; j
< pCreateInfo
->pBinding
[i
].arraySize
; j
++) {
1687 sampler
[s
]->index
= descriptor
+ j
;
1688 sampler
[s
]->dynamic_slot
= -1;
1696 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1697 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1698 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1706 switch (pCreateInfo
->pBinding
[i
].descriptorType
) {
1707 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1708 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1709 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1710 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
1711 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
1712 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
1713 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
1714 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1715 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1716 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1717 for_each_bit(s
, pCreateInfo
->pBinding
[i
].stageFlags
)
1718 for (uint32_t j
= 0; j
< pCreateInfo
->pBinding
[i
].arraySize
; j
++) {
1719 surface
[s
]->index
= descriptor
+ j
;
1721 surface
[s
]->dynamic_slot
= dynamic_slot
+ j
;
1723 surface
[s
]->dynamic_slot
= -1;
1732 dynamic_slot
+= pCreateInfo
->pBinding
[i
].arraySize
;
1734 descriptor
+= pCreateInfo
->pBinding
[i
].arraySize
;
1737 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
1742 VkResult
anv_DestroyDescriptorSetLayout(
1744 VkDescriptorSetLayout _set_layout
)
1746 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1747 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
1749 anv_device_free(device
, set_layout
);
1754 VkResult
anv_CreateDescriptorPool(
1756 VkDescriptorPoolUsage poolUsage
,
1758 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
1759 VkDescriptorPool
* pDescriptorPool
)
1761 anv_finishme("VkDescriptorPool is a stub");
1762 pDescriptorPool
->handle
= 1;
1766 VkResult
anv_DestroyDescriptorPool(
1768 VkDescriptorPool _pool
)
1770 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
1774 VkResult
anv_ResetDescriptorPool(
1776 VkDescriptorPool descriptorPool
)
1778 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
1783 anv_descriptor_set_create(struct anv_device
*device
,
1784 const struct anv_descriptor_set_layout
*layout
,
1785 struct anv_descriptor_set
**out_set
)
1787 struct anv_descriptor_set
*set
;
1788 size_t size
= sizeof(*set
) + layout
->count
* sizeof(set
->descriptors
[0]);
1790 set
= anv_device_alloc(device
, size
, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1792 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1794 /* A descriptor set may not be 100% filled. Clear the set so we can can
1795 * later detect holes in it.
1797 memset(set
, 0, size
);
/* Free a descriptor set previously created by anv_descriptor_set_create. */
static void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set)
{
   anv_device_free(device, set);
}
1811 VkResult
anv_AllocDescriptorSets(
1813 VkDescriptorPool descriptorPool
,
1814 VkDescriptorSetUsage setUsage
,
1816 const VkDescriptorSetLayout
* pSetLayouts
,
1817 VkDescriptorSet
* pDescriptorSets
,
1820 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1823 struct anv_descriptor_set
*set
;
1825 for (uint32_t i
= 0; i
< count
; i
++) {
1826 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
, pSetLayouts
[i
]);
1828 result
= anv_descriptor_set_create(device
, layout
, &set
);
1829 if (result
!= VK_SUCCESS
) {
1834 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
1842 VkResult
anv_FreeDescriptorSets(
1844 VkDescriptorPool descriptorPool
,
1846 const VkDescriptorSet
* pDescriptorSets
)
1848 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1850 for (uint32_t i
= 0; i
< count
; i
++) {
1851 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
1853 anv_descriptor_set_destroy(device
, set
);
1859 VkResult
anv_UpdateDescriptorSets(
1861 uint32_t writeCount
,
1862 const VkWriteDescriptorSet
* pDescriptorWrites
,
1864 const VkCopyDescriptorSet
* pDescriptorCopies
)
1866 for (uint32_t i
= 0; i
< writeCount
; i
++) {
1867 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
1868 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->destSet
);
1870 switch (write
->descriptorType
) {
1871 case VK_DESCRIPTOR_TYPE_SAMPLER
:
1872 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
1873 for (uint32_t j
= 0; j
< write
->count
; j
++) {
1874 set
->descriptors
[write
->destBinding
+ j
].sampler
=
1875 anv_sampler_from_handle(write
->pDescriptors
[j
].sampler
);
1878 if (write
->descriptorType
== VK_DESCRIPTOR_TYPE_SAMPLER
)
1883 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
1884 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
1885 for (uint32_t j
= 0; j
< write
->count
; j
++) {
1886 ANV_FROM_HANDLE(anv_image_view
, iview
,
1887 write
->pDescriptors
[j
].imageView
);
1888 set
->descriptors
[write
->destBinding
+ j
].view
= &iview
->view
;
1892 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
1893 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
1894 anv_finishme("texel buffers not implemented");
1897 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
1898 anv_finishme("input attachments not implemented");
1901 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
1902 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
1903 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
1904 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
1905 for (uint32_t j
= 0; j
< write
->count
; j
++) {
1906 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
1907 write
->pDescriptors
[j
].bufferView
);
1908 set
->descriptors
[write
->destBinding
+ j
].view
= &bview
->view
;
1916 for (uint32_t i
= 0; i
< copyCount
; i
++) {
1917 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
1918 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->destSet
);
1919 ANV_FROM_HANDLE(anv_descriptor_set
, dest
, copy
->destSet
);
1920 for (uint32_t j
= 0; j
< copy
->count
; j
++) {
1921 dest
->descriptors
[copy
->destBinding
+ j
] =
1922 src
->descriptors
[copy
->srcBinding
+ j
];
1929 // State object functions
/* Clamp x into [min, max]. Used with int64_t so scissor math can
 * overflow safely before being clamped to hardware limits.
 *
 * NOTE(review): the function body was dropped by the extraction; this
 * implementation follows directly from the name and its callers.
 */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   if (x < min)
      return min;
   else if (x < max)
      return x;
   else
      return max;
}
1942 VkResult
anv_CreateDynamicViewportState(
1944 const VkDynamicViewportStateCreateInfo
* pCreateInfo
,
1945 VkDynamicViewportState
* pState
)
1947 ANV_FROM_HANDLE(anv_device
, device
, _device
);
1948 struct anv_dynamic_vp_state
*state
;
1950 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO
);
1952 state
= anv_device_alloc(device
, sizeof(*state
), 8,
1953 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
1955 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
1957 unsigned count
= pCreateInfo
->viewportAndScissorCount
;
1958 state
->sf_clip_vp
= anv_state_pool_alloc(&device
->dynamic_state_pool
,
1960 state
->cc_vp
= anv_state_pool_alloc(&device
->dynamic_state_pool
,
1962 state
->scissor
= anv_state_pool_alloc(&device
->dynamic_state_pool
,
1965 for (uint32_t i
= 0; i
< pCreateInfo
->viewportAndScissorCount
; i
++) {
1966 const VkViewport
*vp
= &pCreateInfo
->pViewports
[i
];
1967 const VkRect2D
*s
= &pCreateInfo
->pScissors
[i
];
1969 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport
= {
1970 .ViewportMatrixElementm00
= vp
->width
/ 2,
1971 .ViewportMatrixElementm11
= vp
->height
/ 2,
1972 .ViewportMatrixElementm22
= (vp
->maxDepth
- vp
->minDepth
) / 2,
1973 .ViewportMatrixElementm30
= vp
->originX
+ vp
->width
/ 2,
1974 .ViewportMatrixElementm31
= vp
->originY
+ vp
->height
/ 2,
1975 .ViewportMatrixElementm32
= (vp
->maxDepth
+ vp
->minDepth
) / 2,
1976 .XMinClipGuardband
= -1.0f
,
1977 .XMaxClipGuardband
= 1.0f
,
1978 .YMinClipGuardband
= -1.0f
,
1979 .YMaxClipGuardband
= 1.0f
,
1980 .XMinViewPort
= vp
->originX
,
1981 .XMaxViewPort
= vp
->originX
+ vp
->width
- 1,
1982 .YMinViewPort
= vp
->originY
,
1983 .YMaxViewPort
= vp
->originY
+ vp
->height
- 1,
1986 struct GEN8_CC_VIEWPORT cc_viewport
= {
1987 .MinimumDepth
= vp
->minDepth
,
1988 .MaximumDepth
= vp
->maxDepth
1991 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1992 * ymax < ymin for empty clips. In case clip x, y, width height are all
1993 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1994 * what we want. Just special case empty clips and produce a canonical
1996 static const struct GEN8_SCISSOR_RECT empty_scissor
= {
1997 .ScissorRectangleYMin
= 1,
1998 .ScissorRectangleXMin
= 1,
1999 .ScissorRectangleYMax
= 0,
2000 .ScissorRectangleXMax
= 0
2003 const int max
= 0xffff;
2004 struct GEN8_SCISSOR_RECT scissor
= {
2005 /* Do this math using int64_t so overflow gets clamped correctly. */
2006 .ScissorRectangleYMin
= clamp_int64(s
->offset
.y
, 0, max
),
2007 .ScissorRectangleXMin
= clamp_int64(s
->offset
.x
, 0, max
),
2008 .ScissorRectangleYMax
= clamp_int64((uint64_t) s
->offset
.y
+ s
->extent
.height
- 1, 0, max
),
2009 .ScissorRectangleXMax
= clamp_int64((uint64_t) s
->offset
.x
+ s
->extent
.width
- 1, 0, max
)
2012 GEN8_SF_CLIP_VIEWPORT_pack(NULL
, state
->sf_clip_vp
.map
+ i
* 64, &sf_clip_viewport
);
2013 GEN8_CC_VIEWPORT_pack(NULL
, state
->cc_vp
.map
+ i
* 32, &cc_viewport
);
2015 if (s
->extent
.width
<= 0 || s
->extent
.height
<= 0) {
2016 GEN8_SCISSOR_RECT_pack(NULL
, state
->scissor
.map
+ i
* 32, &empty_scissor
);
2018 GEN8_SCISSOR_RECT_pack(NULL
, state
->scissor
.map
+ i
* 32, &scissor
);
2022 *pState
= anv_dynamic_vp_state_to_handle(state
);
2027 VkResult
anv_DestroyDynamicViewportState(
2029 VkDynamicViewportState _vp_state
)
2031 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2032 ANV_FROM_HANDLE(anv_dynamic_vp_state
, vp_state
, _vp_state
);
2034 anv_state_pool_free(&device
->dynamic_state_pool
, vp_state
->sf_clip_vp
);
2035 anv_state_pool_free(&device
->dynamic_state_pool
, vp_state
->cc_vp
);
2036 anv_state_pool_free(&device
->dynamic_state_pool
, vp_state
->scissor
);
2038 anv_device_free(device
, vp_state
);
2043 VkResult
anv_CreateDynamicRasterState(
2045 const VkDynamicRasterStateCreateInfo
* pCreateInfo
,
2046 VkDynamicRasterState
* pState
)
2048 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2049 struct anv_dynamic_rs_state
*state
;
2051 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DYNAMIC_RASTER_STATE_CREATE_INFO
);
2053 state
= anv_device_alloc(device
, sizeof(*state
), 8,
2054 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2056 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
2058 struct GEN8_3DSTATE_SF sf
= {
2059 GEN8_3DSTATE_SF_header
,
2060 .LineWidth
= pCreateInfo
->lineWidth
,
2063 GEN8_3DSTATE_SF_pack(NULL
, state
->state_sf
, &sf
);
2065 bool enable_bias
= pCreateInfo
->depthBias
!= 0.0f
||
2066 pCreateInfo
->slopeScaledDepthBias
!= 0.0f
;
2067 struct GEN8_3DSTATE_RASTER raster
= {
2068 .GlobalDepthOffsetEnableSolid
= enable_bias
,
2069 .GlobalDepthOffsetEnableWireframe
= enable_bias
,
2070 .GlobalDepthOffsetEnablePoint
= enable_bias
,
2071 .GlobalDepthOffsetConstant
= pCreateInfo
->depthBias
,
2072 .GlobalDepthOffsetScale
= pCreateInfo
->slopeScaledDepthBias
,
2073 .GlobalDepthOffsetClamp
= pCreateInfo
->depthBiasClamp
2076 GEN8_3DSTATE_RASTER_pack(NULL
, state
->state_raster
, &raster
);
2078 *pState
= anv_dynamic_rs_state_to_handle(state
);
2083 VkResult
anv_DestroyDynamicRasterState(
2085 VkDynamicRasterState _rs_state
)
2087 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2088 ANV_FROM_HANDLE(anv_dynamic_rs_state
, rs_state
, _rs_state
);
2090 anv_device_free(device
, rs_state
);
2095 VkResult
anv_CreateDynamicColorBlendState(
2097 const VkDynamicColorBlendStateCreateInfo
* pCreateInfo
,
2098 VkDynamicColorBlendState
* pState
)
2100 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2101 struct anv_dynamic_cb_state
*state
;
2103 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DYNAMIC_COLOR_BLEND_STATE_CREATE_INFO
);
2105 state
= anv_device_alloc(device
, sizeof(*state
), 8,
2106 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2108 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
2110 struct GEN8_COLOR_CALC_STATE color_calc_state
= {
2111 .BlendConstantColorRed
= pCreateInfo
->blendConst
[0],
2112 .BlendConstantColorGreen
= pCreateInfo
->blendConst
[1],
2113 .BlendConstantColorBlue
= pCreateInfo
->blendConst
[2],
2114 .BlendConstantColorAlpha
= pCreateInfo
->blendConst
[3]
2117 GEN8_COLOR_CALC_STATE_pack(NULL
, state
->state_color_calc
, &color_calc_state
);
2119 *pState
= anv_dynamic_cb_state_to_handle(state
);
2124 VkResult
anv_DestroyDynamicColorBlendState(
2126 VkDynamicColorBlendState _cb_state
)
2128 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2129 ANV_FROM_HANDLE(anv_dynamic_cb_state
, cb_state
, _cb_state
);
2131 anv_device_free(device
, cb_state
);
2136 VkResult
anv_CreateDynamicDepthStencilState(
2138 const VkDynamicDepthStencilStateCreateInfo
* pCreateInfo
,
2139 VkDynamicDepthStencilState
* pState
)
2141 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2142 struct anv_dynamic_ds_state
*state
;
2144 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DYNAMIC_DEPTH_STENCIL_STATE_CREATE_INFO
);
2146 state
= anv_device_alloc(device
, sizeof(*state
), 8,
2147 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2149 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
2151 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil
= {
2152 GEN8_3DSTATE_WM_DEPTH_STENCIL_header
,
2154 /* Is this what we need to do? */
2155 .StencilBufferWriteEnable
= pCreateInfo
->stencilWriteMask
!= 0,
2157 .StencilTestMask
= pCreateInfo
->stencilReadMask
& 0xff,
2158 .StencilWriteMask
= pCreateInfo
->stencilWriteMask
& 0xff,
2160 .BackfaceStencilTestMask
= pCreateInfo
->stencilReadMask
& 0xff,
2161 .BackfaceStencilWriteMask
= pCreateInfo
->stencilWriteMask
& 0xff,
2164 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL
, state
->state_wm_depth_stencil
,
2167 struct GEN8_COLOR_CALC_STATE color_calc_state
= {
2168 .StencilReferenceValue
= pCreateInfo
->stencilFrontRef
,
2169 .BackFaceStencilReferenceValue
= pCreateInfo
->stencilBackRef
2172 GEN8_COLOR_CALC_STATE_pack(NULL
, state
->state_color_calc
, &color_calc_state
);
2174 *pState
= anv_dynamic_ds_state_to_handle(state
);
2179 VkResult
anv_DestroyDynamicDepthStencilState(
2181 VkDynamicDepthStencilState _ds_state
)
2183 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2184 ANV_FROM_HANDLE(anv_dynamic_ds_state
, ds_state
, _ds_state
);
2186 anv_device_free(device
, ds_state
);
2191 VkResult
anv_CreateFramebuffer(
2193 const VkFramebufferCreateInfo
* pCreateInfo
,
2194 VkFramebuffer
* pFramebuffer
)
2196 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2197 struct anv_framebuffer
*framebuffer
;
2199 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
);
2201 size_t size
= sizeof(*framebuffer
) +
2202 sizeof(struct anv_attachment_view
*) * pCreateInfo
->attachmentCount
;
2203 framebuffer
= anv_device_alloc(device
, size
, 8,
2204 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2205 if (framebuffer
== NULL
)
2206 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
2208 framebuffer
->attachment_count
= pCreateInfo
->attachmentCount
;
2209 for (uint32_t i
= 0; i
< pCreateInfo
->attachmentCount
; i
++) {
2210 ANV_FROM_HANDLE(anv_attachment_view
, view
,
2211 pCreateInfo
->pAttachments
[i
].view
);
2213 framebuffer
->attachments
[i
] = view
;
2216 framebuffer
->width
= pCreateInfo
->width
;
2217 framebuffer
->height
= pCreateInfo
->height
;
2218 framebuffer
->layers
= pCreateInfo
->layers
;
2220 anv_CreateDynamicViewportState(anv_device_to_handle(device
),
2221 &(VkDynamicViewportStateCreateInfo
) {
2222 .sType
= VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO
,
2223 .viewportAndScissorCount
= 1,
2224 .pViewports
= (VkViewport
[]) {
2228 .width
= pCreateInfo
->width
,
2229 .height
= pCreateInfo
->height
,
2234 .pScissors
= (VkRect2D
[]) {
2236 { pCreateInfo
->width
, pCreateInfo
->height
} },
2239 &framebuffer
->vp_state
);
2241 *pFramebuffer
= anv_framebuffer_to_handle(framebuffer
);
2246 VkResult
anv_DestroyFramebuffer(
2250 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2251 ANV_FROM_HANDLE(anv_framebuffer
, fb
, _fb
);
2253 anv_DestroyDynamicViewportState(anv_device_to_handle(device
),
2255 anv_device_free(device
, fb
);
2260 VkResult
anv_CreateRenderPass(
2262 const VkRenderPassCreateInfo
* pCreateInfo
,
2263 VkRenderPass
* pRenderPass
)
2265 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2266 struct anv_render_pass
*pass
;
2269 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
);
2271 size
= sizeof(*pass
) +
2272 pCreateInfo
->subpassCount
* sizeof(struct anv_subpass
);
2273 pass
= anv_device_alloc(device
, size
, 8,
2274 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2276 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
2278 /* Clear the subpasses along with the parent pass. This required because
2279 * each array member of anv_subpass must be a valid pointer if not NULL.
2281 memset(pass
, 0, size
);
2283 pass
->attachment_count
= pCreateInfo
->attachmentCount
;
2284 pass
->subpass_count
= pCreateInfo
->subpassCount
;
2286 size
= pCreateInfo
->attachmentCount
* sizeof(*pass
->attachments
);
2287 pass
->attachments
= anv_device_alloc(device
, size
, 8,
2288 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2289 for (uint32_t i
= 0; i
< pCreateInfo
->attachmentCount
; i
++) {
2290 pass
->attachments
[i
].format
= pCreateInfo
->pAttachments
[i
].format
;
2291 pass
->attachments
[i
].samples
= pCreateInfo
->pAttachments
[i
].samples
;
2292 pass
->attachments
[i
].load_op
= pCreateInfo
->pAttachments
[i
].loadOp
;
2293 pass
->attachments
[i
].stencil_load_op
= pCreateInfo
->pAttachments
[i
].stencilLoadOp
;
2294 // pass->attachments[i].store_op = pCreateInfo->pAttachments[i].storeOp;
2295 // pass->attachments[i].stencil_store_op = pCreateInfo->pAttachments[i].stencilStoreOp;
2298 for (uint32_t i
= 0; i
< pCreateInfo
->subpassCount
; i
++) {
2299 const VkSubpassDescription
*desc
= &pCreateInfo
->pSubpasses
[i
];
2300 struct anv_subpass
*subpass
= &pass
->subpasses
[i
];
2302 subpass
->input_count
= desc
->inputCount
;
2303 subpass
->color_count
= desc
->colorCount
;
2305 if (desc
->inputCount
> 0) {
2306 subpass
->input_attachments
=
2307 anv_device_alloc(device
, desc
->inputCount
* sizeof(uint32_t),
2308 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2310 for (uint32_t j
= 0; j
< desc
->inputCount
; j
++) {
2311 subpass
->input_attachments
[j
]
2312 = desc
->inputAttachments
[j
].attachment
;
2316 if (desc
->colorCount
> 0) {
2317 subpass
->color_attachments
=
2318 anv_device_alloc(device
, desc
->colorCount
* sizeof(uint32_t),
2319 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2321 for (uint32_t j
= 0; j
< desc
->colorCount
; j
++) {
2322 subpass
->color_attachments
[j
]
2323 = desc
->colorAttachments
[j
].attachment
;
2327 if (desc
->resolveAttachments
) {
2328 subpass
->resolve_attachments
=
2329 anv_device_alloc(device
, desc
->colorCount
* sizeof(uint32_t),
2330 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
2332 for (uint32_t j
= 0; j
< desc
->colorCount
; j
++) {
2333 subpass
->resolve_attachments
[j
]
2334 = desc
->resolveAttachments
[j
].attachment
;
2338 subpass
->depth_stencil_attachment
= desc
->depthStencilAttachment
.attachment
;
2341 *pRenderPass
= anv_render_pass_to_handle(pass
);
2346 VkResult
anv_DestroyRenderPass(
2350 ANV_FROM_HANDLE(anv_device
, device
, _device
);
2351 ANV_FROM_HANDLE(anv_render_pass
, pass
, _pass
);
2353 anv_device_free(device
, pass
->attachments
);
2355 for (uint32_t i
= 0; i
< pass
->subpass_count
; i
++) {
2356 /* In VkSubpassCreateInfo, each of the attachment arrays may be null.
2357 * Don't free the null arrays.
2359 struct anv_subpass
*subpass
= &pass
->subpasses
[i
];
2361 anv_device_free(device
, subpass
->input_attachments
);
2362 anv_device_free(device
, subpass
->color_attachments
);
2363 anv_device_free(device
, subpass
->resolve_attachments
);
2366 anv_device_free(device
, pass
);
2371 VkResult
anv_GetRenderAreaGranularity(
2373 VkRenderPass renderPass
,
2374 VkExtent2D
* pGranularity
)
2376 *pGranularity
= (VkExtent2D
) { 1, 1 };
2381 void vkCmdDbgMarkerBegin(
2382 VkCmdBuffer cmdBuffer
,
2383 const char* pMarker
)
2384 __attribute__ ((visibility ("default")));
2386 void vkCmdDbgMarkerEnd(
2387 VkCmdBuffer cmdBuffer
)
2388 __attribute__ ((visibility ("default")));
2390 void vkCmdDbgMarkerBegin(
2391 VkCmdBuffer cmdBuffer
,
2392 const char* pMarker
)
2396 void vkCmdDbgMarkerEnd(
2397 VkCmdBuffer cmdBuffer
)