vulkan/overlay: add a margin to the size of the window
src/vulkan/overlay-layer/overlay.cpp
1 /*
2 * Copyright © 2019 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <string.h>
25 #include <stdlib.h>
26 #include <assert.h>
27
28 #include <vulkan/vulkan.h>
29 #include <vulkan/vk_layer.h>
30
31 #include "imgui.h"
32
33 #include "overlay_params.h"
34
35 #include "util/debug.h"
36 #include "util/hash_table.h"
37 #include "util/list.h"
38 #include "util/ralloc.h"
39 #include "util/os_time.h"
40 #include "util/simple_mtx.h"
41
42 #include "vk_enum_to_str.h"
43 #include "vk_util.h"
44
45 /* Mapped from VkInstance/VkPhysicalDevice */
46 struct instance_data {
47 struct vk_instance_dispatch_table vtable;
48 VkInstance instance;
49
50 struct overlay_params params;
51 bool pipeline_statistics_enabled;
52 };
53
54 struct frame_stat {
55 uint64_t stats[OVERLAY_PARAM_ENABLED_MAX];
56 };
57
58 /* Mapped from VkDevice */
59 struct queue_data;
60 struct device_data {
61 struct instance_data *instance;
62
63 PFN_vkSetDeviceLoaderData set_device_loader_data;
64
65 struct vk_device_dispatch_table vtable;
66 VkPhysicalDevice physical_device;
67 VkDevice device;
68
69 VkPhysicalDeviceProperties properties;
70
71 struct queue_data *graphic_queue;
72
73 struct queue_data **queues;
74 uint32_t n_queues;
75
76 /* For a single frame */
77 struct frame_stat frame_stats;
78 };
79
80 /* Mapped from VkCommandBuffer */
81 struct command_buffer_data {
82 struct device_data *device;
83
84 VkCommandBufferLevel level;
85
86 VkCommandBuffer cmd_buffer;
87 VkQueryPool pipeline_query_pool;
88 VkQueryPool timestamp_query_pool;
89 uint32_t query_index;
90
91 struct frame_stat stats;
92
93 struct list_head link; /* link into queue_data::running_command_buffer */
94 };
95
96 /* Mapped from VkQueue */
97 struct queue_data {
98 struct device_data *device;
99
100 VkQueue queue;
101 VkQueueFlags flags;
102 uint32_t family_index;
103 uint64_t timestamp_mask;
104
105 VkFence queries_fence;
106
107 struct list_head running_command_buffer;
108 };
109
110 /* Mapped from VkSwapchainKHR */
111 struct swapchain_data {
112 struct device_data *device;
113
114 VkSwapchainKHR swapchain;
115 unsigned width, height;
116 VkFormat format;
117
118 uint32_t n_images;
119 VkImage *images;
120 VkImageView *image_views;
121 VkFramebuffer *framebuffers;
122
123 VkRenderPass render_pass;
124
125 VkDescriptorPool descriptor_pool;
126 VkDescriptorSetLayout descriptor_layout;
127 VkDescriptorSet descriptor_set;
128
129 VkSampler font_sampler;
130
131 VkPipelineLayout pipeline_layout;
132 VkPipeline pipeline;
133
134 VkCommandPool command_pool;
135
136 struct {
137 VkCommandBuffer command_buffer;
138
139 VkBuffer vertex_buffer;
140 VkDeviceMemory vertex_buffer_mem;
141 VkDeviceSize vertex_buffer_size;
142
143 VkBuffer index_buffer;
144 VkDeviceMemory index_buffer_mem;
145 VkDeviceSize index_buffer_size;
146 } frame_data[2];
147
148 bool font_uploaded;
149 VkImage font_image;
150 VkImageView font_image_view;
151 VkDeviceMemory font_mem;
152 VkBuffer upload_font_buffer;
153 VkDeviceMemory upload_font_buffer_mem;
154
155 VkSemaphore submission_semaphore;
156
157 /**/
158 ImGuiContext* imgui_context;
159 ImVec2 window_size;
160
161 /**/
162 uint64_t n_frames;
163 uint64_t last_present_time;
164
165 unsigned n_frames_since_update;
166 uint64_t last_fps_update;
167 double fps;
168
169 enum overlay_param_enabled stat_selector;
170 double time_dividor;
171 struct frame_stat stats_min, stats_max;
172 struct frame_stat frames_stats[200];
173
174 /* Over a single frame */
175 struct frame_stat frame_stats;
176
177 /* Over fps_sampling_period */
178 struct frame_stat accumulated_stats;
179 };
180
181 static const VkQueryPipelineStatisticFlags overlay_query_flags =
182 VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT |
183 VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT |
184 VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT |
185 VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT |
186 VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT |
187 VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT |
188 VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT |
189 VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT |
190 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT |
191 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT |
192 VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT;
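/* Number of pipeline statistics enabled in overlay_query_flags above; the two
 * must stay in sync. */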
193 #define OVERLAY_QUERY_COUNT (11)
194
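/* Layer-private data for every tracked Vulkan handle (instance, physical
 * device, device, queue, command buffer, swapchain) lives in a single
 * pointer-keyed hash table, guarded by a mutex because objects may be
 * created and destroyed from multiple threads. */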
195 static struct hash_table *vk_object_to_data = NULL;
196 static simple_mtx_t vk_object_to_data_mutex = _SIMPLE_MTX_INITIALIZER_NP;
197
198 thread_local ImGuiContext* __MesaImGui;
199
200 static inline void ensure_vk_object_map(void)
201 {
202 if (!vk_object_to_data) {
203 vk_object_to_data = _mesa_hash_table_create(NULL,
204 _mesa_hash_pointer,
205 _mesa_key_pointer_equal);
206 }
207 }
208
209 #define FIND_SWAPCHAIN_DATA(obj) ((struct swapchain_data *)find_object_data((void *) obj))
210 #define FIND_CMD_BUFFER_DATA(obj) ((struct command_buffer_data *)find_object_data((void *) obj))
211 #define FIND_DEVICE_DATA(obj) ((struct device_data *)find_object_data((void *) obj))
212 #define FIND_QUEUE_DATA(obj) ((struct queue_data *)find_object_data((void *) obj))
213 #define FIND_PHYSICAL_DEVICE_DATA(obj) ((struct instance_data *)find_object_data((void *) obj))
214 #define FIND_INSTANCE_DATA(obj) ((struct instance_data *)find_object_data((void *) obj))
215 static void *find_object_data(void *obj)
216 {
217 simple_mtx_lock(&vk_object_to_data_mutex);
218 ensure_vk_object_map();
219 struct hash_entry *entry = _mesa_hash_table_search(vk_object_to_data, obj);
220 void *data = entry ? entry->data : NULL;
221 simple_mtx_unlock(&vk_object_to_data_mutex);
222 return data;
223 }
224
225 static void map_object(void *obj, void *data)
226 {
227 simple_mtx_lock(&vk_object_to_data_mutex);
228 ensure_vk_object_map();
229 _mesa_hash_table_insert(vk_object_to_data, obj, data);
230 simple_mtx_unlock(&vk_object_to_data_mutex);
231 }
232
233 static void unmap_object(void *obj)
234 {
235 simple_mtx_lock(&vk_object_to_data_mutex);
236 struct hash_entry *entry = _mesa_hash_table_search(vk_object_to_data, obj);
237 _mesa_hash_table_remove(vk_object_to_data, entry);
238 simple_mtx_unlock(&vk_object_to_data_mutex);
239 }
240
241 /**/
242
243 #define VK_CHECK(expr) \
244 do { \
245 VkResult __result = (expr); \
246 if (__result != VK_SUCCESS) { \
247 fprintf(stderr, "'%s' line %i failed with %s\n", \
248 #expr, __LINE__, vk_Result_to_str(__result)); \
249 } \
250 } while (0)
251
252 /**/
253
254 static VkLayerInstanceCreateInfo *get_instance_chain_info(const VkInstanceCreateInfo *pCreateInfo,
255 VkLayerFunction func)
256 {
257 vk_foreach_struct(item, pCreateInfo->pNext) {
258 if (item->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
259 ((VkLayerInstanceCreateInfo *) item)->function == func)
260 return (VkLayerInstanceCreateInfo *) item;
261 }
262 unreachable("instance chain info not found");
263 return NULL;
264 }
265
266 static VkLayerDeviceCreateInfo *get_device_chain_info(const VkDeviceCreateInfo *pCreateInfo,
267 VkLayerFunction func)
268 {
269 vk_foreach_struct(item, pCreateInfo->pNext) {
270 if (item->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO &&
271 ((VkLayerDeviceCreateInfo *) item)->function == func)
272 return (VkLayerDeviceCreateInfo *)item;
273 }
274 unreachable("device chain info not found");
275 return NULL;
276 }
277
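/* Duplicate (shallow-copy each node of) a pNext chain, and free such a copy;
 * used when the layer needs its own mutable copy of a create-info chain. */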
278 static struct VkBaseOutStructure *
279 clone_chain(const struct VkBaseInStructure *chain)
280 {
281 struct VkBaseOutStructure *head = NULL, *tail = NULL;
282
283 vk_foreach_struct_const(item, chain) {
284 size_t item_size = vk_structure_type_size(item);
285 struct VkBaseOutStructure *new_item =
286 (struct VkBaseOutStructure *)malloc(item_size);
287
288 memcpy(new_item, item, item_size);
289
290 if (!head)
291 head = new_item;
292 if (tail)
293 tail->pNext = new_item;
294 tail = new_item;
295 }
296
297 return head;
298 }
299
300 static void
301 free_chain(struct VkBaseOutStructure *chain)
302 {
303 while (chain) {
304 void *node = chain;
305 chain = chain->pNext;
306 free(node);
307 }
308 }
309
310 /**/
311
312 static void check_vk_result(VkResult err)
313 {
314 if (err != VK_SUCCESS)
315 fprintf(stderr, "overlay: error: %s\n", vk_Result_to_str(err));
316 }
317
318 static struct instance_data *new_instance_data(VkInstance instance)
319 {
320 struct instance_data *data = rzalloc(NULL, struct instance_data);
321 data->instance = instance;
322 map_object(data->instance, data);
323 return data;
324 }
325
326 static void destroy_instance_data(struct instance_data *data)
327 {
328 if (data->params.output_file)
329 fclose(data->params.output_file);
330 unmap_object(data->instance);
331 ralloc_free(data);
332 }
333
334 static void instance_data_map_physical_devices(struct instance_data *instance_data,
335 bool map)
336 {
337 uint32_t physicalDeviceCount = 0;
338 instance_data->vtable.EnumeratePhysicalDevices(instance_data->instance,
339 &physicalDeviceCount,
340 NULL);
341
342 VkPhysicalDevice *physicalDevices = (VkPhysicalDevice *) malloc(sizeof(VkPhysicalDevice) * physicalDeviceCount);
343 instance_data->vtable.EnumeratePhysicalDevices(instance_data->instance,
344 &physicalDeviceCount,
345 physicalDevices);
346
347 for (uint32_t i = 0; i < physicalDeviceCount; i++) {
348 if (map)
349 map_object(physicalDevices[i], instance_data);
350 else
351 unmap_object(physicalDevices[i]);
352 }
353
354 free(physicalDevices);
355 }
356
357 /**/
358 static struct device_data *new_device_data(VkDevice device, struct instance_data *instance)
359 {
360 struct device_data *data = rzalloc(NULL, struct device_data);
361 data->instance = instance;
362 data->device = device;
363 map_object(data->device, data);
364 return data;
365 }
366
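/* Per-VkQueue state: queue family, timestamp validity mask and a fence used to
 * wait on query results. The first graphics-capable queue is remembered on the
 * device so the overlay can submit its rendering there. */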
367 static struct queue_data *new_queue_data(VkQueue queue,
368 const VkQueueFamilyProperties *family_props,
369 uint32_t family_index,
370 struct device_data *device_data)
371 {
372 struct queue_data *data = rzalloc(device_data, struct queue_data);
373 data->device = device_data;
374 data->queue = queue;
375 data->flags = family_props->queueFlags;
376 data->timestamp_mask = family_props->timestampValidBits >= 64 ? UINT64_MAX : ((1ULL << family_props->timestampValidBits) - 1);
377 data->family_index = family_index;
378 LIST_INITHEAD(&data->running_command_buffer);
379 map_object(data->queue, data);
380
381 /* Fence synchronizing access to queries on that queue. */
382 VkFenceCreateInfo fence_info = {};
383 fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
384 fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
385 VkResult err = device_data->vtable.CreateFence(device_data->device,
386 &fence_info,
387 NULL,
388 &data->queries_fence);
389 check_vk_result(err);
390
391 if (data->flags & VK_QUEUE_GRAPHICS_BIT)
392 device_data->graphic_queue = data;
393
394 return data;
395 }
396
397 static void destroy_queue(struct queue_data *data)
398 {
399 struct device_data *device_data = data->device;
400 device_data->vtable.DestroyFence(device_data->device, data->queries_fence, NULL);
401 unmap_object(data->queue);
402 ralloc_free(data);
403 }
404
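/* Walk every queue requested in VkDeviceCreateInfo, fetch its handle, register
 * it with the loader through set_device_loader_data and wrap it in a
 * queue_data. */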
405 static void device_map_queues(struct device_data *data,
406 const VkDeviceCreateInfo *pCreateInfo)
407 {
408 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
409 data->n_queues += pCreateInfo->pQueueCreateInfos[i].queueCount;
410 data->queues = ralloc_array(data, struct queue_data *, data->n_queues);
411
412 struct instance_data *instance_data = data->instance;
413 uint32_t n_family_props;
414 instance_data->vtable.GetPhysicalDeviceQueueFamilyProperties(data->physical_device,
415 &n_family_props,
416 NULL);
417 VkQueueFamilyProperties *family_props =
418 (VkQueueFamilyProperties *)malloc(sizeof(VkQueueFamilyProperties) * n_family_props);
419 instance_data->vtable.GetPhysicalDeviceQueueFamilyProperties(data->physical_device,
420 &n_family_props,
421 family_props);
422
423 uint32_t queue_index = 0;
424 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
425 for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount; j++) {
426 VkQueue queue;
427 data->vtable.GetDeviceQueue(data->device,
428 pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex,
429 j, &queue);
430
431 VK_CHECK(data->set_device_loader_data(data->device, queue));
432
433 data->queues[queue_index++] =
434 new_queue_data(queue, &family_props[pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex],
435 pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, data);
436 }
437 }
438
439 free(family_props);
440 }
441
442 static void device_unmap_queues(struct device_data *data)
443 {
444 for (uint32_t i = 0; i < data->n_queues; i++)
445 destroy_queue(data->queues[i]);
446 }
447
448 static void destroy_device_data(struct device_data *data)
449 {
450 unmap_object(data->device);
451 ralloc_free(data);
452 }
453
454 /**/
455 static struct command_buffer_data *new_command_buffer_data(VkCommandBuffer cmd_buffer,
456 VkCommandBufferLevel level,
457 VkQueryPool pipeline_query_pool,
458 VkQueryPool timestamp_query_pool,
459 uint32_t query_index,
460 struct device_data *device_data)
461 {
462 struct command_buffer_data *data = rzalloc(NULL, struct command_buffer_data);
463 data->device = device_data;
464 data->cmd_buffer = cmd_buffer;
465 data->level = level;
466 data->pipeline_query_pool = pipeline_query_pool;
467 data->timestamp_query_pool = timestamp_query_pool;
468 data->query_index = query_index;
469 list_inithead(&data->link);
470 map_object((void *) data->cmd_buffer, data);
471 return data;
472 }
473
474 static void destroy_command_buffer_data(struct command_buffer_data *data)
475 {
476 unmap_object((void *) data->cmd_buffer);
477 list_delinit(&data->link);
478 ralloc_free(data);
479 }
480
481 /**/
482 static struct swapchain_data *new_swapchain_data(VkSwapchainKHR swapchain,
483 struct device_data *device_data)
484 {
485 struct swapchain_data *data = rzalloc(NULL, struct swapchain_data);
486 data->device = device_data;
487 data->swapchain = swapchain;
488 data->window_size = ImVec2(300, 300);
489 map_object((void *) data->swapchain, data);
490 return data;
491 }
492
493 static void destroy_swapchain_data(struct swapchain_data *data)
494 {
495 unmap_object((void *) data->swapchain);
496 ralloc_free(data);
497 }
498
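/* Called once per present: folds the device and swapchain per-frame counters
 * into a ring buffer of the last ARRAY_SIZE(frames_stats) frames, accumulates
 * them over the sampling window, and refreshes the FPS estimate (optionally
 * logging it to the output file) once fps_sampling_period has elapsed. */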
499 static void snapshot_swapchain_frame(struct swapchain_data *data)
500 {
501 struct device_data *device_data = data->device;
502 struct instance_data *instance_data = device_data->instance;
503 uint32_t f_idx = data->n_frames % ARRAY_SIZE(data->frames_stats);
504 uint64_t now = os_time_get(); /* us */
505
506 if (data->last_present_time) {
507 data->frame_stats.stats[OVERLAY_PARAM_ENABLED_frame_timing] =
508 now - data->last_present_time;
509 }
510
511 memset(&data->frames_stats[f_idx], 0, sizeof(data->frames_stats[f_idx]));
512 for (int s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++) {
513 data->frames_stats[f_idx].stats[s] += device_data->frame_stats.stats[s] + data->frame_stats.stats[s];
514 data->accumulated_stats.stats[s] += device_data->frame_stats.stats[s] + data->frame_stats.stats[s];
515 }
516
517 if (data->last_fps_update) {
518 double elapsed = (double)(now - data->last_fps_update); /* us */
519 if (elapsed >= instance_data->params.fps_sampling_period) {
520 data->fps = 1000000.0f * data->n_frames_since_update / elapsed;
521 if (instance_data->params.output_file) {
522 fprintf(instance_data->params.output_file, "%.2f\n", data->fps);
523 fflush(instance_data->params.output_file);
524 }
525
526 memset(&data->accumulated_stats, 0, sizeof(data->accumulated_stats));
527 data->n_frames_since_update = 0;
528 data->last_fps_update = now;
529 }
530 } else {
531 data->last_fps_update = now;
532 }
533
534 memset(&device_data->frame_stats, 0, sizeof(device_data->frame_stats));
535 memset(&data->frame_stats, 0, sizeof(data->frame_stats));
536
537 data->last_present_time = now;
538 data->n_frames++;
539 data->n_frames_since_update++;
540 }
541
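/* Value callbacks for ImGui::PlotHistogram: map the plot index onto the
 * frames_stats ring buffer (returning 0 for slots that have not been written
 * yet). get_time_stat additionally converts raw time counters into
 * milliseconds using time_dividor. */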
542 static float get_time_stat(void *_data, int _idx)
543 {
544 struct swapchain_data *data = (struct swapchain_data *) _data;
545 if ((ARRAY_SIZE(data->frames_stats) - _idx) > data->n_frames)
546 return 0.0f;
547 int idx = ARRAY_SIZE(data->frames_stats) +
548 data->n_frames < ARRAY_SIZE(data->frames_stats) ?
549 _idx - data->n_frames :
550 _idx + data->n_frames;
551 idx %= ARRAY_SIZE(data->frames_stats);
552 /* Time stats are in us. */
553 return data->frames_stats[idx].stats[data->stat_selector] / data->time_dividor;
554 }
555
556 static float get_stat(void *_data, int _idx)
557 {
558 struct swapchain_data *data = (struct swapchain_data *) _data;
559 if ((ARRAY_SIZE(data->frames_stats) - _idx) > data->n_frames)
560 return 0.0f;
561 int idx = ARRAY_SIZE(data->frames_stats) +
562 data->n_frames < ARRAY_SIZE(data->frames_stats) ?
563 _idx - data->n_frames :
564 _idx + data->n_frames;
565 idx %= ARRAY_SIZE(data->frames_stats);
566 return data->frames_stats[idx].stats[data->stat_selector];
567 }
568
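/* Pin the overlay window to the corner selected via the overlay params,
 * keeping a fixed margin from the swapchain edges and reusing the window size
 * computed at the end of the previous frame. */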
569 static void position_layer(struct swapchain_data *data)
570
571 {
572 struct device_data *device_data = data->device;
573 struct instance_data *instance_data = device_data->instance;
574 const float margin = 10.0f;
575
576 ImGui::SetNextWindowBgAlpha(0.5);
577 ImGui::SetNextWindowSize(data->window_size, ImGuiCond_Always);
578 switch (instance_data->params.position) {
579 case LAYER_POSITION_TOP_LEFT:
580 ImGui::SetNextWindowPos(ImVec2(margin, margin), ImGuiCond_Always);
581 break;
582 case LAYER_POSITION_TOP_RIGHT:
583 ImGui::SetNextWindowPos(ImVec2(data->width - data->window_size.x - margin, margin),
584 ImGuiCond_Always);
585 break;
586 case LAYER_POSITION_BOTTOM_LEFT:
587 ImGui::SetNextWindowPos(ImVec2(margin, data->height - data->window_size.y - margin),
588 ImGuiCond_Always);
589 break;
590 case LAYER_POSITION_BOTTOM_RIGHT:
591 ImGui::SetNextWindowPos(ImVec2(data->width - data->window_size.x - margin,
592 data->height - data->window_size.y - margin),
593 ImGuiCond_Always);
594 break;
595 }
596 }
597
598 static void compute_swapchain_display(struct swapchain_data *data)
599 {
600 struct device_data *device_data = data->device;
601 struct instance_data *instance_data = device_data->instance;
602
603 ImGui::SetCurrentContext(data->imgui_context);
604 ImGui::NewFrame();
605 position_layer(data);
606 ImGui::Begin("Mesa overlay");
607 ImGui::Text("Device: %s", device_data->properties.deviceName);
608
609 const char *format_name = vk_Format_to_str(data->format);
610 format_name = format_name ? (format_name + strlen("VK_FORMAT_")) : "unknown";
611 ImGui::Text("Swapchain format: %s", format_name);
612 ImGui::Text("Frames: %" PRIu64, data->n_frames);
613 if (instance_data->params.enabled[OVERLAY_PARAM_ENABLED_fps])
614 ImGui::Text("FPS: %.2f" , data->fps);
615
616 /* Recompute min/max */
617 for (uint32_t s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++) {
618 data->stats_min.stats[s] = UINT64_MAX;
619 data->stats_max.stats[s] = 0;
620 }
621 for (uint32_t f = 0; f < MIN2(data->n_frames, ARRAY_SIZE(data->frames_stats)); f++) {
622 for (uint32_t s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++) {
623 data->stats_min.stats[s] = MIN2(data->frames_stats[f].stats[s],
624 data->stats_min.stats[s]);
625 data->stats_max.stats[s] = MAX2(data->frames_stats[f].stats[s],
626 data->stats_max.stats[s]);
627 }
628 }
629 for (uint32_t s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++) {
630 assert(data->stats_min.stats[s] != UINT64_MAX);
631 }
632
633 for (uint32_t s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++) {
634 if (!instance_data->params.enabled[s] ||
635 s == OVERLAY_PARAM_ENABLED_fps)
636 continue;
637
638 char hash[40];
639 snprintf(hash, sizeof(hash), "##%s", overlay_param_names[s]);
640 data->stat_selector = (enum overlay_param_enabled) s;
641 data->time_dividor = 1000.0f;
642 if (s == OVERLAY_PARAM_ENABLED_gpu_timing)
643 data->time_dividor = 1000000.0f;
644
645 if (s == OVERLAY_PARAM_ENABLED_frame_timing ||
646 s == OVERLAY_PARAM_ENABLED_acquire_timing ||
647 s == OVERLAY_PARAM_ENABLED_gpu_timing) {
648 double min_time = data->stats_min.stats[s] / data->time_dividor;
649 double max_time = data->stats_max.stats[s] / data->time_dividor;
650 ImGui::PlotHistogram(hash, get_time_stat, data,
651 ARRAY_SIZE(data->frames_stats), 0,
652 NULL, min_time, max_time,
653 ImVec2(ImGui::GetContentRegionAvailWidth(), 30));
654 ImGui::Text("%s: %.3fms [%.3f, %.3f]", overlay_param_names[s],
655 get_time_stat(data, ARRAY_SIZE(data->frames_stats) - 1),
656 min_time, max_time);
657 } else {
658 ImGui::PlotHistogram(hash, get_stat, data,
659 ARRAY_SIZE(data->frames_stats), 0,
660 NULL,
661 data->stats_min.stats[s],
662 data->stats_max.stats[s],
663 ImVec2(ImGui::GetContentRegionAvailWidth(), 30));
664 ImGui::Text("%s: %.0f [%" PRIu64 ", %" PRIu64 "]", overlay_param_names[s],
665 get_stat(data, ARRAY_SIZE(data->frames_stats) - 1),
666 data->stats_min.stats[s], data->stats_max.stats[s]);
667 }
668 }
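/* Remember how tall the window contents actually were this frame (plus a
 * small margin) so next frame's SetNextWindowSize() fits them. */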
669 data->window_size = ImVec2(data->window_size.x, ImGui::GetCursorPosY() + 10.0f);
670 ImGui::End();
671 ImGui::EndFrame();
672 ImGui::Render();
673 }
674
675 static uint32_t vk_memory_type(struct device_data *data,
676 VkMemoryPropertyFlags properties,
677 uint32_t type_bits)
678 {
679 VkPhysicalDeviceMemoryProperties prop;
680 data->instance->vtable.GetPhysicalDeviceMemoryProperties(data->physical_device, &prop);
681 for (uint32_t i = 0; i < prop.memoryTypeCount; i++)
682 if ((prop.memoryTypes[i].propertyFlags & properties) == properties && type_bits & (1<<i))
683 return i;
684 return 0xFFFFFFFF; // Unable to find memoryType
685 }
686
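/* Lazily upload the ImGui font atlas the first time overlay commands are
 * recorded for this swapchain: copy the RGBA32 atlas into a host-visible
 * staging buffer, then record a buffer-to-image copy with the required layout
 * transitions into the given command buffer. */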
687 static void ensure_swapchain_fonts(struct swapchain_data *data,
688 VkCommandBuffer command_buffer)
689 {
690 if (data->font_uploaded)
691 return;
692
693 data->font_uploaded = true;
694
695 struct device_data *device_data = data->device;
696 ImGuiIO& io = ImGui::GetIO();
697 unsigned char* pixels;
698 int width, height;
699 io.Fonts->GetTexDataAsRGBA32(&pixels, &width, &height);
700 size_t upload_size = width * height * 4 * sizeof(char);
701
702 /* Upload buffer */
703 VkBufferCreateInfo buffer_info = {};
704 buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
705 buffer_info.size = upload_size;
706 buffer_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
707 buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
708 VK_CHECK(device_data->vtable.CreateBuffer(device_data->device, &buffer_info,
709 NULL, &data->upload_font_buffer));
710 VkMemoryRequirements upload_buffer_req;
711 device_data->vtable.GetBufferMemoryRequirements(device_data->device,
712 data->upload_font_buffer,
713 &upload_buffer_req);
714 VkMemoryAllocateInfo upload_alloc_info = {};
715 upload_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
716 upload_alloc_info.allocationSize = upload_buffer_req.size;
717 upload_alloc_info.memoryTypeIndex = vk_memory_type(device_data,
718 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
719 upload_buffer_req.memoryTypeBits);
720 VK_CHECK(device_data->vtable.AllocateMemory(device_data->device,
721 &upload_alloc_info,
722 NULL,
723 &data->upload_font_buffer_mem));
724 VK_CHECK(device_data->vtable.BindBufferMemory(device_data->device,
725 data->upload_font_buffer,
726 data->upload_font_buffer_mem, 0));
727
728 /* Upload to Buffer */
729 char* map = NULL;
730 VK_CHECK(device_data->vtable.MapMemory(device_data->device,
731 data->upload_font_buffer_mem,
732 0, upload_size, 0, (void**)(&map)));
733 memcpy(map, pixels, upload_size);
734 VkMappedMemoryRange range[1] = {};
735 range[0].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
736 range[0].memory = data->upload_font_buffer_mem;
737 range[0].size = upload_size;
738 VK_CHECK(device_data->vtable.FlushMappedMemoryRanges(device_data->device, 1, range));
739 device_data->vtable.UnmapMemory(device_data->device,
740 data->upload_font_buffer_mem);
741
742 /* Copy buffer to image */
743 VkImageMemoryBarrier copy_barrier[1] = {};
744 copy_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
745 copy_barrier[0].dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
746 copy_barrier[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
747 copy_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
748 copy_barrier[0].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
749 copy_barrier[0].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
750 copy_barrier[0].image = data->font_image;
751 copy_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
752 copy_barrier[0].subresourceRange.levelCount = 1;
753 copy_barrier[0].subresourceRange.layerCount = 1;
754 device_data->vtable.CmdPipelineBarrier(command_buffer,
755 VK_PIPELINE_STAGE_HOST_BIT,
756 VK_PIPELINE_STAGE_TRANSFER_BIT,
757 0, 0, NULL, 0, NULL,
758 1, copy_barrier);
759
760 VkBufferImageCopy region = {};
761 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
762 region.imageSubresource.layerCount = 1;
763 region.imageExtent.width = width;
764 region.imageExtent.height = height;
765 region.imageExtent.depth = 1;
766 device_data->vtable.CmdCopyBufferToImage(command_buffer,
767 data->upload_font_buffer,
768 data->font_image,
769 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
770 1, &region);
771
772 VkImageMemoryBarrier use_barrier[1] = {};
773 use_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
774 use_barrier[0].srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
775 use_barrier[0].dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
776 use_barrier[0].oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
777 use_barrier[0].newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
778 use_barrier[0].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
779 use_barrier[0].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
780 use_barrier[0].image = data->font_image;
781 use_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
782 use_barrier[0].subresourceRange.levelCount = 1;
783 use_barrier[0].subresourceRange.layerCount = 1;
784 device_data->vtable.CmdPipelineBarrier(command_buffer,
785 VK_PIPELINE_STAGE_TRANSFER_BIT,
786 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
787 0,
788 0, NULL,
789 0, NULL,
790 1, use_barrier);
791
792 /* Store our identifier */
793 io.Fonts->TexID = (ImTextureID)(intptr_t)data->font_image;
794 }
795
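/* (Re)allocate a host-visible buffer when the existing one is too small for
 * this frame's vertex or index data. */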
796 static void CreateOrResizeBuffer(struct device_data *data,
797 VkBuffer *buffer,
798 VkDeviceMemory *buffer_memory,
799 VkDeviceSize *buffer_size,
800 size_t new_size, VkBufferUsageFlagBits usage)
801 {
802 if (*buffer != VK_NULL_HANDLE)
803 data->vtable.DestroyBuffer(data->device, *buffer, NULL);
804 if (*buffer_memory)
805 data->vtable.FreeMemory(data->device, *buffer_memory, NULL);
806
807 VkBufferCreateInfo buffer_info = {};
808 buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
809 buffer_info.size = new_size;
810 buffer_info.usage = usage;
811 buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
812 VK_CHECK(data->vtable.CreateBuffer(data->device, &buffer_info, NULL, buffer));
813
814 VkMemoryRequirements req;
815 data->vtable.GetBufferMemoryRequirements(data->device, *buffer, &req);
816 VkMemoryAllocateInfo alloc_info = {};
817 alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
818 alloc_info.allocationSize = req.size;
819 alloc_info.memoryTypeIndex =
820 vk_memory_type(data, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, req.memoryTypeBits);
821 VK_CHECK(data->vtable.AllocateMemory(data->device, &alloc_info, NULL, buffer_memory));
822
823 VK_CHECK(data->vtable.BindBufferMemory(data->device, *buffer, *buffer_memory, 0));
824 *buffer_size = new_size;
825 }
826
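/* Record and submit the overlay draw: transition the target swapchain image
 * back to COLOR_ATTACHMENT_OPTIMAL, upload ImGui's vertex/index data into the
 * per-frame host-visible buffers, replay the draw commands with per-command
 * scissors, then submit on the graphics queue, waiting on the application's
 * semaphores and signaling the overlay's own submission semaphore. */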
827 static void render_swapchain_display(struct swapchain_data *data,
828 const VkSemaphore *wait_semaphores,
829 unsigned n_wait_semaphores,
830 unsigned image_index)
831 {
832 ImDrawData* draw_data = ImGui::GetDrawData();
833 if (draw_data->TotalVtxCount == 0)
834 return;
835
836 struct device_data *device_data = data->device;
837 uint32_t idx = data->n_frames % ARRAY_SIZE(data->frame_data);
838 VkCommandBuffer command_buffer = data->frame_data[idx].command_buffer;
839
840 device_data->vtable.ResetCommandBuffer(command_buffer, 0);
841
842 VkRenderPassBeginInfo render_pass_info = {};
843 render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
844 render_pass_info.renderPass = data->render_pass;
845 render_pass_info.framebuffer = data->framebuffers[image_index];
846 render_pass_info.renderArea.extent.width = data->width;
847 render_pass_info.renderArea.extent.height = data->height;
848
849 VkCommandBufferBeginInfo buffer_begin_info = {};
850 buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
851
852 device_data->vtable.BeginCommandBuffer(command_buffer, &buffer_begin_info);
853
854 ensure_swapchain_fonts(data, command_buffer);
855
856 /* Bounce the image to display back to color attachment layout for
857 * rendering on top of it.
858 */
859 VkImageMemoryBarrier imb;
860 imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
861 imb.pNext = nullptr;
862 imb.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
863 imb.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
864 imb.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
865 imb.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
866 imb.image = data->images[image_index];
867 imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
868 imb.subresourceRange.baseMipLevel = 0;
869 imb.subresourceRange.levelCount = 1;
870 imb.subresourceRange.baseArrayLayer = 0;
871 imb.subresourceRange.layerCount = 1;
872 imb.srcQueueFamilyIndex = device_data->graphic_queue->family_index;
873 imb.dstQueueFamilyIndex = device_data->graphic_queue->family_index;
874 device_data->vtable.CmdPipelineBarrier(command_buffer,
875 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
876 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
877 0, /* dependency flags */
878 0, nullptr, /* memory barriers */
879 0, nullptr, /* buffer memory barriers */
880 1, &imb); /* image memory barriers */
881
882 device_data->vtable.CmdBeginRenderPass(command_buffer, &render_pass_info,
883 VK_SUBPASS_CONTENTS_INLINE);
884
885 /* Create/Resize vertex & index buffers */
886 size_t vertex_size = draw_data->TotalVtxCount * sizeof(ImDrawVert);
887 size_t index_size = draw_data->TotalIdxCount * sizeof(ImDrawIdx);
888 if (data->frame_data[idx].vertex_buffer_size < vertex_size) {
889 CreateOrResizeBuffer(device_data,
890 &data->frame_data[idx].vertex_buffer,
891 &data->frame_data[idx].vertex_buffer_mem,
892 &data->frame_data[idx].vertex_buffer_size,
893 vertex_size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
894 }
895 if (data->frame_data[idx].index_buffer_size < index_size) {
896 CreateOrResizeBuffer(device_data,
897 &data->frame_data[idx].index_buffer,
898 &data->frame_data[idx].index_buffer_mem,
899 &data->frame_data[idx].index_buffer_size,
900 index_size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT);
901 }
902
903 /* Upload vertex & index data */
904 VkBuffer vertex_buffer = data->frame_data[idx].vertex_buffer;
905 VkDeviceMemory vertex_mem = data->frame_data[idx].vertex_buffer_mem;
906 VkBuffer index_buffer = data->frame_data[idx].index_buffer;
907 VkDeviceMemory index_mem = data->frame_data[idx].index_buffer_mem;
908 ImDrawVert* vtx_dst = NULL;
909 ImDrawIdx* idx_dst = NULL;
910 VK_CHECK(device_data->vtable.MapMemory(device_data->device, vertex_mem,
911 0, vertex_size, 0, (void**)(&vtx_dst)));
912 VK_CHECK(device_data->vtable.MapMemory(device_data->device, index_mem,
913 0, index_size, 0, (void**)(&idx_dst)));
914 for (int n = 0; n < draw_data->CmdListsCount; n++)
915 {
916 const ImDrawList* cmd_list = draw_data->CmdLists[n];
917 memcpy(vtx_dst, cmd_list->VtxBuffer.Data, cmd_list->VtxBuffer.Size * sizeof(ImDrawVert));
918 memcpy(idx_dst, cmd_list->IdxBuffer.Data, cmd_list->IdxBuffer.Size * sizeof(ImDrawIdx));
919 vtx_dst += cmd_list->VtxBuffer.Size;
920 idx_dst += cmd_list->IdxBuffer.Size;
921 }
922 VkMappedMemoryRange range[2] = {};
923 range[0].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
924 range[0].memory = vertex_mem;
925 range[0].size = VK_WHOLE_SIZE;
926 range[1].sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
927 range[1].memory = index_mem;
928 range[1].size = VK_WHOLE_SIZE;
929 VK_CHECK(device_data->vtable.FlushMappedMemoryRanges(device_data->device, 2, range));
930 device_data->vtable.UnmapMemory(device_data->device, vertex_mem);
931 device_data->vtable.UnmapMemory(device_data->device, index_mem);
932
933 /* Bind pipeline and descriptor sets */
934 device_data->vtable.CmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, data->pipeline);
935 VkDescriptorSet desc_set[1] = { data->descriptor_set };
936 device_data->vtable.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
937 data->pipeline_layout, 0, 1, desc_set, 0, NULL);
938
939 /* Bind vertex & index buffers */
940 VkBuffer vertex_buffers[1] = { vertex_buffer };
941 VkDeviceSize vertex_offset[1] = { 0 };
942 device_data->vtable.CmdBindVertexBuffers(command_buffer, 0, 1, vertex_buffers, vertex_offset);
943 device_data->vtable.CmdBindIndexBuffer(command_buffer, index_buffer, 0, VK_INDEX_TYPE_UINT16);
944
945 /* Setup viewport */
946 VkViewport viewport;
947 viewport.x = 0;
948 viewport.y = 0;
949 viewport.width = draw_data->DisplaySize.x;
950 viewport.height = draw_data->DisplaySize.y;
951 viewport.minDepth = 0.0f;
952 viewport.maxDepth = 1.0f;
953 device_data->vtable.CmdSetViewport(command_buffer, 0, 1, &viewport);
954
955
956 /* Setup scale and translation through push constants:
957 *
958 * Our visible imgui space lies from draw_data->DisplayPos (top left) to
959 * draw_data->DisplayPos + draw_data->DisplaySize (bottom right). DisplayPos
960 * is typically (0,0) for single viewport apps.
961 */
962 float scale[2];
963 scale[0] = 2.0f / draw_data->DisplaySize.x;
964 scale[1] = 2.0f / draw_data->DisplaySize.y;
965 float translate[2];
966 translate[0] = -1.0f - draw_data->DisplayPos.x * scale[0];
967 translate[1] = -1.0f - draw_data->DisplayPos.y * scale[1];
968 device_data->vtable.CmdPushConstants(command_buffer, data->pipeline_layout,
969 VK_SHADER_STAGE_VERTEX_BIT,
970 sizeof(float) * 0, sizeof(float) * 2, scale);
971 device_data->vtable.CmdPushConstants(command_buffer, data->pipeline_layout,
972 VK_SHADER_STAGE_VERTEX_BIT,
973 sizeof(float) * 2, sizeof(float) * 2, translate);
974
975 // Render the command lists:
976 int vtx_offset = 0;
977 int idx_offset = 0;
978 ImVec2 display_pos = draw_data->DisplayPos;
979 for (int n = 0; n < draw_data->CmdListsCount; n++)
980 {
981 const ImDrawList* cmd_list = draw_data->CmdLists[n];
982 for (int cmd_i = 0; cmd_i < cmd_list->CmdBuffer.Size; cmd_i++)
983 {
984 const ImDrawCmd* pcmd = &cmd_list->CmdBuffer[cmd_i];
985 // Apply scissor/clipping rectangle
986 // FIXME: We could clamp width/height based on clamped min/max values.
987 VkRect2D scissor;
988 scissor.offset.x = (int32_t)(pcmd->ClipRect.x - display_pos.x) > 0 ? (int32_t)(pcmd->ClipRect.x - display_pos.x) : 0;
989 scissor.offset.y = (int32_t)(pcmd->ClipRect.y - display_pos.y) > 0 ? (int32_t)(pcmd->ClipRect.y - display_pos.y) : 0;
990 scissor.extent.width = (uint32_t)(pcmd->ClipRect.z - pcmd->ClipRect.x);
991 scissor.extent.height = (uint32_t)(pcmd->ClipRect.w - pcmd->ClipRect.y + 1); // FIXME: Why +1 here?
992 device_data->vtable.CmdSetScissor(command_buffer, 0, 1, &scissor);
993
994 // Draw
995 device_data->vtable.CmdDrawIndexed(command_buffer, pcmd->ElemCount, 1, idx_offset, vtx_offset, 0);
996
997 idx_offset += pcmd->ElemCount;
998 }
999 vtx_offset += cmd_list->VtxBuffer.Size;
1000 }
1001
1002 device_data->vtable.CmdEndRenderPass(command_buffer);
1003 device_data->vtable.EndCommandBuffer(command_buffer);
1004
1005 if (data->submission_semaphore) {
1006 device_data->vtable.DestroySemaphore(device_data->device,
1007 data->submission_semaphore,
1008 NULL);
1009 }
1010 /* Create a fresh semaphore for this submission; it is signaled once the
 * overlay rendering completes so later work on the queue (the present, for
 * instance) can wait on it. */
1011 VkSemaphoreCreateInfo semaphore_info = {};
1012 semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
1013 VK_CHECK(device_data->vtable.CreateSemaphore(device_data->device, &semaphore_info,
1014 NULL, &data->submission_semaphore));
1015
1016 VkSubmitInfo submit_info = {};
1017 VkPipelineStageFlags stage_wait = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1018 submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1019 submit_info.commandBufferCount = 1;
1020 submit_info.pCommandBuffers = &command_buffer;
1021 submit_info.pWaitDstStageMask = &stage_wait;
1022 submit_info.waitSemaphoreCount = n_wait_semaphores;
1023 submit_info.pWaitSemaphores = wait_semaphores;
1024 submit_info.signalSemaphoreCount = 1;
1025 submit_info.pSignalSemaphores = &data->submission_semaphore;
1026
1027 device_data->vtable.QueueSubmit(device_data->graphic_queue->queue, 1, &submit_info, VK_NULL_HANDLE);
1028 }
1029
1030 static const uint32_t overlay_vert_spv[] = {
1031 #include "overlay.vert.spv.h"
1032 };
1033 static const uint32_t overlay_frag_spv[] = {
1034 #include "overlay.frag.spv.h"
1035 };
1036
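/* Build everything needed to draw ImGui into this swapchain: shader modules
 * from the embedded SPIR-V, the font sampler/descriptor set, a pipeline layout
 * exposing the scale+translate push constants, an alpha-blended graphics
 * pipeline with dynamic viewport/scissor, and the font atlas image/view. */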
1037 static void setup_swapchain_data_pipeline(struct swapchain_data *data)
1038 {
1039 struct device_data *device_data = data->device;
1040 VkShaderModule vert_module, frag_module;
1041
1042 /* Create shader modules */
1043 VkShaderModuleCreateInfo vert_info = {};
1044 vert_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
1045 vert_info.codeSize = sizeof(overlay_vert_spv);
1046 vert_info.pCode = overlay_vert_spv;
1047 VK_CHECK(device_data->vtable.CreateShaderModule(device_data->device,
1048 &vert_info, NULL, &vert_module));
1049 VkShaderModuleCreateInfo frag_info = {};
1050 frag_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
1051 frag_info.codeSize = sizeof(overlay_frag_spv);
1052 frag_info.pCode = overlay_frag_spv;
1053 VK_CHECK(device_data->vtable.CreateShaderModule(device_data->device,
1054 &frag_info, NULL, &frag_module));
1055
1056 /* Font sampler */
1057 VkSamplerCreateInfo sampler_info = {};
1058 sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
1059 sampler_info.magFilter = VK_FILTER_LINEAR;
1060 sampler_info.minFilter = VK_FILTER_LINEAR;
1061 sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
1062 sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1063 sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1064 sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1065 sampler_info.minLod = -1000;
1066 sampler_info.maxLod = 1000;
1067 sampler_info.maxAnisotropy = 1.0f;
1068 VK_CHECK(device_data->vtable.CreateSampler(device_data->device, &sampler_info,
1069 NULL, &data->font_sampler));
1070
1071 /* Descriptor pool */
1072 VkDescriptorPoolSize sampler_pool_size = {};
1073 sampler_pool_size.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1074 sampler_pool_size.descriptorCount = 1;
1075 VkDescriptorPoolCreateInfo desc_pool_info = {};
1076 desc_pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
1077 desc_pool_info.maxSets = 1;
1078 desc_pool_info.poolSizeCount = 1;
1079 desc_pool_info.pPoolSizes = &sampler_pool_size;
1080 VK_CHECK(device_data->vtable.CreateDescriptorPool(device_data->device,
1081 &desc_pool_info,
1082 NULL, &data->descriptor_pool));
1083
1084 /* Descriptor layout */
1085 VkSampler sampler[1] = { data->font_sampler };
1086 VkDescriptorSetLayoutBinding binding[1] = {};
1087 binding[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1088 binding[0].descriptorCount = 1;
1089 binding[0].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
1090 binding[0].pImmutableSamplers = sampler;
1091 VkDescriptorSetLayoutCreateInfo set_layout_info = {};
1092 set_layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1093 set_layout_info.bindingCount = 1;
1094 set_layout_info.pBindings = binding;
1095 VK_CHECK(device_data->vtable.CreateDescriptorSetLayout(device_data->device,
1096 &set_layout_info,
1097 NULL, &data->descriptor_layout));
1098
1099 /* Descriptor set */
1100 VkDescriptorSetAllocateInfo alloc_info = {};
1101 alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
1102 alloc_info.descriptorPool = data->descriptor_pool;
1103 alloc_info.descriptorSetCount = 1;
1104 alloc_info.pSetLayouts = &data->descriptor_layout;
1105 VK_CHECK(device_data->vtable.AllocateDescriptorSets(device_data->device,
1106 &alloc_info,
1107 &data->descriptor_set));
1108
1109 /* Constants: we are using 'vec2 offset' and 'vec2 scale' instead of a full
1110 * 3d projection matrix
1111 */
1112 VkPushConstantRange push_constants[1] = {};
1113 push_constants[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
1114 push_constants[0].offset = sizeof(float) * 0;
1115 push_constants[0].size = sizeof(float) * 4;
1116 VkPipelineLayoutCreateInfo layout_info = {};
1117 layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1118 layout_info.setLayoutCount = 1;
1119 layout_info.pSetLayouts = &data->descriptor_layout;
1120 layout_info.pushConstantRangeCount = 1;
1121 layout_info.pPushConstantRanges = push_constants;
1122 VK_CHECK(device_data->vtable.CreatePipelineLayout(device_data->device,
1123 &layout_info,
1124 NULL, &data->pipeline_layout));
1125
1126 VkPipelineShaderStageCreateInfo stage[2] = {};
1127 stage[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1128 stage[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
1129 stage[0].module = vert_module;
1130 stage[0].pName = "main";
1131 stage[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1132 stage[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
1133 stage[1].module = frag_module;
1134 stage[1].pName = "main";
1135
1136 VkVertexInputBindingDescription binding_desc[1] = {};
1137 binding_desc[0].stride = sizeof(ImDrawVert);
1138 binding_desc[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1139
1140 VkVertexInputAttributeDescription attribute_desc[3] = {};
1141 attribute_desc[0].location = 0;
1142 attribute_desc[0].binding = binding_desc[0].binding;
1143 attribute_desc[0].format = VK_FORMAT_R32G32_SFLOAT;
1144 attribute_desc[0].offset = IM_OFFSETOF(ImDrawVert, pos);
1145 attribute_desc[1].location = 1;
1146 attribute_desc[1].binding = binding_desc[0].binding;
1147 attribute_desc[1].format = VK_FORMAT_R32G32_SFLOAT;
1148 attribute_desc[1].offset = IM_OFFSETOF(ImDrawVert, uv);
1149 attribute_desc[2].location = 2;
1150 attribute_desc[2].binding = binding_desc[0].binding;
1151 attribute_desc[2].format = VK_FORMAT_R8G8B8A8_UNORM;
1152 attribute_desc[2].offset = IM_OFFSETOF(ImDrawVert, col);
1153
1154 VkPipelineVertexInputStateCreateInfo vertex_info = {};
1155 vertex_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1156 vertex_info.vertexBindingDescriptionCount = 1;
1157 vertex_info.pVertexBindingDescriptions = binding_desc;
1158 vertex_info.vertexAttributeDescriptionCount = 3;
1159 vertex_info.pVertexAttributeDescriptions = attribute_desc;
1160
1161 VkPipelineInputAssemblyStateCreateInfo ia_info = {};
1162 ia_info.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
1163 ia_info.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1164
1165 VkPipelineViewportStateCreateInfo viewport_info = {};
1166 viewport_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
1167 viewport_info.viewportCount = 1;
1168 viewport_info.scissorCount = 1;
1169
1170 VkPipelineRasterizationStateCreateInfo raster_info = {};
1171 raster_info.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
1172 raster_info.polygonMode = VK_POLYGON_MODE_FILL;
1173 raster_info.cullMode = VK_CULL_MODE_NONE;
1174 raster_info.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
1175 raster_info.lineWidth = 1.0f;
1176
1177 VkPipelineMultisampleStateCreateInfo ms_info = {};
1178 ms_info.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
1179 ms_info.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
1180
1181 VkPipelineColorBlendAttachmentState color_attachment[1] = {};
1182 color_attachment[0].blendEnable = VK_TRUE;
1183 color_attachment[0].srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
1184 color_attachment[0].dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
1185 color_attachment[0].colorBlendOp = VK_BLEND_OP_ADD;
1186 color_attachment[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
1187 color_attachment[0].dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
1188 color_attachment[0].alphaBlendOp = VK_BLEND_OP_ADD;
1189 color_attachment[0].colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
1190 VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
1191
1192 VkPipelineDepthStencilStateCreateInfo depth_info = {};
1193 depth_info.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
1194
1195 VkPipelineColorBlendStateCreateInfo blend_info = {};
1196 blend_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
1197 blend_info.attachmentCount = 1;
1198 blend_info.pAttachments = color_attachment;
1199
1200 VkDynamicState dynamic_states[2] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR };
1201 VkPipelineDynamicStateCreateInfo dynamic_state = {};
1202 dynamic_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
1203 dynamic_state.dynamicStateCount = (uint32_t)IM_ARRAYSIZE(dynamic_states);
1204 dynamic_state.pDynamicStates = dynamic_states;
1205
1206 VkGraphicsPipelineCreateInfo info = {};
1207 info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
1208 info.flags = 0;
1209 info.stageCount = 2;
1210 info.pStages = stage;
1211 info.pVertexInputState = &vertex_info;
1212 info.pInputAssemblyState = &ia_info;
1213 info.pViewportState = &viewport_info;
1214 info.pRasterizationState = &raster_info;
1215 info.pMultisampleState = &ms_info;
1216 info.pDepthStencilState = &depth_info;
1217 info.pColorBlendState = &blend_info;
1218 info.pDynamicState = &dynamic_state;
1219 info.layout = data->pipeline_layout;
1220 info.renderPass = data->render_pass;
1221 VK_CHECK(
1222 device_data->vtable.CreateGraphicsPipelines(device_data->device, VK_NULL_HANDLE,
1223 1, &info,
1224 NULL, &data->pipeline));
1225
1226 device_data->vtable.DestroyShaderModule(device_data->device, vert_module, NULL);
1227 device_data->vtable.DestroyShaderModule(device_data->device, frag_module, NULL);
1228
1229 ImGuiIO& io = ImGui::GetIO();
1230 unsigned char* pixels;
1231 int width, height;
1232 io.Fonts->GetTexDataAsRGBA32(&pixels, &width, &height);
1233
1234 /* Font image */
1235 VkImageCreateInfo image_info = {};
1236 image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1237 image_info.imageType = VK_IMAGE_TYPE_2D;
1238 image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
1239 image_info.extent.width = width;
1240 image_info.extent.height = height;
1241 image_info.extent.depth = 1;
1242 image_info.mipLevels = 1;
1243 image_info.arrayLayers = 1;
1244 image_info.samples = VK_SAMPLE_COUNT_1_BIT;
1245 image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
1246 image_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1247 image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1248 image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1249 VK_CHECK(device_data->vtable.CreateImage(device_data->device, &image_info,
1250 NULL, &data->font_image));
1251 VkMemoryRequirements font_image_req;
1252 device_data->vtable.GetImageMemoryRequirements(device_data->device,
1253 data->font_image, &font_image_req);
1254 VkMemoryAllocateInfo image_alloc_info = {};
1255 image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1256 image_alloc_info.allocationSize = font_image_req.size;
1257 image_alloc_info.memoryTypeIndex = vk_memory_type(device_data,
1258 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1259 font_image_req.memoryTypeBits);
1260 VK_CHECK(device_data->vtable.AllocateMemory(device_data->device, &image_alloc_info,
1261 NULL, &data->font_mem));
1262 VK_CHECK(device_data->vtable.BindImageMemory(device_data->device,
1263 data->font_image,
1264 data->font_mem, 0));
1265
1266 /* Font image view */
1267 VkImageViewCreateInfo view_info = {};
1268 view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1269 view_info.image = data->font_image;
1270 view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
1271 view_info.format = VK_FORMAT_R8G8B8A8_UNORM;
1272 view_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
1273 view_info.subresourceRange.levelCount = 1;
1274 view_info.subresourceRange.layerCount = 1;
1275 VK_CHECK(device_data->vtable.CreateImageView(device_data->device, &view_info,
1276 NULL, &data->font_image_view));
1277
1278 /* Descriptor set */
1279 VkDescriptorImageInfo desc_image[1] = {};
1280 desc_image[0].sampler = data->font_sampler;
1281 desc_image[0].imageView = data->font_image_view;
1282 desc_image[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1283 VkWriteDescriptorSet write_desc[1] = {};
1284 write_desc[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1285 write_desc[0].dstSet = data->descriptor_set;
1286 write_desc[0].descriptorCount = 1;
1287 write_desc[0].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1288 write_desc[0].pImageInfo = desc_image;
1289 device_data->vtable.UpdateDescriptorSets(device_data->device, 1, write_desc, 0, NULL);
1290 }
1291
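/* Per-swapchain initialization: an ImGui context sized to the swapchain, a
 * LOAD/STORE render pass whose attachment ends in PRESENT_SRC layout, one
 * image view and framebuffer per swapchain image, and a command pool with one
 * command buffer per overlay frame slot. */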
1292 static void setup_swapchain_data(struct swapchain_data *data,
1293 const VkSwapchainCreateInfoKHR *pCreateInfo)
1294 {
1295 data->width = pCreateInfo->imageExtent.width;
1296 data->height = pCreateInfo->imageExtent.height;
1297 data->format = pCreateInfo->imageFormat;
1298
1299 data->imgui_context = ImGui::CreateContext();
1300 ImGui::SetCurrentContext(data->imgui_context);
1301
1302 ImGui::GetIO().IniFilename = NULL;
1303 ImGui::GetIO().DisplaySize = ImVec2((float)data->width, (float)data->height);
1304
1305 struct device_data *device_data = data->device;
1306
1307 /* Render pass */
1308 VkAttachmentDescription attachment_desc = {};
1309 attachment_desc.format = pCreateInfo->imageFormat;
1310 attachment_desc.samples = VK_SAMPLE_COUNT_1_BIT;
1311 attachment_desc.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
1312 attachment_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1313 attachment_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1314 attachment_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1315 attachment_desc.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1316 attachment_desc.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
1317 VkAttachmentReference color_attachment = {};
1318 color_attachment.attachment = 0;
1319 color_attachment.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1320 VkSubpassDescription subpass = {};
1321 subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
1322 subpass.colorAttachmentCount = 1;
1323 subpass.pColorAttachments = &color_attachment;
1324 VkSubpassDependency dependency = {};
1325 dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
1326 dependency.dstSubpass = 0;
1327 dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1328 dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
1329 dependency.srcAccessMask = 0;
1330 dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
1331 VkRenderPassCreateInfo render_pass_info = {};
1332 render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1333 render_pass_info.attachmentCount = 1;
1334 render_pass_info.pAttachments = &attachment_desc;
1335 render_pass_info.subpassCount = 1;
1336 render_pass_info.pSubpasses = &subpass;
1337 render_pass_info.dependencyCount = 1;
1338 render_pass_info.pDependencies = &dependency;
1339 VK_CHECK(device_data->vtable.CreateRenderPass(device_data->device,
1340 &render_pass_info,
1341 NULL, &data->render_pass));
1342
1343 setup_swapchain_data_pipeline(data);
1344
1345 VK_CHECK(device_data->vtable.GetSwapchainImagesKHR(device_data->device,
1346 data->swapchain,
1347 &data->n_images,
1348 NULL));
1349
1350 data->images = ralloc_array(data, VkImage, data->n_images);
1351 data->image_views = ralloc_array(data, VkImageView, data->n_images);
1352 data->framebuffers = ralloc_array(data, VkFramebuffer, data->n_images);
1353
1354 VK_CHECK(device_data->vtable.GetSwapchainImagesKHR(device_data->device,
1355 data->swapchain,
1356 &data->n_images,
1357 data->images));
1358
1359 /* Image views */
1360 VkImageViewCreateInfo view_info = {};
1361 view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1362 view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
1363 view_info.format = pCreateInfo->imageFormat;
1364 view_info.components.r = VK_COMPONENT_SWIZZLE_R;
1365 view_info.components.g = VK_COMPONENT_SWIZZLE_G;
1366 view_info.components.b = VK_COMPONENT_SWIZZLE_B;
1367 view_info.components.a = VK_COMPONENT_SWIZZLE_A;
1368 view_info.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
1369 for (uint32_t i = 0; i < data->n_images; i++) {
1370 view_info.image = data->images[i];
1371 VK_CHECK(device_data->vtable.CreateImageView(device_data->device,
1372 &view_info, NULL,
1373 &data->image_views[i]));
1374 }
1375
1376 /* Framebuffers */
1377 VkImageView attachment[1];
1378 VkFramebufferCreateInfo fb_info = {};
1379 fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
1380 fb_info.renderPass = data->render_pass;
1381 fb_info.attachmentCount = 1;
1382 fb_info.pAttachments = attachment;
1383 fb_info.width = data->width;
1384 fb_info.height = data->height;
1385 fb_info.layers = 1;
1386 for (uint32_t i = 0; i < data->n_images; i++) {
1387 attachment[0] = data->image_views[i];
1388 VK_CHECK(device_data->vtable.CreateFramebuffer(device_data->device, &fb_info,
1389 NULL, &data->framebuffers[i]));
1390 }
1391
1392 /* Command buffer */
1393 VkCommandPoolCreateInfo cmd_buffer_pool_info = {};
1394 cmd_buffer_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
1395 cmd_buffer_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1396 cmd_buffer_pool_info.queueFamilyIndex = device_data->graphic_queue->family_index;
1397 VK_CHECK(device_data->vtable.CreateCommandPool(device_data->device,
1398 &cmd_buffer_pool_info,
1399 NULL, &data->command_pool));
1400
1401 VkCommandBuffer cmd_bufs[ARRAY_SIZE(data->frame_data)];
1402
1403 VkCommandBufferAllocateInfo cmd_buffer_info = {};
1404 cmd_buffer_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1405 cmd_buffer_info.commandPool = data->command_pool;
1406 cmd_buffer_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1407 cmd_buffer_info.commandBufferCount = 2;
1408 VK_CHECK(device_data->vtable.AllocateCommandBuffers(device_data->device,
1409 &cmd_buffer_info,
1410 cmd_bufs));
1411 for (uint32_t i = 0; i < ARRAY_SIZE(data->frame_data); i++) {
1412 VK_CHECK(device_data->set_device_loader_data(device_data->device,
1413 cmd_bufs[i]));
1414
1415 data->frame_data[i].command_buffer = cmd_bufs[i];
1416 }
1417 }
1418
1419 static void shutdown_swapchain_data(struct swapchain_data *data)
1420 {
1421 struct device_data *device_data = data->device;
1422
1423 for (uint32_t i = 0; i < data->n_images; i++) {
1424 device_data->vtable.DestroyImageView(device_data->device, data->image_views[i], NULL);
1425 device_data->vtable.DestroyFramebuffer(device_data->device, data->framebuffers[i], NULL);
1426 }
1427
1428 device_data->vtable.DestroyRenderPass(device_data->device, data->render_pass, NULL);
1429
1430 for (uint32_t i = 0; i < ARRAY_SIZE(data->frame_data); i++) {
1431 device_data->vtable.FreeCommandBuffers(device_data->device,
1432 data->command_pool,
1433 1, &data->frame_data[i].command_buffer);
1434 if (data->frame_data[i].vertex_buffer)
1435 device_data->vtable.DestroyBuffer(device_data->device, data->frame_data[i].vertex_buffer, NULL);
1436 if (data->frame_data[i].index_buffer)
1437 device_data->vtable.DestroyBuffer(device_data->device, data->frame_data[i].index_buffer, NULL);
1438 if (data->frame_data[i].vertex_buffer_mem)
1439 device_data->vtable.FreeMemory(device_data->device, data->frame_data[i].vertex_buffer_mem, NULL);
1440 if (data->frame_data[i].index_buffer_mem)
1441 device_data->vtable.FreeMemory(device_data->device, data->frame_data[i].index_buffer_mem, NULL);
1442 }
1443 device_data->vtable.DestroyCommandPool(device_data->device, data->command_pool, NULL);
1444
1445 if (data->submission_semaphore)
1446 device_data->vtable.DestroySemaphore(device_data->device, data->submission_semaphore, NULL);
1447
1448 device_data->vtable.DestroyPipeline(device_data->device, data->pipeline, NULL);
1449 device_data->vtable.DestroyPipelineLayout(device_data->device, data->pipeline_layout, NULL);
1450
1451 device_data->vtable.DestroyDescriptorPool(device_data->device,
1452 data->descriptor_pool, NULL);
1453 device_data->vtable.DestroyDescriptorSetLayout(device_data->device,
1454 data->descriptor_layout, NULL);
1455
1456 device_data->vtable.DestroySampler(device_data->device, data->font_sampler, NULL);
1457 device_data->vtable.DestroyImageView(device_data->device, data->font_image_view, NULL);
1458 device_data->vtable.DestroyImage(device_data->device, data->font_image, NULL);
1459 device_data->vtable.FreeMemory(device_data->device, data->font_mem, NULL);
1460
1461 device_data->vtable.DestroyBuffer(device_data->device, data->upload_font_buffer, NULL);
1462 device_data->vtable.FreeMemory(device_data->device, data->upload_font_buffer_mem, NULL);
1463
1464 ImGui::DestroyContext(data->imgui_context);
1465 }
1466
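/* Called once per presented swapchain right before the actual present:
 * snapshot the frame statistics and, unless the overlay display is
 * disabled, build the ImGui draw data and submit the overlay rendering
 * (waiting on the application's present semaphores).
 */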
1467 static void before_present(struct swapchain_data *swapchain_data,
1468 const VkSemaphore *wait_semaphores,
1469 unsigned n_wait_semaphores,
1470 unsigned imageIndex)
1471 {
1472 struct instance_data *instance_data = swapchain_data->device->instance;
1473
1474 snapshot_swapchain_frame(swapchain_data);
1475
1476 if (!instance_data->params.no_display && swapchain_data->n_frames > 0) {
1477 compute_swapchain_display(swapchain_data);
1478 render_swapchain_display(swapchain_data, wait_semaphores, n_wait_semaphores, imageIndex);
1479 }
1480 }
1481
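/* Swapchain hooks: piggy-back on creation/destruction to manage the
 * per-swapchain overlay resources.
 */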
1482 VKAPI_ATTR VkResult VKAPI_CALL overlay_CreateSwapchainKHR(
1483 VkDevice device,
1484 const VkSwapchainCreateInfoKHR* pCreateInfo,
1485 const VkAllocationCallbacks* pAllocator,
1486 VkSwapchainKHR* pSwapchain)
1487 {
1488 struct device_data *device_data = FIND_DEVICE_DATA(device);
1489 VkResult result = device_data->vtable.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
1490 if (result != VK_SUCCESS) return result;
1491
1492 struct swapchain_data *swapchain_data = new_swapchain_data(*pSwapchain, device_data);
1493 setup_swapchain_data(swapchain_data, pCreateInfo);
1494 return result;
1495 }
1496
1497 VKAPI_ATTR void VKAPI_CALL overlay_DestroySwapchainKHR(
1498 VkDevice device,
1499 VkSwapchainKHR swapchain,
1500 const VkAllocationCallbacks* pAllocator)
1501 {
1502 struct swapchain_data *swapchain_data = FIND_SWAPCHAIN_DATA(swapchain);
1503
1504 shutdown_swapchain_data(swapchain_data);
1505 swapchain_data->device->vtable.DestroySwapchainKHR(device, swapchain, pAllocator);
1506 destroy_swapchain_data(swapchain_data);
1507 }
1508
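/* Present is where everything comes together:
 *
 * 1. Signal queries_fence with an empty submit and wait on it so that the
 *    command buffers submitted since the last present have completed, then
 *    read back their pipeline statistics (OVERLAY_QUERY_COUNT 32-bit values
 *    covering OVERLAY_PARAM_ENABLED_vertices..compute_invocations) and
 *    their begin/end GPU timestamps into the device frame stats.
 *
 * 2. Unless the display is disabled, present each swapchain separately so
 *    that the present operation can wait on that swapchain's overlay
 *    submission semaphore.
 */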
1509 VKAPI_ATTR VkResult VKAPI_CALL overlay_QueuePresentKHR(
1510 VkQueue queue,
1511 const VkPresentInfoKHR* pPresentInfo)
1512 {
1513 struct queue_data *queue_data = FIND_QUEUE_DATA(queue);
1514 struct device_data *device_data = queue_data->device;
1515 struct instance_data *instance_data = device_data->instance;
1516 uint32_t query_results[OVERLAY_QUERY_COUNT];
1517
1518 if (list_length(&queue_data->running_command_buffer) > 0) {
1519 /* Before getting the query results, make sure the operations have
1520 * completed.
1521 */
1522 VkResult err = device_data->vtable.ResetFences(device_data->device,
1523 1, &queue_data->queries_fence);
1524 check_vk_result(err);
1525 err = device_data->vtable.QueueSubmit(queue, 0, NULL, queue_data->queries_fence);
1526 check_vk_result(err);
1527 err = device_data->vtable.WaitForFences(device_data->device,
1528 1, &queue_data->queries_fence,
1529 VK_FALSE, UINT64_MAX);
1530 check_vk_result(err);
1531
1532 /* Now get the results. */
1533 list_for_each_entry_safe(struct command_buffer_data, cmd_buffer_data,
1534 &queue_data->running_command_buffer, link) {
1535 list_delinit(&cmd_buffer_data->link);
1536
1537 if (cmd_buffer_data->pipeline_query_pool) {
1538 memset(query_results, 0, sizeof(query_results));
1539 err =
1540 device_data->vtable.GetQueryPoolResults(device_data->device,
1541 cmd_buffer_data->pipeline_query_pool,
1542 cmd_buffer_data->query_index, 1,
1543 sizeof(uint32_t) * OVERLAY_QUERY_COUNT,
1544 query_results, 0, VK_QUERY_RESULT_WAIT_BIT);
1545 check_vk_result(err);
1546
1547 for (uint32_t i = OVERLAY_PARAM_ENABLED_vertices;
1548 i <= OVERLAY_PARAM_ENABLED_compute_invocations; i++) {
1549 device_data->frame_stats.stats[i] += query_results[i - OVERLAY_PARAM_ENABLED_vertices];
1550 }
1551 }
1552 if (cmd_buffer_data->timestamp_query_pool) {
1553 uint64_t gpu_timestamps[2] = { 0 };
1554 err =
1555 device_data->vtable.GetQueryPoolResults(device_data->device,
1556 cmd_buffer_data->timestamp_query_pool,
1557 cmd_buffer_data->query_index * 2, 2,
1558 2 * sizeof(uint64_t), gpu_timestamps, sizeof(uint64_t),
1559 VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT);
1560 check_vk_result(err);
1561
1562 gpu_timestamps[0] &= queue_data->timestamp_mask;
1563 gpu_timestamps[1] &= queue_data->timestamp_mask;
1564 device_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_gpu_timing] +=
1565 (gpu_timestamps[1] - gpu_timestamps[0]) *
1566 device_data->properties.limits.timestampPeriod;
1567 }
1568 }
1569 }
1570
1571 /* Unless the overlay display is disabled, we need to add our overlay
1572 * drawing semaphore to the list of semaphores to wait on. If we don't
1573 * do that, the presented picture might have incomplete overlay drawings.
1574 */
1575 VkResult result = VK_SUCCESS;
1576 if (instance_data->params.no_display) {
1577 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
1578 VkSwapchainKHR swapchain = pPresentInfo->pSwapchains[i];
1579 struct swapchain_data *swapchain_data = FIND_SWAPCHAIN_DATA(swapchain);
1580
1581 before_present(swapchain_data,
1582 pPresentInfo->pWaitSemaphores,
1583 pPresentInfo->waitSemaphoreCount,
1584 pPresentInfo->pImageIndices[i]);
1585 }
1586 result = queue_data->device->vtable.QueuePresentKHR(queue, pPresentInfo);
1587 } else {
1588 for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
1589 VkSwapchainKHR swapchain = pPresentInfo->pSwapchains[i];
1590 struct swapchain_data *swapchain_data = FIND_SWAPCHAIN_DATA(swapchain);
1591 VkPresentInfoKHR present_info = *pPresentInfo;
1592 present_info.swapchainCount = 1;
1593 present_info.pSwapchains = &swapchain;
1594
1595 before_present(swapchain_data,
1596 pPresentInfo->pWaitSemaphores,
1597 pPresentInfo->waitSemaphoreCount,
1598 pPresentInfo->pImageIndices[i]);
1599 /* Because the submission of the overlay draw already waits on the
1600 * semaphores handed to us for present, this present operation doesn't
1601 * need to wait on them as well; it only needs to wait on the overlay
1602 * submission semaphore.
1603 */
1604 present_info.pWaitSemaphores = &swapchain_data->submission_semaphore;
1605 present_info.waitSemaphoreCount = 1;
1606
1607 VkResult chain_result = queue_data->device->vtable.QueuePresentKHR(queue, &present_info);
1608 if (pPresentInfo->pResults)
1609 pPresentInfo->pResults[i] = chain_result;
1610 if (chain_result != VK_SUCCESS && result == VK_SUCCESS)
1611 result = chain_result;
1612 }
1613 }
1614 return result;
1615 }
1616
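/* Measure how long the application blocks in vkAcquireNextImage*KHR by
 * timestamping around the call; the accumulated time and call count feed
 * the acquire_timing/acquire statistics.
 */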
1617 VKAPI_ATTR VkResult VKAPI_CALL overlay_AcquireNextImageKHR(
1618 VkDevice device,
1619 VkSwapchainKHR swapchain,
1620 uint64_t timeout,
1621 VkSemaphore semaphore,
1622 VkFence fence,
1623 uint32_t* pImageIndex)
1624 {
1625 struct swapchain_data *swapchain_data = FIND_SWAPCHAIN_DATA(swapchain);
1626 struct device_data *device_data = swapchain_data->device;
1627
1628 uint64_t ts0 = os_time_get();
1629 VkResult result = device_data->vtable.AcquireNextImageKHR(device, swapchain, timeout,
1630 semaphore, fence, pImageIndex);
1631 uint64_t ts1 = os_time_get();
1632
1633 swapchain_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_acquire_timing] += ts1 - ts0;
1634 swapchain_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_acquire]++;
1635
1636 return result;
1637 }
1638
1639 VKAPI_ATTR VkResult VKAPI_CALL overlay_AcquireNextImage2KHR(
1640 VkDevice device,
1641 const VkAcquireNextImageInfoKHR* pAcquireInfo,
1642 uint32_t* pImageIndex)
1643 {
1644 struct swapchain_data *swapchain_data = FIND_SWAPCHAIN_DATA(pAcquireInfo->swapchain);
1645 struct device_data *device_data = swapchain_data->device;
1646
1647 uint64_t ts0 = os_time_get();
1648 VkResult result = device_data->vtable.AcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
1649 uint64_t ts1 = os_time_get();
1650
1651 swapchain_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_acquire_timing] += ts1 - ts0;
1652 swapchain_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_acquire]++;
1653
1654 return result;
1655 }
1656
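/* The vkCmd* hooks below only bump the matching counter in the command
 * buffer's stats and then forward the call to the next layer; the counters
 * are folded into the device frame stats at vkQueueSubmit() time.
 */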
1657 VKAPI_ATTR void VKAPI_CALL overlay_CmdDraw(
1658 VkCommandBuffer commandBuffer,
1659 uint32_t vertexCount,
1660 uint32_t instanceCount,
1661 uint32_t firstVertex,
1662 uint32_t firstInstance)
1663 {
1664 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1665 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw]++;
1666 struct device_data *device_data = cmd_buffer_data->device;
1667 device_data->vtable.CmdDraw(commandBuffer, vertexCount, instanceCount,
1668 firstVertex, firstInstance);
1669 }
1670
1671 VKAPI_ATTR void VKAPI_CALL overlay_CmdDrawIndexed(
1672 VkCommandBuffer commandBuffer,
1673 uint32_t indexCount,
1674 uint32_t instanceCount,
1675 uint32_t firstIndex,
1676 int32_t vertexOffset,
1677 uint32_t firstInstance)
1678 {
1679 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1680 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw_indexed]++;
1681 struct device_data *device_data = cmd_buffer_data->device;
1682 device_data->vtable.CmdDrawIndexed(commandBuffer, indexCount, instanceCount,
1683 firstIndex, vertexOffset, firstInstance);
1684 }
1685
1686 VKAPI_ATTR void VKAPI_CALL overlay_CmdDrawIndirect(
1687 VkCommandBuffer commandBuffer,
1688 VkBuffer buffer,
1689 VkDeviceSize offset,
1690 uint32_t drawCount,
1691 uint32_t stride)
1692 {
1693 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1694 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw_indirect]++;
1695 struct device_data *device_data = cmd_buffer_data->device;
1696 device_data->vtable.CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
1697 }
1698
1699 VKAPI_ATTR void VKAPI_CALL overlay_CmdDrawIndexedIndirect(
1700 VkCommandBuffer commandBuffer,
1701 VkBuffer buffer,
1702 VkDeviceSize offset,
1703 uint32_t drawCount,
1704 uint32_t stride)
1705 {
1706 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1707 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw_indexed_indirect]++;
1708 struct device_data *device_data = cmd_buffer_data->device;
1709 device_data->vtable.CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
1710 }
1711
1712 VKAPI_ATTR void VKAPI_CALL overlay_CmdDrawIndirectCountKHR(
1713 VkCommandBuffer commandBuffer,
1714 VkBuffer buffer,
1715 VkDeviceSize offset,
1716 VkBuffer countBuffer,
1717 VkDeviceSize countBufferOffset,
1718 uint32_t maxDrawCount,
1719 uint32_t stride)
1720 {
1721 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1722 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw_indirect_count]++;
1723 struct device_data *device_data = cmd_buffer_data->device;
1724 device_data->vtable.CmdDrawIndirectCountKHR(commandBuffer, buffer, offset,
1725 countBuffer, countBufferOffset,
1726 maxDrawCount, stride);
1727 }
1728
1729 VKAPI_ATTR void VKAPI_CALL overlay_CmdDrawIndexedIndirectCountKHR(
1730 VkCommandBuffer commandBuffer,
1731 VkBuffer buffer,
1732 VkDeviceSize offset,
1733 VkBuffer countBuffer,
1734 VkDeviceSize countBufferOffset,
1735 uint32_t maxDrawCount,
1736 uint32_t stride)
1737 {
1738 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1739 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_draw_indexed_indirect_count]++;
1740 struct device_data *device_data = cmd_buffer_data->device;
1741 device_data->vtable.CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset,
1742 countBuffer, countBufferOffset,
1743 maxDrawCount, stride);
1744 }
1745
1746 VKAPI_ATTR void VKAPI_CALL overlay_CmdDispatch(
1747 VkCommandBuffer commandBuffer,
1748 uint32_t groupCountX,
1749 uint32_t groupCountY,
1750 uint32_t groupCountZ)
1751 {
1752 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1753 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_dispatch]++;
1754 struct device_data *device_data = cmd_buffer_data->device;
1755 device_data->vtable.CmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
1756 }
1757
1758 VKAPI_ATTR void VKAPI_CALL overlay_CmdDispatchIndirect(
1759 VkCommandBuffer commandBuffer,
1760 VkBuffer buffer,
1761 VkDeviceSize offset)
1762 {
1763 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1764 cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_dispatch_indirect]++;
1765 struct device_data *device_data = cmd_buffer_data->device;
1766 device_data->vtable.CmdDispatchIndirect(commandBuffer, buffer, offset);
1767 }
1768
1769 VKAPI_ATTR void VKAPI_CALL overlay_CmdBindPipeline(
1770 VkCommandBuffer commandBuffer,
1771 VkPipelineBindPoint pipelineBindPoint,
1772 VkPipeline pipeline)
1773 {
1774 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1775 switch (pipelineBindPoint) {
1776 case VK_PIPELINE_BIND_POINT_GRAPHICS: cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_pipeline_graphics]++; break;
1777 case VK_PIPELINE_BIND_POINT_COMPUTE: cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_pipeline_compute]++; break;
1778 case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV: cmd_buffer_data->stats.stats[OVERLAY_PARAM_ENABLED_pipeline_raytracing]++; break;
1779 default: break;
1780 }
1781 struct device_data *device_data = cmd_buffer_data->device;
1782 device_data->vtable.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1783 }
1784
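/* For secondary command buffers we only patch the inheritance info so the
 * pipeline statistics recorded in the primary remain valid (the pNext
 * chain is cloned so the application's structures are left untouched).
 * For primary command buffers that own queries, reset them and record the
 * begin query / start timestamp as the first commands.
 */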
1785 VKAPI_ATTR VkResult VKAPI_CALL overlay_BeginCommandBuffer(
1786 VkCommandBuffer commandBuffer,
1787 const VkCommandBufferBeginInfo* pBeginInfo)
1788 {
1789 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1790 struct device_data *device_data = cmd_buffer_data->device;
1791
1792 /* We don't record any queries in secondary command buffers; just make
1793 * sure we have the right inheritance flags.
1794 */
1795 if (cmd_buffer_data->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
1796 VkCommandBufferBeginInfo *begin_info = (VkCommandBufferBeginInfo *)
1797 clone_chain((const struct VkBaseInStructure *)pBeginInfo);
1798 VkCommandBufferInheritanceInfo *parent_inhe_info = (VkCommandBufferInheritanceInfo *)
1799 vk_find_struct(begin_info, COMMAND_BUFFER_INHERITANCE_INFO);
1800 VkCommandBufferInheritanceInfo inhe_info = {
1801 VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
1802 NULL,
1803 VK_NULL_HANDLE,
1804 0,
1805 VK_NULL_HANDLE,
1806 VK_FALSE,
1807 0,
1808 overlay_query_flags,
1809 };
1810
1811 if (parent_inhe_info)
1812 parent_inhe_info->pipelineStatistics = overlay_query_flags;
1813 else {
1814 inhe_info.pNext = begin_info->pNext;
1815 begin_info->pNext = &inhe_info;
1816 }
1817
1818 VkResult result = device_data->vtable.BeginCommandBuffer(commandBuffer, pBeginInfo);
1819
1820 if (!parent_inhe_info)
1821 begin_info->pNext = inhe_info.pNext;
1822
1823 free_chain((struct VkBaseOutStructure *)begin_info);
1824
1825 return result;
1826 }
1827
1828 /* Primary command buffers with no queries. */
1829 if (!cmd_buffer_data->pipeline_query_pool && !cmd_buffer_data->timestamp_query_pool)
1830 return device_data->vtable.BeginCommandBuffer(commandBuffer, pBeginInfo);
1831
1832 /* Otherwise record a begin query as the first command. */
1833 VkResult result = device_data->vtable.BeginCommandBuffer(commandBuffer, pBeginInfo);
1834
1835 if (result == VK_SUCCESS) {
1836 if (cmd_buffer_data->pipeline_query_pool) {
1837 device_data->vtable.CmdResetQueryPool(commandBuffer,
1838 cmd_buffer_data->pipeline_query_pool,
1839 cmd_buffer_data->query_index, 1);
1840 }
1841 if (cmd_buffer_data->timestamp_query_pool) {
1842 device_data->vtable.CmdResetQueryPool(commandBuffer,
1843 cmd_buffer_data->timestamp_query_pool,
1844 cmd_buffer_data->query_index * 2, 2);
1845 }
1846 if (cmd_buffer_data->pipeline_query_pool) {
1847 device_data->vtable.CmdBeginQuery(commandBuffer,
1848 cmd_buffer_data->pipeline_query_pool,
1849 cmd_buffer_data->query_index, 0);
1850 }
1851 if (cmd_buffer_data->timestamp_query_pool) {
1852 device_data->vtable.CmdWriteTimestamp(commandBuffer,
1853 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1854 cmd_buffer_data->timestamp_query_pool,
1855 cmd_buffer_data->query_index * 2);
1856 }
1857 }
1858
1859 return result;
1860 }
1861
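/* Close the queries opened in overlay_BeginCommandBuffer(): write the end
 * timestamp and end the pipeline statistics query before ending the
 * command buffer.
 */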
1862 VKAPI_ATTR VkResult VKAPI_CALL overlay_EndCommandBuffer(
1863 VkCommandBuffer commandBuffer)
1864 {
1865 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1866 struct device_data *device_data = cmd_buffer_data->device;
1867
1868 if (cmd_buffer_data->timestamp_query_pool) {
1869 device_data->vtable.CmdWriteTimestamp(commandBuffer,
1870 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1871 cmd_buffer_data->timestamp_query_pool,
1872 cmd_buffer_data->query_index * 2 + 1);
1873 }
1874 if (cmd_buffer_data->pipeline_query_pool) {
1875 device_data->vtable.CmdEndQuery(commandBuffer,
1876 cmd_buffer_data->pipeline_query_pool,
1877 cmd_buffer_data->query_index);
1878 }
1879
1880 return device_data->vtable.EndCommandBuffer(commandBuffer);
1881 }
1882
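/* Resetting a command buffer also resets the statistics we accumulated
 * for it.
 */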
1883 VKAPI_ATTR VkResult VKAPI_CALL overlay_ResetCommandBuffer(
1884 VkCommandBuffer commandBuffer,
1885 VkCommandBufferResetFlags flags)
1886 {
1887 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1888 struct device_data *device_data = cmd_buffer_data->device;
1889
1890 memset(&cmd_buffer_data->stats, 0, sizeof(cmd_buffer_data->stats));
1891
1892 return device_data->vtable.ResetCommandBuffer(commandBuffer, flags);
1893 }
1894
1895 VKAPI_ATTR void VKAPI_CALL overlay_CmdExecuteCommands(
1896 VkCommandBuffer commandBuffer,
1897 uint32_t commandBufferCount,
1898 const VkCommandBuffer* pCommandBuffers)
1899 {
1900 struct command_buffer_data *cmd_buffer_data = FIND_CMD_BUFFER_DATA(commandBuffer);
1901 struct device_data *device_data = cmd_buffer_data->device;
1902
1903 /* Add the stats of the executed command buffers to the primary one. */
1904 for (uint32_t c = 0; c < commandBufferCount; c++) {
1905 struct command_buffer_data *sec_cmd_buffer_data = FIND_CMD_BUFFER_DATA(pCommandBuffers[c]);
1906
1907 for (uint32_t s = 0; s < OVERLAY_PARAM_ENABLED_MAX; s++)
1908 cmd_buffer_data->stats.stats[s] += sec_cmd_buffer_data->stats.stats[s];
1909 }
1910
1911 device_data->vtable.CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
1912 }
1913
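/* One pipeline-statistics query pool and/or one timestamp query pool is
 * created per vkAllocateCommandBuffers() call, with one query (two for
 * timestamps) per command buffer. The pools are shared by the whole
 * allocation, so map_object() stores the command buffer count as a
 * reference count for overlay_FreeCommandBuffers().
 */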
1914 VKAPI_ATTR VkResult VKAPI_CALL overlay_AllocateCommandBuffers(
1915 VkDevice device,
1916 const VkCommandBufferAllocateInfo* pAllocateInfo,
1917 VkCommandBuffer* pCommandBuffers)
1918 {
1919 struct device_data *device_data = FIND_DEVICE_DATA(device);
1920 VkResult result =
1921 device_data->vtable.AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
1922 if (result != VK_SUCCESS)
1923 return result;
1924
1925 VkQueryPool pipeline_query_pool = VK_NULL_HANDLE;
1926 VkQueryPool timestamp_query_pool = VK_NULL_HANDLE;
1927 if (device_data->instance->pipeline_statistics_enabled &&
1928 pAllocateInfo->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
1929 VkQueryPoolCreateInfo pool_info = {
1930 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
1931 NULL,
1932 0,
1933 VK_QUERY_TYPE_PIPELINE_STATISTICS,
1934 pAllocateInfo->commandBufferCount,
1935 overlay_query_flags,
1936 };
1937 VkResult err =
1938 device_data->vtable.CreateQueryPool(device_data->device, &pool_info,
1939 NULL, &pipeline_query_pool);
1940 check_vk_result(err);
1941 }
1942 if (device_data->instance->params.enabled[OVERLAY_PARAM_ENABLED_gpu_timing]) {
1943 VkQueryPoolCreateInfo pool_info = {
1944 VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
1945 NULL,
1946 0,
1947 VK_QUERY_TYPE_TIMESTAMP,
1948 pAllocateInfo->commandBufferCount * 2,
1949 0,
1950 };
1951 VkResult err =
1952 device_data->vtable.CreateQueryPool(device_data->device, &pool_info,
1953 NULL, &timestamp_query_pool);
1954 check_vk_result(err);
1955 }
1956
1957 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
1958 new_command_buffer_data(pCommandBuffers[i], pAllocateInfo->level,
1959 pipeline_query_pool, timestamp_query_pool,
1960 i, device_data);
1961 }
1962
1963 if (pipeline_query_pool)
1964 map_object(pipeline_query_pool, (void *)(uintptr_t) pAllocateInfo->commandBufferCount);
1965 if (timestamp_query_pool)
1966 map_object(timestamp_query_pool, (void *)(uintptr_t) pAllocateInfo->commandBufferCount);
1967
1968 return result;
1969 }
1970
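/* Drop the query pool references taken in overlay_AllocateCommandBuffers()
 * and destroy the pools once the last command buffer using them is freed.
 */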
1971 VKAPI_ATTR void VKAPI_CALL overlay_FreeCommandBuffers(
1972 VkDevice device,
1973 VkCommandPool commandPool,
1974 uint32_t commandBufferCount,
1975 const VkCommandBuffer* pCommandBuffers)
1976 {
1977 struct device_data *device_data = FIND_DEVICE_DATA(device);
1978 for (uint32_t i = 0; i < commandBufferCount; i++) {
1979 struct command_buffer_data *cmd_buffer_data =
1980 FIND_CMD_BUFFER_DATA(pCommandBuffers[i]);
1981 uint64_t count = (uintptr_t)find_object_data((void *)cmd_buffer_data->pipeline_query_pool);
1982 if (count == 1) {
1983 unmap_object(cmd_buffer_data->pipeline_query_pool);
1984 device_data->vtable.DestroyQueryPool(device_data->device,
1985 cmd_buffer_data->pipeline_query_pool, NULL);
1986 } else if (count != 0) {
1987 map_object(cmd_buffer_data->pipeline_query_pool, (void *)(uintptr_t)(count - 1));
1988 }
1989 count = (uintptr_t)find_object_data((void *)cmd_buffer_data->timestamp_query_pool);
1990 if (count == 1) {
1991 unmap_object(cmd_buffer_data->timestamp_query_pool);
1992 device_data->vtable.DestroyQueryPool(device_data->device,
1993 cmd_buffer_data->timestamp_query_pool, NULL);
1994 } else if (count != 0) {
1995 map_object(cmd_buffer_data->timestamp_query_pool, (void *)(uintptr_t)(count - 1));
1996 }
1997 destroy_command_buffer_data(cmd_buffer_data);
1998 }
1999
2000 device_data->vtable.FreeCommandBuffers(device, commandPool,
2001 commandBufferCount, pCommandBuffers);
2002 }
2003
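/* Merge the per-command-buffer counters into the device frame stats and
 * remember command buffers carrying query pools on the queue so their
 * results can be read back at present time.
 */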
2004 VKAPI_ATTR VkResult VKAPI_CALL overlay_QueueSubmit(
2005 VkQueue queue,
2006 uint32_t submitCount,
2007 const VkSubmitInfo* pSubmits,
2008 VkFence fence)
2009 {
2010 struct queue_data *queue_data = FIND_QUEUE_DATA(queue);
2011 struct device_data *device_data = queue_data->device;
2012
2013 device_data->frame_stats.stats[OVERLAY_PARAM_ENABLED_submit]++;
2014
2015 for (uint32_t s = 0; s < submitCount; s++) {
2016 for (uint32_t c = 0; c < pSubmits[s].commandBufferCount; c++) {
2017 struct command_buffer_data *cmd_buffer_data =
2018 FIND_CMD_BUFFER_DATA(pSubmits[s].pCommandBuffers[c]);
2019
2020 /* Merge the submitted command buffer stats into the device's frame stats. */
2021 for (uint32_t st = 0; st < OVERLAY_PARAM_ENABLED_MAX; st++)
2022 device_data->frame_stats.stats[st] += cmd_buffer_data->stats.stats[st];
2023
2024 /* Attach the command buffer to the queue so we remember to read its
2025 * pipeline statistics & timestamps at QueuePresent().
2026 */
2027 if (!cmd_buffer_data->pipeline_query_pool &&
2028 !cmd_buffer_data->timestamp_query_pool)
2029 continue;
2030
2031 if (list_empty(&cmd_buffer_data->link)) {
2032 list_addtail(&cmd_buffer_data->link,
2033 &queue_data->running_command_buffer);
2034 } else {
2035 fprintf(stderr, "Command buffer submitted multiple times before present.\n"
2036 "This could lead to invalid data.\n");
2037 }
2038 }
2039 }
2040
2041 return device_data->vtable.QueueSubmit(queue, submitCount, pSubmits, fence);
2042 }
2043
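/* Device creation: chain down to the next layer, forcing the
 * inheritedQueries & pipelineStatisticsQuery features on when pipeline
 * statistics are requested, then build our dispatch table and map the
 * device's queues.
 */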
2044 VKAPI_ATTR VkResult VKAPI_CALL overlay_CreateDevice(
2045 VkPhysicalDevice physicalDevice,
2046 const VkDeviceCreateInfo* pCreateInfo,
2047 const VkAllocationCallbacks* pAllocator,
2048 VkDevice* pDevice)
2049 {
2050 struct instance_data *instance_data = FIND_PHYSICAL_DEVICE_DATA(physicalDevice);
2051 VkLayerDeviceCreateInfo *chain_info =
2052 get_device_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
2053
2054 assert(chain_info->u.pLayerInfo);
2055 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
2056 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
2057 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
2058 if (fpCreateDevice == NULL) {
2059 return VK_ERROR_INITIALIZATION_FAILED;
2060 }
2061
2062 // Advance the link info for the next element on the chain
2063 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
2064
2065 VkPhysicalDeviceFeatures device_features = {};
2066 VkDeviceCreateInfo device_info = *pCreateInfo;
2067
2068 if (pCreateInfo->pEnabledFeatures)
2069 device_features = *(pCreateInfo->pEnabledFeatures);
2070 if (instance_data->pipeline_statistics_enabled) {
2071 device_features.inheritedQueries = true;
2072 device_features.pipelineStatisticsQuery = true;
2073 }
2074 device_info.pEnabledFeatures = &device_features;
2075
2076
2077 VkResult result = fpCreateDevice(physicalDevice, &device_info, pAllocator, pDevice);
2078 if (result != VK_SUCCESS) return result;
2079
2080 struct device_data *device_data = new_device_data(*pDevice, instance_data);
2081 device_data->physical_device = physicalDevice;
2082 vk_load_device_commands(*pDevice, fpGetDeviceProcAddr, &device_data->vtable);
2083
2084 instance_data->vtable.GetPhysicalDeviceProperties(device_data->physical_device,
2085 &device_data->properties);
2086
2087 VkLayerDeviceCreateInfo *load_data_info =
2088 get_device_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
2089 device_data->set_device_loader_data = load_data_info->u.pfnSetDeviceLoaderData;
2090
2091 device_map_queues(device_data, pCreateInfo);
2092
2093 return result;
2094 }
2095
2096 VKAPI_ATTR void VKAPI_CALL overlay_DestroyDevice(
2097 VkDevice device,
2098 const VkAllocationCallbacks* pAllocator)
2099 {
2100 struct device_data *device_data = FIND_DEVICE_DATA(device);
2101 device_unmap_queues(device_data);
2102 device_data->vtable.DestroyDevice(device, pAllocator);
2103 destroy_device_data(device_data);
2104 }
2105
2106 VKAPI_ATTR VkResult VKAPI_CALL overlay_CreateInstance(
2107 const VkInstanceCreateInfo* pCreateInfo,
2108 const VkAllocationCallbacks* pAllocator,
2109 VkInstance* pInstance)
2110 {
2111 VkLayerInstanceCreateInfo *chain_info =
2112 get_instance_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
2113
2114 assert(chain_info->u.pLayerInfo);
2115 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
2116 chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
2117 PFN_vkCreateInstance fpCreateInstance =
2118 (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
2119 if (fpCreateInstance == NULL) {
2120 return VK_ERROR_INITIALIZATION_FAILED;
2121 }
2122
2123 // Advance the link info for the next element on the chain
2124 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
2125
2126 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
2127 if (result != VK_SUCCESS) return result;
2128
2129 struct instance_data *instance_data = new_instance_data(*pInstance);
2130 vk_load_instance_commands(instance_data->instance,
2131 fpGetInstanceProcAddr,
2132 &instance_data->vtable);
2133 instance_data_map_physical_devices(instance_data, true);
2134
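/* The overlay is configured through the VK_LAYER_MESA_OVERLAY_CONFIG
 * environment variable; the accepted option names are defined in
 * overlay_params.h. As an illustration (assuming the comma-separated
 * format handled by parse_overlay_env()), something like
 *
 *    VK_LAYER_MESA_OVERLAY_CONFIG=submit,draw,gpu_timing ./application
 *
 * would enable the submit/draw counters and GPU timing.
 */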
2135 parse_overlay_env(&instance_data->params, getenv("VK_LAYER_MESA_OVERLAY_CONFIG"));
2136
2137 for (int i = OVERLAY_PARAM_ENABLED_vertices;
2138 i <= OVERLAY_PARAM_ENABLED_compute_invocations; i++) {
2139 if (instance_data->params.enabled[i]) {
2140 instance_data->pipeline_statistics_enabled = true;
2141 break;
2142 }
2143 }
2144
2145 return result;
2146 }
2147
2148 VKAPI_ATTR void VKAPI_CALL overlay_DestroyInstance(
2149 VkInstance instance,
2150 const VkAllocationCallbacks* pAllocator)
2151 {
2152 struct instance_data *instance_data = FIND_INSTANCE_DATA(instance);
2153 instance_data_map_physical_devices(instance_data, false);
2154 instance_data->vtable.DestroyInstance(instance, pAllocator);
2155 destroy_instance_data(instance_data);
2156 }
2157
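/* Name -> function pointer table consulted by our vkGet*ProcAddr
 * implementations; anything not listed here is resolved through the next
 * layer in the chain.
 */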
2158 static const struct {
2159 const char *name;
2160 void *ptr;
2161 } name_to_funcptr_map[] = {
2162 { "vkGetDeviceProcAddr", (void *) vkGetDeviceProcAddr },
2163 #define ADD_HOOK(fn) { "vk" # fn, (void *) overlay_ ## fn }
2164 ADD_HOOK(AllocateCommandBuffers),
2165 ADD_HOOK(FreeCommandBuffers),
2166 ADD_HOOK(ResetCommandBuffer),
2167 ADD_HOOK(BeginCommandBuffer),
2168 ADD_HOOK(EndCommandBuffer),
2169 ADD_HOOK(CmdExecuteCommands),
2170
2171 ADD_HOOK(CmdDraw),
2172 ADD_HOOK(CmdDrawIndexed),
ADD_HOOK(CmdDrawIndirect),
2173 ADD_HOOK(CmdDrawIndexedIndirect),
2174 ADD_HOOK(CmdDispatch),
2175 ADD_HOOK(CmdDispatchIndirect),
2176 ADD_HOOK(CmdDrawIndirectCountKHR),
2177 ADD_HOOK(CmdDrawIndexedIndirectCountKHR),
2178
2179 ADD_HOOK(CmdBindPipeline),
2180
2181 ADD_HOOK(CreateSwapchainKHR),
2182 ADD_HOOK(QueuePresentKHR),
2183 ADD_HOOK(DestroySwapchainKHR),
2184 ADD_HOOK(AcquireNextImageKHR),
2185 ADD_HOOK(AcquireNextImage2KHR),
2186
2187 ADD_HOOK(QueueSubmit),
2188
2189 ADD_HOOK(CreateDevice),
2190 ADD_HOOK(DestroyDevice),
2191
2192 ADD_HOOK(CreateInstance),
2193 ADD_HOOK(DestroyInstance),
2194 #undef ADD_HOOK
2195 };
2196
2197 static void *find_ptr(const char *name)
2198 {
2199 for (uint32_t i = 0; i < ARRAY_SIZE(name_to_funcptr_map); i++) {
2200 if (strcmp(name, name_to_funcptr_map[i].name) == 0)
2201 return name_to_funcptr_map[i].ptr;
2202 }
2203
2204 return NULL;
2205 }
2206
2207 VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev,
2208 const char *funcName)
2209 {
2210 void *ptr = find_ptr(funcName);
2211 if (ptr) return reinterpret_cast<PFN_vkVoidFunction>(ptr);
2212
2213 if (dev == NULL) return NULL;
2214
2215 struct device_data *device_data = FIND_DEVICE_DATA(dev);
2216 if (device_data->vtable.GetDeviceProcAddr == NULL) return NULL;
2217 return device_data->vtable.GetDeviceProcAddr(dev, funcName);
2218 }
2219
2220 VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance,
2221 const char *funcName)
2222 {
2223 void *ptr = find_ptr(funcName);
2224 if (ptr) return reinterpret_cast<PFN_vkVoidFunction>(ptr);
2225
2226 if (instance == NULL) return NULL;
2227
2228 struct instance_data *instance_data = FIND_INSTANCE_DATA(instance);
2229 if (instance_data->vtable.GetInstanceProcAddr == NULL) return NULL;
2230 return instance_data->vtable.GetInstanceProcAddr(instance, funcName);
2231 }