/* turnip: add .clang-format
 * [mesa.git] src/freedreno/vulkan/tu_image.c
 */
1 /*
2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
4 *
5 * based in part on anv driver which is:
6 * Copyright © 2015 Intel Corporation
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a
9 * copy of this software and associated documentation files (the "Software"),
10 * to deal in the Software without restriction, including without limitation
11 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 * and/or sell copies of the Software, and to permit persons to whom the
13 * Software is furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the next
16 * paragraph) shall be included in all copies or substantial portions of the
17 * Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 * DEALINGS IN THE SOFTWARE.
26 */
27
28 #include "tu_private.h"
29
30 #include "util/debug.h"
31 #include "util/u_atomic.h"
32 #include "vk_format.h"
33 #include "vk_util.h"
34
35 static inline bool
36 image_level_linear(struct tu_image *image, int level)
37 {
38 unsigned w = u_minify(image->extent.width, level);
39 return w < 16;
40 }
41
/* Tiling alignment requirements, indexed by cpp (bytes per pixel/block).
 * pitchalign is the pitch alignment in pixels, heightalign the height
 * alignment in rows.  cpp values not listed (e.g. 6) are zero-initialized.
 */
static const struct
{
   unsigned pitchalign;
   unsigned heightalign;
} tile_alignment[] = {
   [1] = { 128, 32 }, [2] = { 128, 16 }, [3] = { 128, 16 }, [4] = { 64, 16 },
   [8] = { 64, 16 }, [12] = { 64, 16 }, [16] = { 64, 16 },
};
51
/* Compute the per-miplevel layout of an image: fills in pitch, offset and
 * size for each entry of image->levels[], and image->layer_size (the byte
 * size of one array layer, accumulated over all levels times depth).
 *
 * Walks the mip chain from level 0, minifying width/height/depth each
 * iteration.  Must be called after image->tile_mode and image->extent are
 * set (tu_image_create does this).
 */
static void
setup_slices(struct tu_image *image, const VkImageCreateInfo *pCreateInfo)
{
   enum vk_format_layout layout =
      vk_format_description(pCreateInfo->format)->layout;
   uint32_t layer_size = 0;
   uint32_t width = pCreateInfo->extent.width;
   uint32_t height = pCreateInfo->extent.height;
   uint32_t depth = pCreateInfo->extent.depth;
   /* Non-3D images store each array layer contiguously (layer-first); 3D
    * images store the depth slices of a level together instead. */
   bool layer_first = pCreateInfo->imageType != VK_IMAGE_TYPE_3D;
   uint32_t alignment = pCreateInfo->imageType == VK_IMAGE_TYPE_3D ? 4096 : 1;
   uint32_t cpp = vk_format_get_blocksize(pCreateInfo->format);

   uint32_t heightalign = tile_alignment[cpp].heightalign;

   for (unsigned level = 0; level < pCreateInfo->mipLevels; level++) {
      struct tu_image_level *slice = &image->levels[level];
      /* Small (narrow) levels fall back to linear layout in tiled images. */
      bool linear_level = image_level_linear(image, level);
      uint32_t aligned_height = height;
      uint32_t blocks;
      uint32_t pitchalign;

      if (image->tile_mode && !linear_level) {
         pitchalign = tile_alignment[cpp].pitchalign;
         aligned_height = align(aligned_height, heightalign);
      } else {
         pitchalign = 64;

         /* The blits used for mem<->gmem work at a granularity of
          * 32x32, which can cause faults due to over-fetch on the
          * last level. The simple solution is to over-allocate a
          * bit the last level to ensure any over-fetch is harmless.
          * The pitch is already sufficiently aligned, but height
          * may not be:
          */
         if ((level + 1 == pCreateInfo->mipLevels))
            aligned_height = align(aligned_height, 32);
      }

      /* ASTC block widths are not powers of two, so the pitch needs the
       * non-power-of-two alignment helper. */
      if (layout == VK_FORMAT_LAYOUT_ASTC)
         slice->pitch = util_align_npot(
            width,
            pitchalign * vk_format_get_blockwidth(pCreateInfo->format));
      else
         slice->pitch = align(width, pitchalign);

      slice->offset = layer_size;
      blocks = vk_format_get_block_count(pCreateInfo->format, slice->pitch,
                                         aligned_height);

      /* 1d array and 2d array textures must all have the same layer size
       * for each miplevel on a3xx. 3d textures can have different layer
       * sizes for high levels, but the hw auto-sizer is buggy (or at least
       * different than what this code does), so as soon as the layer size
       * range gets into range, we stop reducing it.
       */
      if (pCreateInfo->imageType == VK_IMAGE_TYPE_3D &&
          (level == 1 ||
           (level > 1 && image->levels[level - 1].size > 0xf000)))
         slice->size = align(blocks * cpp, alignment);
      else if (level == 0 || layer_first || alignment == 1)
         slice->size = align(blocks * cpp, alignment);
      else
         slice->size = image->levels[level - 1].size;

      layer_size += slice->size * depth;

      width = u_minify(width, 1);
      height = u_minify(height, 1);
      depth = u_minify(depth, 1);
   }

   image->layer_size = layer_size;
}
126
127 VkResult
128 tu_image_create(VkDevice _device,
129 const struct tu_image_create_info *create_info,
130 const VkAllocationCallbacks *alloc,
131 VkImage *pImage)
132 {
133 TU_FROM_HANDLE(tu_device, device, _device);
134 const VkImageCreateInfo *pCreateInfo = create_info->vk_info;
135 struct tu_image *image = NULL;
136 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO);
137
138 tu_assert(pCreateInfo->mipLevels > 0);
139 tu_assert(pCreateInfo->arrayLayers > 0);
140 tu_assert(pCreateInfo->samples > 0);
141 tu_assert(pCreateInfo->extent.width > 0);
142 tu_assert(pCreateInfo->extent.height > 0);
143 tu_assert(pCreateInfo->extent.depth > 0);
144
145 image = vk_zalloc2(&device->alloc, alloc, sizeof(*image), 8,
146 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
147 if (!image)
148 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
149
150 image->type = pCreateInfo->imageType;
151
152 image->vk_format = pCreateInfo->format;
153 image->tiling = pCreateInfo->tiling;
154 image->usage = pCreateInfo->usage;
155 image->flags = pCreateInfo->flags;
156 image->extent = pCreateInfo->extent;
157
158 image->exclusive = pCreateInfo->sharingMode == VK_SHARING_MODE_EXCLUSIVE;
159 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) {
160 for (uint32_t i = 0; i < pCreateInfo->queueFamilyIndexCount; ++i)
161 if (pCreateInfo->pQueueFamilyIndices[i] ==
162 VK_QUEUE_FAMILY_EXTERNAL_KHR)
163 image->queue_family_mask |= (1u << TU_MAX_QUEUE_FAMILIES) - 1u;
164 else
165 image->queue_family_mask |=
166 1u << pCreateInfo->pQueueFamilyIndices[i];
167 }
168
169 image->shareable =
170 vk_find_struct_const(pCreateInfo->pNext,
171 EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR) != NULL;
172
173 image->tile_mode = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? 3 : 0;
174 setup_slices(image, pCreateInfo);
175
176 image->size = image->layer_size * pCreateInfo->arrayLayers;
177 *pImage = tu_image_to_handle(image);
178
179 return VK_SUCCESS;
180 }
181
/* Initialize an image view.
 *
 * NOTE(review): currently an empty stub — no view state is derived from
 * pCreateInfo yet; tu_CreateImageView returns a view with uninitialized
 * contents beyond the allocation.
 */
void
tu_image_view_init(struct tu_image_view *iview,
                   struct tu_device *device,
                   const VkImageViewCreateInfo *pCreateInfo)
{
}
188
189 unsigned
190 tu_image_queue_family_mask(const struct tu_image *image,
191 uint32_t family,
192 uint32_t queue_family)
193 {
194 if (!image->exclusive)
195 return image->queue_family_mask;
196 if (family == VK_QUEUE_FAMILY_EXTERNAL_KHR)
197 return (1u << TU_MAX_QUEUE_FAMILIES) - 1u;
198 if (family == VK_QUEUE_FAMILY_IGNORED)
199 return 1u << queue_family;
200 return 1u << family;
201 }
202
203 VkResult
204 tu_CreateImage(VkDevice device,
205 const VkImageCreateInfo *pCreateInfo,
206 const VkAllocationCallbacks *pAllocator,
207 VkImage *pImage)
208 {
209 #ifdef ANDROID
210 const VkNativeBufferANDROID *gralloc_info =
211 vk_find_struct_const(pCreateInfo->pNext, NATIVE_BUFFER_ANDROID);
212
213 if (gralloc_info)
214 return tu_image_from_gralloc(device, pCreateInfo, gralloc_info,
215 pAllocator, pImage);
216 #endif
217
218 return tu_image_create(device,
219 &(struct tu_image_create_info) {
220 .vk_info = pCreateInfo,
221 .scanout = false,
222 },
223 pAllocator, pImage);
224 }
225
226 void
227 tu_DestroyImage(VkDevice _device,
228 VkImage _image,
229 const VkAllocationCallbacks *pAllocator)
230 {
231 TU_FROM_HANDLE(tu_device, device, _device);
232 TU_FROM_HANDLE(tu_image, image, _image);
233
234 if (!image)
235 return;
236
237 if (image->owned_memory != VK_NULL_HANDLE)
238 tu_FreeMemory(_device, image->owned_memory, pAllocator);
239
240 vk_free2(&device->alloc, pAllocator, image);
241 }
242
/* vkGetImageSubresourceLayout.
 *
 * NOTE(review): currently an empty stub — pLayout is never written, so
 * callers receive uninitialized data.
 */
void
tu_GetImageSubresourceLayout(VkDevice _device,
                             VkImage _image,
                             const VkImageSubresource *pSubresource,
                             VkSubresourceLayout *pLayout)
{
}
250
251 VkResult
252 tu_CreateImageView(VkDevice _device,
253 const VkImageViewCreateInfo *pCreateInfo,
254 const VkAllocationCallbacks *pAllocator,
255 VkImageView *pView)
256 {
257 TU_FROM_HANDLE(tu_device, device, _device);
258 struct tu_image_view *view;
259
260 view = vk_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
261 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
262 if (view == NULL)
263 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
264
265 tu_image_view_init(view, device, pCreateInfo);
266
267 *pView = tu_image_view_to_handle(view);
268
269 return VK_SUCCESS;
270 }
271
272 void
273 tu_DestroyImageView(VkDevice _device,
274 VkImageView _iview,
275 const VkAllocationCallbacks *pAllocator)
276 {
277 TU_FROM_HANDLE(tu_device, device, _device);
278 TU_FROM_HANDLE(tu_image_view, iview, _iview);
279
280 if (!iview)
281 return;
282 vk_free2(&device->alloc, pAllocator, iview);
283 }
284
285 void
286 tu_buffer_view_init(struct tu_buffer_view *view,
287 struct tu_device *device,
288 const VkBufferViewCreateInfo *pCreateInfo)
289 {
290 TU_FROM_HANDLE(tu_buffer, buffer, pCreateInfo->buffer);
291
292 view->range = pCreateInfo->range == VK_WHOLE_SIZE
293 ? buffer->size - pCreateInfo->offset
294 : pCreateInfo->range;
295 view->vk_format = pCreateInfo->format;
296 }
297
298 VkResult
299 tu_CreateBufferView(VkDevice _device,
300 const VkBufferViewCreateInfo *pCreateInfo,
301 const VkAllocationCallbacks *pAllocator,
302 VkBufferView *pView)
303 {
304 TU_FROM_HANDLE(tu_device, device, _device);
305 struct tu_buffer_view *view;
306
307 view = vk_alloc2(&device->alloc, pAllocator, sizeof(*view), 8,
308 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
309 if (!view)
310 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
311
312 tu_buffer_view_init(view, device, pCreateInfo);
313
314 *pView = tu_buffer_view_to_handle(view);
315
316 return VK_SUCCESS;
317 }
318
319 void
320 tu_DestroyBufferView(VkDevice _device,
321 VkBufferView bufferView,
322 const VkAllocationCallbacks *pAllocator)
323 {
324 TU_FROM_HANDLE(tu_device, device, _device);
325 TU_FROM_HANDLE(tu_buffer_view, view, bufferView);
326
327 if (!view)
328 return;
329
330 vk_free2(&device->alloc, pAllocator, view);
331 }