v3d: compute appropriate VPM memory configuration for geometry shader workloads
[mesa.git] / src / gallium / drivers / zink / zink_blit.c
1 #include "zink_context.h"
2 #include "zink_helpers.h"
3 #include "zink_resource.h"
4 #include "zink_screen.h"
5
6 #include "util/u_blitter.h"
7 #include "util/format/u_format.h"
8
9 static bool
10 blit_resolve(struct zink_context *ctx, const struct pipe_blit_info *info)
11 {
12 if (info->mask != PIPE_MASK_RGBA ||
13 info->scissor_enable ||
14 info->alpha_blend)
15 return false;
16
17 struct zink_resource *src = zink_resource(info->src.resource);
18 struct zink_resource *dst = zink_resource(info->dst.resource);
19
20 struct zink_screen *screen = zink_screen(ctx->base.screen);
21 if (src->format != zink_get_format(screen, info->src.format) ||
22 dst->format != zink_get_format(screen, info->dst.format))
23 return false;
24
25 struct zink_batch *batch = zink_batch_no_rp(ctx);
26
27 zink_batch_reference_resoure(batch, src);
28 zink_batch_reference_resoure(batch, dst);
29
30 if (src->layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
31 zink_resource_barrier(batch->cmdbuf, src, src->aspect,
32 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
33
34 if (dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
35 zink_resource_barrier(batch->cmdbuf, dst, dst->aspect,
36 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
37
38 VkImageResolve region = {};
39
40 region.srcSubresource.aspectMask = src->aspect;
41 region.srcSubresource.mipLevel = info->src.level;
42 region.srcSubresource.baseArrayLayer = 0; // no clue
43 region.srcSubresource.layerCount = 1; // no clue
44 region.srcOffset.x = info->src.box.x;
45 region.srcOffset.y = info->src.box.y;
46 region.srcOffset.z = info->src.box.z;
47
48 region.dstSubresource.aspectMask = dst->aspect;
49 region.dstSubresource.mipLevel = info->dst.level;
50 region.dstSubresource.baseArrayLayer = 0; // no clue
51 region.dstSubresource.layerCount = 1; // no clue
52 region.dstOffset.x = info->dst.box.x;
53 region.dstOffset.y = info->dst.box.y;
54 region.dstOffset.z = info->dst.box.z;
55
56 region.extent.width = info->dst.box.width;
57 region.extent.height = info->dst.box.height;
58 region.extent.depth = info->dst.box.depth;
59 vkCmdResolveImage(batch->cmdbuf, src->image, src->layout,
60 dst->image, dst->layout,
61 1, &region);
62
63 return true;
64 }
65
66 static bool
67 blit_native(struct zink_context *ctx, const struct pipe_blit_info *info)
68 {
69 if (info->mask != PIPE_MASK_RGBA ||
70 info->scissor_enable ||
71 info->alpha_blend)
72 return false;
73
74 struct zink_resource *src = zink_resource(info->src.resource);
75 struct zink_resource *dst = zink_resource(info->dst.resource);
76
77 struct zink_screen *screen = zink_screen(ctx->base.screen);
78 if (src->format != zink_get_format(screen, info->src.format) ||
79 dst->format != zink_get_format(screen, info->dst.format))
80 return false;
81
82 struct zink_batch *batch = zink_batch_no_rp(ctx);
83 zink_batch_reference_resoure(batch, src);
84 zink_batch_reference_resoure(batch, dst);
85
86 if (src->layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
87 zink_resource_barrier(batch->cmdbuf, src, src->aspect,
88 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
89
90 if (dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
91 zink_resource_barrier(batch->cmdbuf, dst, dst->aspect,
92 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
93
94 VkImageBlit region = {};
95 region.srcSubresource.aspectMask = src->aspect;
96 region.srcSubresource.mipLevel = info->src.level;
97 region.srcOffsets[0].x = info->src.box.x;
98 region.srcOffsets[0].y = info->src.box.y;
99 region.srcOffsets[1].x = info->src.box.x + info->src.box.width;
100 region.srcOffsets[1].y = info->src.box.y + info->src.box.height;
101
102 if (src->base.array_size > 1) {
103 region.srcOffsets[0].z = 0;
104 region.srcOffsets[1].z = 1;
105 region.srcSubresource.baseArrayLayer = info->src.box.z;
106 region.srcSubresource.layerCount = info->src.box.depth;
107 } else {
108 region.srcOffsets[0].z = info->src.box.z;
109 region.srcOffsets[1].z = info->src.box.z + info->src.box.depth;
110 region.srcSubresource.baseArrayLayer = 0;
111 region.srcSubresource.layerCount = 1;
112 }
113
114 region.dstSubresource.aspectMask = dst->aspect;
115 region.dstSubresource.mipLevel = info->dst.level;
116 region.dstOffsets[0].x = info->dst.box.x;
117 region.dstOffsets[0].y = info->dst.box.y;
118 region.dstOffsets[1].x = info->dst.box.x + info->dst.box.width;
119 region.dstOffsets[1].y = info->dst.box.y + info->dst.box.height;
120
121 if (dst->base.array_size > 1) {
122 region.dstOffsets[0].z = 0;
123 region.dstOffsets[1].z = 1;
124 region.dstSubresource.baseArrayLayer = info->dst.box.z;
125 region.dstSubresource.layerCount = info->dst.box.depth;
126 } else {
127 region.dstOffsets[0].z = info->dst.box.z;
128 region.dstOffsets[1].z = info->dst.box.z + info->dst.box.depth;
129 region.dstSubresource.baseArrayLayer = 0;
130 region.dstSubresource.layerCount = 1;
131 }
132
133 vkCmdBlitImage(batch->cmdbuf, src->image, src->layout,
134 dst->image, dst->layout,
135 1, &region,
136 zink_filter(info->filter));
137
138 return true;
139 }
140
void
zink_blit(struct pipe_context *pctx,
          const struct pipe_blit_info *info)
{
   /* pipe_context::blit entry point.
    *
    * Tries the cheap Vulkan paths first: a multisample resolve when
    * blitting MSAA -> non-MSAA, otherwise a native vkCmdBlitImage. Both
    * helpers return false when they can't express the blit, in which case
    * we fall back to u_blitter, which renders the blit as a textured quad.
    */
   struct zink_context *ctx = zink_context(pctx);
   if (info->src.resource->nr_samples > 1 &&
       info->dst.resource->nr_samples <= 1) {
      if (blit_resolve(ctx, info))
         return;
   } else {
      if (blit_native(ctx, info))
         return;
   }

   /* Nothing more we can do if even u_blitter can't handle it; the blit is
    * silently dropped apart from the debug message. */
   if (!util_blitter_is_blit_supported(ctx->blitter, info)) {
      debug_printf("blit unsupported %s -> %s\n",
                   util_format_short_name(info->src.resource->format),
                   util_format_short_name(info->dst.resource->format));
      return;
   }

   /* Save every piece of context state that u_blitter will clobber while
    * drawing its quad, so it can be restored afterwards. The set of save
    * calls must match what util_blitter_blit touches. */
   util_blitter_save_blend(ctx->blitter, ctx->gfx_pipeline_state.blend_state);
   util_blitter_save_depth_stencil_alpha(ctx->blitter, ctx->gfx_pipeline_state.depth_stencil_alpha_state);
   util_blitter_save_vertex_elements(ctx->blitter, ctx->element_state);
   util_blitter_save_stencil_ref(ctx->blitter, &ctx->stencil_ref);
   util_blitter_save_rasterizer(ctx->blitter, ctx->rast_state);
   util_blitter_save_fragment_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_FRAGMENT]);
   util_blitter_save_vertex_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_VERTEX]);
   util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
   util_blitter_save_viewport(ctx->blitter, ctx->viewport_states);
   util_blitter_save_scissor(ctx->blitter, ctx->scissor_states);
   util_blitter_save_fragment_sampler_states(ctx->blitter,
                                             ctx->num_samplers[PIPE_SHADER_FRAGMENT],
                                             ctx->sampler_states[PIPE_SHADER_FRAGMENT]);
   util_blitter_save_fragment_sampler_views(ctx->blitter,
                                            ctx->num_image_views[PIPE_SHADER_FRAGMENT],
                                            ctx->image_views[PIPE_SHADER_FRAGMENT]);
   util_blitter_save_fragment_constant_buffer_slot(ctx->blitter, ctx->ubos[PIPE_SHADER_FRAGMENT]);
   util_blitter_save_vertex_buffer_slot(ctx->blitter, ctx->buffers);
   util_blitter_save_sample_mask(ctx->blitter, ctx->gfx_pipeline_state.sample_mask);

   util_blitter_blit(ctx->blitter, info);
}