radv: remove the RADV_CALL() dispatch macro.

This is leftover from anv, and we never really needed it: the radv_*
entrypoints can be called directly instead of going through the
dispatch table.
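Every call site follows the same mechanical pattern; as a sketch of the
conversion (CmdDraw is just one representative call site from the diff
below):

    /* before: statement-expression macro that lazily resolved the
     * entrypoint through the global dispatch table before calling it */
    RADV_CALL(CmdDraw)(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);

    /* after: call the driver entrypoint directly */
    radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);

The remaining RADV_CALL users (CreateSampler, CmdBeginRenderPass,
CmdEndRenderPass, CmdBindPipeline, CmdBindVertexBuffers,
CmdSetStencilReference, DestroyPipeline, DestroyRenderPass,
DestroyFramebuffer) are converted the same way, and the macro itself is
dropped from the header.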
Signed-off-by: Dave Airlie <airlied@redhat.com>
});
VkSampler sampler;
- RADV_CALL(CreateSampler)(radv_device_to_handle(device),
+ radv_CreateSampler(radv_device_to_handle(device),
&(VkSamplerCreateInfo) {
.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
.magFilter = blit_filter,
case VK_IMAGE_ASPECT_COLOR_BIT: {
unsigned fs_key = radv_format_meta_fs_key(dest_image->vk_format);
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit.render_pass[fs_key],
break;
}
case VK_IMAGE_ASPECT_DEPTH_BIT:
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit.depth_only_rp,
}
break;
case VK_IMAGE_ASPECT_STENCIL_BIT:
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit.stencil_only_rp,
device->meta_state.blit.pipeline_layout, 0, 1,
&set, 0, NULL);
- RADV_CALL(CmdDraw)(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
+ radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
- RADV_CALL(CmdEndRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer));
+ radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));
/* At the point where we emit the draw call, all data from the
* descriptor sets, etc. has been used. We are free to delete it.
if (dst->aspect_mask == VK_IMAGE_ASPECT_COLOR_BIT) {
unsigned fs_key = radv_format_meta_fs_key(dst_temps.iview.vk_format);
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit2d.render_passes[fs_key],
bind_pipeline(cmd_buffer, src_type, fs_key);
} else if (dst->aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT) {
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit2d.depth_only_rp,
bind_depth_pipeline(cmd_buffer, src_type);
} else if (dst->aspect_mask == VK_IMAGE_ASPECT_STENCIL_BIT) {
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.blit2d.stencil_only_rp,
bind_stencil_pipeline(cmd_buffer, src_type);
}
- RADV_CALL(CmdDraw)(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
- RADV_CALL(CmdEndRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer));
+ radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
+ radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));
/* At the point where we emit the draw call, all data from the
* descriptor sets, etc. has been used. We are free to delete it.
if (!pipeline)
return;
- RADV_CALL(DestroyPipeline)(radv_device_to_handle(device),
+ radv_DestroyPipeline(radv_device_to_handle(device),
radv_pipeline_to_handle(pipeline),
&device->meta_state.alloc);
static void
destroy_render_pass(struct radv_device *device, VkRenderPass renderpass)
{
- RADV_CALL(DestroyRenderPass)(radv_device_to_handle(device), renderpass,
+ radv_DestroyRenderPass(radv_device_to_handle(device), renderpass,
&device->meta_state.alloc);
}
};
- RADV_CALL(CmdBindVertexBuffers)(cmd_buffer_h, 0, 1,
+ radv_CmdBindVertexBuffers(cmd_buffer_h, 0, 1,
(VkBuffer[]) { radv_buffer_to_handle(&vertex_buffer) },
(VkDeviceSize[]) { 0 });
if (cmd_buffer->state.pipeline != pipeline) {
- RADV_CALL(CmdBindPipeline)(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
+ radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
pipeline_h);
}
- RADV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
+ radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
radv_cmd_buffer_set_subpass(cmd_buffer, subpass, false);
}
};
if (aspects & VK_IMAGE_ASPECT_STENCIL_BIT) {
- RADV_CALL(CmdSetStencilReference)(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
+ radv_CmdSetStencilReference(cmd_buffer_h, VK_STENCIL_FACE_FRONT_BIT,
clear_value.stencil);
}
- RADV_CALL(CmdBindVertexBuffers)(cmd_buffer_h, 0, 1,
+ radv_CmdBindVertexBuffers(cmd_buffer_h, 0, 1,
(VkBuffer[]) { radv_buffer_to_handle(&vertex_buffer) },
(VkDeviceSize[]) { 0 });
clear_rect,
clear_value);
if (cmd_buffer->state.pipeline != pipeline) {
- RADV_CALL(CmdBindPipeline)(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
+ radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
radv_pipeline_to_handle(pipeline));
}
if (depth_view_can_fast_clear(iview, subpass->depth_stencil_attachment.layout, clear_rect))
radv_set_depth_clear_regs(cmd_buffer, iview->image, clear_value, aspects);
- RADV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
+ radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
}
&cmd_buffer->pool->alloc,
&pass);
- RADV_CALL(CmdBeginRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer),
+ radv_CmdBeginRenderPass(radv_cmd_buffer_to_handle(cmd_buffer),
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderArea = {
emit_clear(cmd_buffer, &clear_att, &clear_rect);
- RADV_CALL(CmdEndRenderPass)(radv_cmd_buffer_to_handle(cmd_buffer));
- RADV_CALL(DestroyRenderPass)(device_h, pass,
+ radv_CmdEndRenderPass(radv_cmd_buffer_to_handle(cmd_buffer));
+ radv_DestroyRenderPass(device_h, pass,
&cmd_buffer->pool->alloc);
- RADV_CALL(DestroyFramebuffer)(device_h, fb,
+ radv_DestroyFramebuffer(device_h, fb,
&cmd_buffer->pool->alloc);
}
}
const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
if (pass_h)
- RADV_CALL(DestroyRenderPass)(device_h, pass_h,
+ radv_DestroyRenderPass(device_h, pass_h,
&device->meta_state.alloc);
VkPipeline pipeline_h = state->depth_decomp.decompress_pipeline;
if (pipeline_h) {
- RADV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
+ radv_DestroyPipeline(device_h, pipeline_h, alloc);
}
pipeline_h = state->depth_decomp.resummarize_pipeline;
if (pipeline_h) {
- RADV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
+ radv_DestroyPipeline(device_h, pipeline_h, alloc);
}
}
pipeline_h);
}
- RADV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
+ radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
}
&cmd_buffer->pool->alloc,
&fb_h);
- RADV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
+ radv_CmdBeginRenderPass(cmd_buffer_h,
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = cmd_buffer->device->meta_state.depth_decomp.pass,
VK_SUBPASS_CONTENTS_INLINE);
emit_depth_decomp(cmd_buffer, &(VkOffset2D){0, 0 }, &(VkExtent2D){width, height}, pipeline_h);
- RADV_CALL(CmdEndRenderPass)(cmd_buffer_h);
+ radv_CmdEndRenderPass(cmd_buffer_h);
radv_DestroyFramebuffer(device_h, fb_h,
&cmd_buffer->pool->alloc);
goto cleanup;
cleanup_cmask:
- RADV_CALL(DestroyPipeline)(device_h, device->meta_state.fast_clear_flush.cmask_eliminate_pipeline, &device->meta_state.alloc);
+ radv_DestroyPipeline(device_h, device->meta_state.fast_clear_flush.cmask_eliminate_pipeline, &device->meta_state.alloc);
cleanup:
ralloc_free(fs_module.nir);
return result;
const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
if (pass_h)
- RADV_CALL(DestroyRenderPass)(device_h, pass_h,
+ radv_DestroyRenderPass(device_h, pass_h,
&device->meta_state.alloc);
VkPipeline pipeline_h = state->fast_clear_flush.cmask_eliminate_pipeline;
if (pipeline_h) {
- RADV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
+ radv_DestroyPipeline(device_h, pipeline_h, alloc);
}
pipeline_h = state->fast_clear_flush.fmask_decompress_pipeline;
if (pipeline_h) {
- RADV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
+ radv_DestroyPipeline(device_h, pipeline_h, alloc);
}
}
pipeline_h);
}
- RADV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
+ radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
cmd_buffer->state.flush_bits |= (RADV_CMD_FLAG_FLUSH_AND_INV_CB |
RADV_CMD_FLAG_FLUSH_AND_INV_CB_META);
si_emit_cache_flush(cmd_buffer);
&cmd_buffer->pool->alloc,
&fb_h);
- RADV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
+ radv_CmdBeginRenderPass(cmd_buffer_h,
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = cmd_buffer->device->meta_state.fast_clear_flush.pass,
emit_fast_clear_flush(cmd_buffer,
&(VkExtent2D) { image->extent.width, image->extent.height },
image->fmask.size > 0);
- RADV_CALL(CmdEndRenderPass)(cmd_buffer_h);
+ radv_CmdEndRenderPass(cmd_buffer_h);
radv_DestroyFramebuffer(device_h, fb_h,
&cmd_buffer->pool->alloc);
const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
if (pass_h)
- RADV_CALL(DestroyRenderPass)(device_h, pass_h,
+ radv_DestroyRenderPass(device_h, pass_h,
&device->meta_state.alloc);
VkPipeline pipeline_h = state->resolve.pipeline;
if (pipeline_h) {
- RADV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
+ radv_DestroyPipeline(device_h, pipeline_h, alloc);
}
}
pipeline_h);
}
- RADV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);
+ radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
si_emit_cache_flush(cmd_buffer);
}
&cmd_buffer->pool->alloc,
&fb_h);
- RADV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
+ radv_CmdBeginRenderPass(cmd_buffer_h,
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = device->meta_state.resolve.pass,
.height = extent.height,
});
- RADV_CALL(CmdEndRenderPass)(cmd_buffer_h);
+ radv_CmdEndRenderPass(cmd_buffer_h);
radv_DestroyFramebuffer(device_h, fb_h,
&cmd_buffer->pool->alloc);
extern struct radv_dispatch_table dtable;
-#define RADV_CALL(func) ({ \
- if (dtable.func == NULL) { \
- size_t idx = offsetof(struct radv_dispatch_table, func) / sizeof(void *); \
- dtable.entrypoints[idx] = radv_resolve_entrypoint(idx); \
- } \
- dtable.func; \
- })
-
static inline void *
radv_alloc(const VkAllocationCallbacks *alloc,
size_t size, size_t align,