vulkan: Emit a renderpass op

... instead of doing the equivalent things manually by creating a
RenderPass and calling the relevant functions.

Now all renderpass operations are actually stored as ops.

Also reshuffle the command emission code, because we no longer need to
emit the ops for the base renderpass.

As a result we now submit a single command buffer containing all the
render passes, instead of one command buffer per render pass.
We also bind vertex buffers and descriptor sets only once, at the
start, instead of once per renderpass.
Benjamin Otte 2023-07-10 03:42:55 +02:00
parent cc5cab65a1
commit 6f76c37fed
4 changed files with 103 additions and 70 deletions
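
The change boils down to one pattern that is easy to lose in the diff below: render-pass begin/end become ops themselves, every op records itself into one shared command buffer through a per-class command callback, and the renderpass op walks the ops of its own pass until it reaches the matching end-pass op. The following is a minimal, self-contained sketch of that walk; the ToyOp/ToyOpClass types and names are hypothetical simplifications, not the real GskVulkanOp structures.

```c
/* toy-ops.c: a stripped-down model of the op walk, not GTK code */
#include <stdio.h>

typedef struct _ToyOp ToyOp;

typedef enum {
  TOY_STAGE_BEGIN_PASS,
  TOY_STAGE_DRAW,
  TOY_STAGE_END_PASS
} ToyStage;

typedef struct {
  ToyStage    stage;
  const char *name;
  /* Record this op into the (single) command buffer and return the next
   * op to process, so an op may consume a whole render pass at once. */
  ToyOp *   (* command) (ToyOp *op, void *command_buffer);
} ToyOpClass;

struct _ToyOp {
  const ToyOpClass *op_class;
  ToyOp            *next;
};

static ToyOp *
toy_draw_command (ToyOp *op, void *cb)
{
  printf ("  record %s\n", op->op_class->name);
  return op->next;
}

static ToyOp *
toy_end_pass_command (ToyOp *op, void *cb)
{
  printf ("end render pass\n");
  return op->next;
}

/* Analogous to gsk_vulkan_render_pass_op_command() delegating to
 * gsk_vulkan_render_draw_pass(): begin the pass, then run the ops of
 * this pass until the matching end-pass op has been recorded. */
static ToyOp *
toy_begin_pass_command (ToyOp *op, void *cb)
{
  printf ("begin render pass\n");
  op = op->next;
  while (op && op->op_class->stage != TOY_STAGE_END_PASS)
    op = op->op_class->command (op, cb);
  if (op)
    op = op->op_class->command (op, cb);
  return op;
}

static const ToyOpClass TOY_BEGIN_PASS = { TOY_STAGE_BEGIN_PASS, "begin", toy_begin_pass_command };
static const ToyOpClass TOY_DRAW       = { TOY_STAGE_DRAW,       "draw",  toy_draw_command };
static const ToyOpClass TOY_END_PASS   = { TOY_STAGE_END_PASS,   "end",   toy_end_pass_command };

int
main (void)
{
  ToyOp ops[4] = {
    { &TOY_BEGIN_PASS, NULL },
    { &TOY_DRAW,       NULL },
    { &TOY_DRAW,       NULL },
    { &TOY_END_PASS,   NULL },
  };
  for (int i = 0; i < 3; i++)
    ops[i].next = &ops[i + 1];

  /* As in the new gsk_vulkan_render_draw(): bind once, walk all ops into
   * one command buffer, submit once. */
  printf ("bind vertex buffer and descriptor sets\n");
  for (ToyOp *op = ops; op != NULL; )
    op = op->op_class->command (op, NULL);
  printf ("submit single command buffer\n");

  return 0;
}
```

The sketch is only meant to show why a single bind and a single submit suffice once every pass boundary is itself an op in the list; the real code in the diff below carries the actual Vulkan state.
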


@@ -12,6 +12,7 @@
#include "gskvulkanpushconstantsopprivate.h"
#include "gskvulkanrendererprivate.h"
#include "gskvulkanrenderpassprivate.h"
#include "gskvulkanrenderpassopprivate.h"
#include "gdk/gdkvulkancontextprivate.h"
@@ -59,7 +60,6 @@ struct _GskVulkanRender
GskVulkanRenderOps render_ops;
GskVulkanOp *first_op;
GskVulkanUploader *uploader;
GskVulkanRenderPass *render_pass;
GskDescriptorImageInfos descriptor_images;
GskDescriptorBufferInfos descriptor_buffers;
@@ -492,16 +492,13 @@ gsk_vulkan_render_add_node (GskVulkanRender *self,
graphene_vec2_init (&scale, self->scale, self->scale);
self->render_pass = gsk_vulkan_render_pass_new (self->vulkan,
self,
self->target,
&scale,
&self->viewport,
self->clip,
node,
TRUE);
gsk_vulkan_render_pass_add (self->render_pass, self, node);
gsk_vulkan_render_pass_op (self,
self->vulkan,
g_object_ref (self->target),
self->clip,
&scale,
&self->viewport,
node, TRUE);
gsk_vulkan_render_seal_ops (self);
gsk_vulkan_render_verbose_print (self, "start of frame");
@@ -898,34 +895,14 @@ gsk_vulkan_render_collect_vertex_buffer (GskVulkanRender *self)
GskVulkanOp *
gsk_vulkan_render_draw_pass (GskVulkanRender *self,
GskVulkanRenderPass *render_pass,
GskVulkanOp *op)
GskVulkanOp *op,
VkCommandBuffer command_buffer)
{
VkPipeline current_pipeline = VK_NULL_HANDLE;
const GskVulkanOpClass *current_pipeline_class = NULL;
const char *current_pipeline_clip_type = NULL;
VkCommandBuffer command_buffer;
VkRenderPass vk_render_pass;
command_buffer = gsk_vulkan_command_pool_get_buffer (self->command_pool);
if (self->vertex_buffer)
vkCmdBindVertexBuffers (command_buffer,
0,
1,
(VkBuffer[1]) {
gsk_vulkan_buffer_get_buffer (self->vertex_buffer)
},
(VkDeviceSize[1]) { 0 });
vkCmdBindDescriptorSets (command_buffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
self->pipeline_layout,
0,
N_DESCRIPTOR_SETS,
self->descriptor_sets,
0,
NULL);
vk_render_pass = gsk_vulkan_render_pass_begin_draw (render_pass, self, self->pipeline_layout, command_buffer);
while (op && op->op_class->stage != GSK_VULKAN_STAGE_END_PASS)
@@ -954,20 +931,15 @@ gsk_vulkan_render_draw_pass (GskVulkanRender *self,
else
gsk_vulkan_render_pass_end_draw (render_pass, self, self->pipeline_layout, command_buffer);
gsk_vulkan_command_pool_submit_buffer (self->command_pool,
command_buffer,
0,
NULL,
0,
NULL,
self->fence);
return op;
}
void
gsk_vulkan_render_draw (GskVulkanRender *self)
{
VkCommandBuffer command_buffer;
GskVulkanOp *op;
#ifdef G_ENABLE_DEBUG
if (GSK_RENDERER_DEBUG_CHECK (self->renderer, SYNC))
gsk_profiler_timer_begin (gsk_renderer_get_profiler (self->renderer), self->gpu_time_timer);
@@ -977,7 +949,39 @@ gsk_vulkan_render_draw (GskVulkanRender *self)
gsk_vulkan_render_collect_vertex_buffer (self);
gsk_vulkan_render_draw_pass (self, self->render_pass, self->first_op);
command_buffer = gsk_vulkan_command_pool_get_buffer (self->command_pool);
if (self->vertex_buffer)
vkCmdBindVertexBuffers (command_buffer,
0,
1,
(VkBuffer[1]) {
gsk_vulkan_buffer_get_buffer (self->vertex_buffer)
},
(VkDeviceSize[1]) { 0 });
vkCmdBindDescriptorSets (command_buffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
self->pipeline_layout,
0,
N_DESCRIPTOR_SETS,
self->descriptor_sets,
0,
NULL);
op = self->first_op;
while (op)
{
op = gsk_vulkan_op_command (op, self, self->pipeline_layout, command_buffer);
}
gsk_vulkan_command_pool_submit_buffer (self->command_pool,
command_buffer,
0,
NULL,
0,
NULL,
self->fence);
#ifdef G_ENABLE_DEBUG
if (GSK_RENDERER_DEBUG_CHECK (self->renderer, SYNC))
@@ -1045,7 +1049,6 @@ gsk_vulkan_render_cleanup (GskVulkanRender *self)
gsk_descriptor_image_infos_set_size (&self->descriptor_images, 0);
gsk_descriptor_buffer_infos_set_size (&self->descriptor_buffers, 0);
g_clear_pointer (&self->render_pass, gsk_vulkan_render_pass_free);
g_clear_pointer (&self->clip, cairo_region_destroy);
g_clear_object (&self->target);
}


@@ -73,7 +73,7 @@ gsk_vulkan_render_pass_op_command (GskVulkanOp *op,
{
GskVulkanRenderPassOp *self = (GskVulkanRenderPassOp *) op;
return gsk_vulkan_render_draw_pass (render, self->render_pass, op->next);
return gsk_vulkan_render_draw_pass (render, self->render_pass, op->next, command_buffer);
}
static const GskVulkanOpClass GSK_VULKAN_RENDER_PASS_OP_CLASS = {
@@ -170,6 +170,40 @@ static const GskVulkanOpClass GSK_VULKAN_RENDER_PASS_END_OP_CLASS = {
gsk_vulkan_render_pass_end_op_command
};
void
gsk_vulkan_render_pass_op (GskVulkanRender *render,
GdkVulkanContext *context,
GskVulkanImage *image,
cairo_region_t *clip,
const graphene_vec2_t *scale,
const graphene_rect_t *viewport,
GskRenderNode *node,
gboolean is_root)
{
GskVulkanRenderPassOp *self;
GskVulkanRenderPassEndOp *end;
self = (GskVulkanRenderPassOp *) gsk_vulkan_op_alloc (render, &GSK_VULKAN_RENDER_PASS_OP_CLASS);
self->image = image;
self->render_pass = gsk_vulkan_render_pass_new (context,
render,
self->image,
scale,
viewport,
clip,
node,
is_root);
/* This invalidates the self pointer */
gsk_vulkan_render_pass_add (self->render_pass, render, node);
end = (GskVulkanRenderPassEndOp *) gsk_vulkan_op_alloc (render, &GSK_VULKAN_RENDER_PASS_END_OP_CLASS);
end->image = g_object_ref (image);
}
GskVulkanImage *
gsk_vulkan_render_pass_op_offscreen (GskVulkanRender *render,
GdkVulkanContext *context,
@@ -177,10 +211,8 @@ gsk_vulkan_render_pass_op_offscreen (GskVulkanRender *render,
const graphene_rect_t *viewport,
GskRenderNode *node)
{
GskVulkanRenderPassOp *self;
GskVulkanRenderPassEndOp *end;
GskVulkanImage *image;
graphene_rect_t view;
GskVulkanImage *image;
cairo_region_t *clip;
float scale_x, scale_y;
@@ -196,33 +228,22 @@ gsk_vulkan_render_pass_op_offscreen (GskVulkanRender *render,
gsk_render_node_get_preferred_depth (node)),
view.size.width, view.size.height);
self = (GskVulkanRenderPassOp *) gsk_vulkan_op_alloc (render, &GSK_VULKAN_RENDER_PASS_OP_CLASS);
self->image = image;
clip = cairo_region_create_rectangle (&(cairo_rectangle_int_t) {
0, 0,
gsk_vulkan_image_get_width (self->image),
gsk_vulkan_image_get_height (self->image)
gsk_vulkan_image_get_width (image),
gsk_vulkan_image_get_height (image)
});
self->render_pass = gsk_vulkan_render_pass_new (context,
render,
self->image,
scale,
&view,
clip,
node,
FALSE);
gsk_vulkan_render_pass_op (render,
context,
image,
clip,
scale,
&view,
node,
FALSE);
cairo_region_destroy (clip);
/* This invalidates the self pointer */
gsk_vulkan_render_pass_add (self->render_pass, render, node);
end = (GskVulkanRenderPassEndOp *) gsk_vulkan_op_alloc (render, &GSK_VULKAN_RENDER_PASS_END_OP_CLASS);
end->image = g_object_ref (image);
return self->image;
return image;
}
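
The "/* This invalidates the self pointer */" comment above is worth dwelling on: ops appear to be allocated out of the renderer's growing render_ops array, so recursing into gsk_vulkan_render_pass_add() (which may allocate further ops) can move that storage and leave the previously returned self dangling, which is presumably why the end op re-reads image from the function argument rather than going through self. A small stand-alone illustration of the hazard; the OpArray type here is a made-up stand-in for the real allocator, not GTK code.

```c
/* op-array.c: why pointers into a growable op array must not outlive
 * further allocations. OpArray is a hypothetical stand-in, not GTK code. */
#include <stdlib.h>
#include <stdio.h>

typedef struct {
  unsigned char *data;
  size_t         len;
  size_t         capacity;
} OpArray;

/* Append `size` bytes and return a pointer to the new element. The
 * realloc() may move the whole buffer, invalidating pointers returned
 * by earlier calls. (Error handling omitted for brevity.) */
static void *
op_array_alloc (OpArray *ops, size_t size)
{
  if (ops->len + size > ops->capacity)
    {
      ops->capacity = 2 * (ops->len + size);
      ops->data = realloc (ops->data, ops->capacity);
    }
  void *op = ops->data + ops->len;
  ops->len += size;
  return op;
}

typedef struct { int kind; } PassOp;

int
main (void)
{
  OpArray ops = { NULL, 0, 0 };

  PassOp *self = op_array_alloc (&ops, sizeof (PassOp));
  self->kind = 1;   /* fine: nothing has been appended since */

  /* Anything that allocates more ops -- like walking child render
   * nodes -- may trigger a realloc behind our back: */
  for (int i = 0; i < 1000; i++)
    op_array_alloc (&ops, sizeof (PassOp));

  /* self->kind = 2;   <- use-after-realloc: `self` may now dangle.   */
  /* Safe patterns: finish with `self` before appending, keep an index
   * instead of a pointer, or (as in the commit) reuse the original
   * function arguments instead of reading back through `self`.       */
  printf ("%zu ops recorded\n", ops.len / sizeof (PassOp));

  free (ops.data);
  return 0;
}
```
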


@@ -4,6 +4,14 @@
G_BEGIN_DECLS
void gsk_vulkan_render_pass_op (GskVulkanRender *render,
GdkVulkanContext *context,
GskVulkanImage *image,
cairo_region_t *clip,
const graphene_vec2_t *scale,
const graphene_rect_t *viewport,
GskRenderNode *node,
gboolean is_root);
GskVulkanImage * gsk_vulkan_render_pass_op_offscreen (GskVulkanRender *render,
GdkVulkanContext *context,
const graphene_vec2_t *scale,


@@ -55,7 +55,8 @@ guchar * gsk_vulkan_render_get_buffer_memory (GskVulk
void gsk_vulkan_render_draw (GskVulkanRender *self);
GskVulkanOp * gsk_vulkan_render_draw_pass (GskVulkanRender *self,
GskVulkanRenderPass *render_pass,
GskVulkanOp *op);
GskVulkanOp *op,
VkCommandBuffer command_buffer);
GdkTexture * gsk_vulkan_render_download_target (GskVulkanRender *self);
VkFence gsk_vulkan_render_get_fence (GskVulkanRender *self);