gpu: Add a CommandState struct to the command vfuncs

This way, we can make it writable and track things like the active
textures and the current program.

We don't do that yet, but we can.
This commit is contained in:
Benjamin Otte 2023-11-04 03:57:38 +01:00
parent 78a7127b96
commit 1733671295
15 changed files with 195 additions and 207 deletions

View File

@ -141,6 +141,7 @@ gsk_gl_frame_submit (GskGpuFrame *frame,
GskGpuOp *op)
{
GskGLFrame *self = GSK_GL_FRAME (frame);
GskGLCommandState state = { 0, };
glEnable (GL_SCISSOR_TEST);
@ -166,7 +167,7 @@ gsk_gl_frame_submit (GskGpuFrame *frame,
while (op)
{
op = gsk_gpu_op_gl_command (op, frame, 0);
op = gsk_gpu_op_gl_command (op, frame, &state);
}
}

View File

@ -45,11 +45,9 @@ gsk_gpu_blit_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_blit_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_blit_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuBlitOp *self = (GskGpuBlitOp *) op;
VkImageLayout src_layout, dest_layout;
@ -61,7 +59,7 @@ gsk_gpu_blit_op_vk_command (GskGpuOp *op,
src_layout != VK_IMAGE_LAYOUT_GENERAL)
{
gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->src_image),
command_buffer,
state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
VK_ACCESS_TRANSFER_READ_BIT);
@ -74,7 +72,7 @@ gsk_gpu_blit_op_vk_command (GskGpuOp *op,
dest_layout != VK_IMAGE_LAYOUT_GENERAL)
{
gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->dest_image),
command_buffer,
state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_ACCESS_TRANSFER_WRITE_BIT);
@ -95,7 +93,7 @@ gsk_gpu_blit_op_vk_command (GskGpuOp *op,
break;
}
vkCmdBlitImage (command_buffer,
vkCmdBlitImage (state->vk_command_buffer,
gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->src_image)),
src_layout,
gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->dest_image)),
@ -146,9 +144,9 @@ gsk_gpu_blit_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_blit_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_blit_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuBlitOp *self = (GskGpuBlitOp *) op;
GLenum filter;
@ -176,10 +174,10 @@ gsk_gpu_blit_op_gl_command (GskGpuOp *op,
self->src_rect.x + self->src_rect.width,
self->src_rect.y + self->src_rect.height,
self->dest_rect.x,
flip_y ? flip_y - self->dest_rect.y - self->dest_rect.height
state->flip_y ? state->flip_y - self->dest_rect.y - self->dest_rect.height
: self->dest_rect.y,
self->dest_rect.x + self->dest_rect.width,
flip_y ? flip_y - self->dest_rect.y
state->flip_y ? state->flip_y - self->dest_rect.y
: self->dest_rect.y + self->dest_rect.height,
GL_COLOR_BUFFER_BIT,
filter);

View File

@ -63,22 +63,20 @@ gsk_gpu_border_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_border_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_border_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
return gsk_gpu_shader_op_vk_command_n (op, frame, render_pass, format, command_buffer, 8);
return gsk_gpu_shader_op_vk_command_n (op, frame, state, 8);
}
#endif
static GskGpuOp *
gsk_gpu_border_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_border_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
return gsk_gpu_shader_op_gl_command_n (op, frame, flip_y, 8);
return gsk_gpu_shader_op_gl_command_n (op, frame, state, 8);
}
static const GskGpuShaderOpClass GSK_GPU_BORDER_OP_CLASS = {

View File

@ -47,18 +47,16 @@ gsk_gpu_init_clear_value (VkClearValue *value,
}
static GskGpuOp *
gsk_gpu_clear_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_clear_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuClearOp *self = (GskGpuClearOp *) op;
VkClearValue clear_value;
gsk_gpu_init_clear_value (&clear_value, &self->color);
vkCmdClearAttachments (command_buffer,
vkCmdClearAttachments (state->vk_command_buffer,
1,
&(VkClearAttachment) {
VK_IMAGE_ASPECT_COLOR_BIT,
@ -80,17 +78,17 @@ gsk_gpu_clear_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_clear_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_clear_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuClearOp *self = (GskGpuClearOp *) op;
int scissor[4];
glGetIntegerv (GL_SCISSOR_BOX, scissor);
if (flip_y)
glScissor (self->rect.x, flip_y - self->rect.y - self->rect.height, self->rect.width, self->rect.height);
if (state->flip_y)
glScissor (self->rect.x, state->flip_y - self->rect.y - self->rect.height, self->rect.width, self->rect.height);
else
glScissor (self->rect.x, self->rect.y, self->rect.width, self->rect.height);

View File

@ -83,11 +83,9 @@ gsk_gpu_download_op_vk_create (GskGpuDownloadOp *self)
}
static GskGpuOp *
gsk_gpu_download_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_download_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;
gsize width, height, stride;
@ -99,12 +97,12 @@ gsk_gpu_download_op_vk_command (GskGpuOp *op,
height * stride);
gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->image),
command_buffer,
state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
VK_ACCESS_TRANSFER_READ_BIT);
vkCmdCopyImageToBuffer (command_buffer,
vkCmdCopyImageToBuffer (state->vk_command_buffer,
gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->image)),
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
gsk_vulkan_buffer_get_vk_buffer (GSK_VULKAN_BUFFER (self->buffer)),
@ -133,7 +131,7 @@ gsk_gpu_download_op_vk_command (GskGpuOp *op,
}
});
vkCmdPipelineBarrier (command_buffer,
vkCmdPipelineBarrier (state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_HOST_BIT,
0,
@ -180,9 +178,9 @@ gsk_gl_texture_data_free (gpointer user_data)
}
static GskGpuOp *
gsk_gpu_download_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_download_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;
GdkGLTextureBuilder *builder;

View File

@ -36,15 +36,13 @@ gsk_gpu_globals_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_globals_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_globals_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuGlobalsOp *self = (GskGpuGlobalsOp *) op;
vkCmdPushConstants (command_buffer,
vkCmdPushConstants (state->vk_command_buffer,
gsk_vulkan_device_get_vk_pipeline_layout (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
gsk_vulkan_frame_get_pipeline_layout (GSK_VULKAN_FRAME (frame))),
VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT,
@ -57,9 +55,9 @@ gsk_gpu_globals_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_globals_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_globals_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuGlobalsOp *self = (GskGpuGlobalsOp *) op;

View File

@ -40,11 +40,9 @@ gsk_gpu_mipmap_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_mipmap_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_mipmap_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuMipmapOp *self = (GskGpuMipmapOp *) op;
GskVulkanImage *image;
@ -60,14 +58,14 @@ gsk_gpu_mipmap_op_vk_command (GskGpuOp *op,
/* optimize me: only transition mipmap layers 1..n, but not 0 */
gsk_vulkan_image_transition (image,
command_buffer,
state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_ACCESS_TRANSFER_WRITE_BIT);
for (i = 0; /* we break after the barrier */ ; i++)
{
vkCmdPipelineBarrier (command_buffer,
vkCmdPipelineBarrier (state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
0,
@ -92,7 +90,7 @@ gsk_gpu_mipmap_op_vk_command (GskGpuOp *op,
});
if (i + 1 == n_levels)
break;
vkCmdBlitImage (command_buffer,
vkCmdBlitImage (state->vk_command_buffer,
vk_image,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
vk_image,
@ -151,9 +149,9 @@ gsk_gpu_mipmap_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_mipmap_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_mipmap_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuMipmapOp *self = (GskGpuMipmapOp *) op;

View File

@ -33,21 +33,19 @@ gsk_gpu_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
GskGpuOp *
gsk_gpu_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
return op->op_class->vk_command (op, frame, render_pass, format, command_buffer);
return op->op_class->vk_command (op, frame, state);
}
#endif
GskGpuOp *
gsk_gpu_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
return op->op_class->gl_command (op, frame, flip_y);
return op->op_class->gl_command (op, frame, state);
}

View File

@ -17,6 +17,23 @@ typedef enum
GSK_GPU_STAGE_END_PASS
} GskGpuStage;
typedef struct _GskGLCommandState GskGLCommandState;
typedef struct _GskVulkanCommandState GskVulkanCommandState;
struct _GskGLCommandState
{
gsize flip_y;
};
#ifdef GDK_RENDERING_VULKAN
struct _GskVulkanCommandState
{
VkRenderPass vk_render_pass;
VkFormat vk_format;
VkCommandBuffer vk_command_buffer;
};
#endif
struct _GskGpuOp
{
const GskGpuOpClass *op_class;
@ -39,13 +56,11 @@ struct _GskGpuOpClass
#ifdef GDK_RENDERING_VULKAN
GskGpuOp * (* vk_command) (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer);
GskVulkanCommandState *state);
#endif
GskGpuOp * (* gl_command) (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y);
GskGLCommandState *state);
};
/* ensures alignment of ops to multiples of 16 bytes - and that makes graphene happy */
@ -63,13 +78,11 @@ void gsk_gpu_op_print (GskGpuO
#ifdef GDK_RENDERING_VULKAN
GskGpuOp * gsk_gpu_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer);
GskVulkanCommandState *state);
#endif
GskGpuOp * gsk_gpu_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y);
GskGLCommandState *state);
G_END_DECLS

View File

@ -88,29 +88,25 @@ gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp *self,
}
static GskGpuOp *
gsk_gpu_render_pass_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_render_pass_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;
VkRenderPass vk_render_pass;
VkFormat vk_format;
/* nesting frame passes not allowed */
g_assert (render_pass == VK_NULL_HANDLE);
g_assert (state->vk_render_pass == VK_NULL_HANDLE);
gsk_gpu_render_pass_op_do_barriers (self, command_buffer);
gsk_gpu_render_pass_op_do_barriers (self, state->vk_command_buffer);
vk_format = gsk_vulkan_image_get_vk_format (GSK_VULKAN_IMAGE (self->target));
vk_render_pass = gsk_vulkan_device_get_vk_render_pass (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
vk_format,
gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->target)),
gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type));
state->vk_format = gsk_vulkan_image_get_vk_format (GSK_VULKAN_IMAGE (self->target));
state->vk_render_pass = gsk_vulkan_device_get_vk_render_pass (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
state->vk_format,
gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->target)),
gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type));
vkCmdSetViewport (command_buffer,
vkCmdSetViewport (state->vk_command_buffer,
0,
1,
&(VkViewport) {
@ -122,12 +118,12 @@ gsk_gpu_render_pass_op_vk_command (GskGpuOp *op,
.maxDepth = 1
});
vkCmdBeginRenderPass (command_buffer,
vkCmdBeginRenderPass (state->vk_command_buffer,
&(VkRenderPassBeginInfo) {
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.renderPass = vk_render_pass,
.renderPass = state->vk_render_pass,
.framebuffer = gsk_vulkan_image_get_vk_framebuffer (GSK_VULKAN_IMAGE(self->target),
vk_render_pass),
state->vk_render_pass),
.renderArea = {
{ self->area.x, self->area.y },
{ self->area.width, self->area.height }
@ -142,36 +138,38 @@ gsk_gpu_render_pass_op_vk_command (GskGpuOp *op,
op = op->next;
while (op->op_class->stage != GSK_GPU_STAGE_END_PASS)
{
op = gsk_gpu_op_vk_command (op, frame, vk_render_pass, vk_format, command_buffer);
op = gsk_gpu_op_vk_command (op, frame, state);
}
op = gsk_gpu_op_vk_command (op, frame, vk_render_pass, vk_format, command_buffer);
op = gsk_gpu_op_vk_command (op, frame, state);
return op;
}
#endif
static GskGpuOp *
gsk_gpu_render_pass_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_render_pass_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;
gsize target_flip_y;
/* nesting frame passes not allowed */
g_assert (state->flip_y == 0);
gsk_gl_image_bind_framebuffer (GSK_GL_IMAGE (self->target));
if (gsk_gl_image_is_flipped (GSK_GL_IMAGE (self->target)))
target_flip_y = gsk_gpu_image_get_height (self->target);
state->flip_y = gsk_gpu_image_get_height (self->target);
else
target_flip_y = 0;
state->flip_y = 0;
glViewport (0, 0,
gsk_gpu_image_get_width (self->target),
gsk_gpu_image_get_height (self->target));
if (target_flip_y)
glScissor (self->area.x, target_flip_y - self->area.y - self->area.height, self->area.width, self->area.height);
if (state->flip_y)
glScissor (self->area.x, state->flip_y - self->area.y - self->area.height, self->area.width, self->area.height);
else
glScissor (self->area.x, self->area.y, self->area.width, self->area.height);
glClearColor (0, 0, 0, 0);
@ -180,10 +178,10 @@ gsk_gpu_render_pass_op_gl_command (GskGpuOp *op,
op = op->next;
while (op->op_class->stage != GSK_GPU_STAGE_END_PASS)
{
op = gsk_gpu_op_gl_command (op, frame, target_flip_y);
op = gsk_gpu_op_gl_command (op, frame, state);
}
op = gsk_gpu_op_gl_command (op, frame, target_flip_y);
op = gsk_gpu_op_gl_command (op, frame, state);
return op;
}
@ -232,19 +230,17 @@ gsk_gpu_render_pass_end_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_render_pass_end_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_render_pass_end_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuFramePassEndOp *self = (GskGpuFramePassEndOp *) op;
vkCmdEndRenderPass (command_buffer);
vkCmdEndRenderPass (state->vk_command_buffer);
if (gsk_gpu_image_get_flags (self->target) & GSK_GPU_IMAGE_CAN_MIPMAP)
{
vkCmdPipelineBarrier (command_buffer,
vkCmdPipelineBarrier (state->vk_command_buffer,
gsk_vulkan_image_get_vk_pipeline_stage (GSK_VULKAN_IMAGE (self->target)),
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
0,
@ -274,16 +270,19 @@ gsk_gpu_render_pass_end_op_vk_command (GskGpuOp *op,
gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type),
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
state->vk_render_pass = VK_NULL_HANDLE;
state->vk_format = VK_FORMAT_UNDEFINED;
return op->next;
}
#endif
static GskGpuOp *
gsk_gpu_render_pass_end_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_render_pass_end_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
/* nothing to do here */
state->flip_y = 0;
return op->next;
}

View File

@ -34,15 +34,13 @@ gsk_gpu_scissor_op_print (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_scissor_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_scissor_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuScissorOp *self = (GskGpuScissorOp *) op;
vkCmdSetScissor (command_buffer,
vkCmdSetScissor (state->vk_command_buffer,
0,
1,
&(VkRect2D) {
@ -55,14 +53,14 @@ gsk_gpu_scissor_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_scissor_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_scissor_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuScissorOp *self = (GskGpuScissorOp *) op;
if (flip_y)
glScissor (self->rect.x, flip_y - self->rect.y - self->rect.height, self->rect.width, self->rect.height);
if (state->flip_y)
glScissor (self->rect.x, state->flip_y - self->rect.y - self->rect.height, self->rect.width, self->rect.height);
else
glScissor (self->rect.x, self->rect.y, self->rect.width, self->rect.height);

View File

@ -28,12 +28,10 @@ gsk_gpu_shader_op_finish (GskGpuOp *op)
#ifdef GDK_RENDERING_VULKAN
GskGpuOp *
gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer,
gsize instance_scale)
gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state,
gsize instance_scale)
{
GskGpuShaderOp *self = (GskGpuShaderOp *) op;
GskGpuShaderOpClass *shader_op_class = (GskGpuShaderOpClass *) op->op_class;
@ -59,16 +57,16 @@ gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
i++;
}
vkCmdBindPipeline (command_buffer,
vkCmdBindPipeline (state->vk_command_buffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
gsk_vulkan_device_get_vk_pipeline (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
gsk_vulkan_frame_get_pipeline_layout (GSK_VULKAN_FRAME (frame)),
shader_op_class,
self->clip,
format,
render_pass));
state->vk_format,
state->vk_render_pass));
vkCmdDraw (command_buffer,
vkCmdDraw (state->vk_command_buffer,
6 * instance_scale, i,
0, self->vertex_offset / shader_op_class->vertex_size);
@ -76,21 +74,19 @@ gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
}
GskGpuOp *
gsk_gpu_shader_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_shader_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
return gsk_gpu_shader_op_vk_command_n (op, frame, render_pass, format, command_buffer, 1);
return gsk_gpu_shader_op_vk_command_n (op, frame, state, 1);
}
#endif
GskGpuOp *
gsk_gpu_shader_op_gl_command_n (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y,
gsize instance_scale)
gsk_gpu_shader_op_gl_command_n (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state,
gsize instance_scale)
{
GskGpuShaderOp *self = (GskGpuShaderOp *) op;
GskGpuShaderOpClass *shader_op_class = (GskGpuShaderOpClass *) op->op_class;
@ -146,11 +142,11 @@ gsk_gpu_shader_op_gl_command_n (GskGpuOp *op,
}
GskGpuOp *
gsk_gpu_shader_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_shader_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
return gsk_gpu_shader_op_gl_command_n (op, frame, flip_y, 1);
return gsk_gpu_shader_op_gl_command_n (op, frame, state, 1);
}
GskGpuShaderOp *

View File

@ -38,23 +38,19 @@ void gsk_gpu_shader_op_finish (GskGpuO
#ifdef GDK_RENDERING_VULKAN
GskGpuOp * gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer,
GskVulkanCommandState *state,
gsize instance_scale);
GskGpuOp * gsk_gpu_shader_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer);
GskVulkanCommandState *state);
#endif
GskGpuOp * gsk_gpu_shader_op_gl_command_n (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y,
GskGLCommandState *state,
gsize instance_scale);
GskGpuOp * gsk_gpu_shader_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y);
GskGLCommandState *state);
static inline void
gsk_gpu_rgba_to_float (const GdkRGBA *rgba,

View File

@ -95,7 +95,7 @@ gsk_gpu_upload_op_gl_command (GskGpuOp *op,
static GskGpuOp *
gsk_gpu_upload_op_vk_command_with_area (GskGpuOp *op,
GskGpuFrame *frame,
VkCommandBuffer command_buffer,
GskVulkanCommandState *state,
GskVulkanImage *image,
const cairo_rectangle_int_t *area,
void (* draw_func) (GskGpuOp *, guchar *, gsize),
@ -113,7 +113,7 @@ gsk_gpu_upload_op_vk_command_with_area (GskGpuOp *op,
gsk_gpu_buffer_unmap (*buffer);
vkCmdPipelineBarrier (command_buffer,
vkCmdPipelineBarrier (state->vk_command_buffer,
VK_PIPELINE_STAGE_HOST_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
0,
@ -130,12 +130,12 @@ gsk_gpu_upload_op_vk_command_with_area (GskGpuOp *op,
},
0, NULL);
gsk_vulkan_image_transition (image,
command_buffer,
state->vk_command_buffer,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_ACCESS_TRANSFER_WRITE_BIT);
vkCmdCopyBufferToImage (command_buffer,
vkCmdCopyBufferToImage (state->vk_command_buffer,
gsk_vulkan_buffer_get_vk_buffer (GSK_VULKAN_BUFFER (*buffer)),
gsk_vulkan_image_get_vk_image (image),
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
@ -168,12 +168,12 @@ gsk_gpu_upload_op_vk_command_with_area (GskGpuOp *op,
}
static GskGpuOp *
gsk_gpu_upload_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkCommandBuffer command_buffer,
GskVulkanImage *image,
void (* draw_func) (GskGpuOp *, guchar *, gsize),
GskGpuBuffer **buffer)
gsk_gpu_upload_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state,
GskVulkanImage *image,
void (* draw_func) (GskGpuOp *, guchar *, gsize),
GskGpuBuffer **buffer)
{
gsize stride;
guchar *data;
@ -190,7 +190,7 @@ gsk_gpu_upload_op_vk_command (GskGpuOp *op,
return gsk_gpu_upload_op_vk_command_with_area (op,
frame,
command_buffer,
state,
image,
&(cairo_rectangle_int_t) {
0, 0,
@ -253,17 +253,15 @@ gsk_gpu_upload_texture_op_draw (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_upload_texture_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_upload_texture_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuUploadTextureOp *self = (GskGpuUploadTextureOp *) op;
return gsk_gpu_upload_op_vk_command (op,
frame,
command_buffer,
state,
GSK_VULKAN_IMAGE (self->image),
gsk_gpu_upload_texture_op_draw,
&self->buffer);
@ -271,9 +269,9 @@ gsk_gpu_upload_texture_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_upload_texture_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_upload_texture_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuUploadTextureOp *self = (GskGpuUploadTextureOp *) op;
@ -390,17 +388,15 @@ gsk_gpu_upload_cairo_op_draw (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_upload_cairo_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_upload_cairo_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
return gsk_gpu_upload_op_vk_command (op,
frame,
command_buffer,
state,
GSK_VULKAN_IMAGE (self->image),
gsk_gpu_upload_cairo_op_draw,
&self->buffer);
@ -408,9 +404,9 @@ gsk_gpu_upload_cairo_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_upload_cairo_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_upload_cairo_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
@ -541,17 +537,15 @@ gsk_gpu_upload_glyph_op_draw (GskGpuOp *op,
#ifdef GDK_RENDERING_VULKAN
static GskGpuOp *
gsk_gpu_upload_glyph_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
VkRenderPass render_pass,
VkFormat format,
VkCommandBuffer command_buffer)
gsk_gpu_upload_glyph_op_vk_command (GskGpuOp *op,
GskGpuFrame *frame,
GskVulkanCommandState *state)
{
GskGpuUploadGlyphOp *self = (GskGpuUploadGlyphOp *) op;
return gsk_gpu_upload_op_vk_command_with_area (op,
frame,
command_buffer,
state,
GSK_VULKAN_IMAGE (self->image),
&self->area,
gsk_gpu_upload_glyph_op_draw,
@ -560,9 +554,9 @@ gsk_gpu_upload_glyph_op_vk_command (GskGpuOp *op,
#endif
static GskGpuOp *
gsk_gpu_upload_glyph_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
gsize flip_y)
gsk_gpu_upload_glyph_op_gl_command (GskGpuOp *op,
GskGpuFrame *frame,
GskGLCommandState *state)
{
GskGpuUploadGlyphOp *self = (GskGpuUploadGlyphOp *) op;

View File

@ -372,6 +372,7 @@ gsk_vulkan_frame_submit (GskGpuFrame *frame,
GskGpuOp *op)
{
GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
GskVulkanCommandState state;
if (storage_buffer)
{
@ -403,9 +404,13 @@ gsk_vulkan_frame_submit (GskGpuFrame *frame,
},
(VkDeviceSize[1]) { 0 });
state.vk_command_buffer = self->vk_command_buffer;
state.vk_render_pass = VK_NULL_HANDLE;
state.vk_format = VK_FORMAT_UNDEFINED;
while (op)
{
op = gsk_gpu_op_vk_command (op, frame, VK_NULL_HANDLE, VK_FORMAT_UNDEFINED, self->vk_command_buffer);
op = gsk_gpu_op_vk_command (op, frame, &state);
}
GSK_VK_CHECK (vkEndCommandBuffer, self->vk_command_buffer);