#include "config.h"

#include "gskgpurenderpassopprivate.h"

#include "gskglimageprivate.h"
#include "gskgpudeviceprivate.h"
#include "gskgpuframeprivate.h"
#include "gskgpunodeprocessorprivate.h"
#include "gskgpuprintprivate.h"
#include "gskgpushaderopprivate.h"
#include "gskrendernodeprivate.h"
#ifdef GDK_RENDERING_VULKAN
#include "gskvulkanimageprivate.h"
#endif

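/* A render pass op brackets all drawing into one target image: the
 * GSK_GPU_STAGE_BEGIN_PASS op below starts the pass, the shader ops
 * recorded after it draw into the target, and a matching
 * GSK_GPU_STAGE_END_PASS op finishes it. Callers emit the two ops as
 * a pair, roughly like this (hypothetical usage sketch, not code from
 * this file):
 *
 *   gsk_gpu_render_pass_begin_op (frame, image, &area,
 *                                 GSK_GPU_LOAD_OP_CLEAR, clear_color,
 *                                 GSK_RENDER_PASS_OFFSCREEN);
 *   ... record shader ops drawing into image ...
 *   gsk_gpu_render_pass_end_op (frame, image, GSK_RENDER_PASS_OFFSCREEN);
 */
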
typedef struct _GskGpuRenderPassOp GskGpuRenderPassOp;

struct _GskGpuRenderPassOp
{
  GskGpuOp op;

  GskGpuImage *target;          /* image being rendered to */
  cairo_rectangle_int_t area;   /* region of the target this pass touches */
  GskGpuLoadOp load_op;         /* how to treat the target's previous contents */
  float clear_color[4];         /* only meaningful for GSK_GPU_LOAD_OP_CLEAR */
  GskRenderPassType pass_type;  /* what the target is used for after the pass */
};

static void
gsk_gpu_render_pass_op_finish (GskGpuOp *op)
{
  GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;

  g_object_unref (self->target);
}

static void
gsk_gpu_render_pass_op_print (GskGpuOp    *op,
                              GskGpuFrame *frame,
                              GString     *string,
                              guint        indent)
{
  GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;

  gsk_gpu_print_op (string, indent, "begin-render-pass");
  gsk_gpu_print_image (string, self->target);
  gsk_gpu_print_int_rect (string, &self->area);
  switch (self->load_op)
    {
    case GSK_GPU_LOAD_OP_LOAD:
      gsk_gpu_print_string (string, "load");
      break;
    case GSK_GPU_LOAD_OP_CLEAR:
      gsk_gpu_print_rgba (string, self->clear_color);
      break;
    case GSK_GPU_LOAD_OP_DONT_CARE:
      gsk_gpu_print_string (string, "dont-care");
      break;
    default:
      g_assert_not_reached ();
      break;
    }
  gsk_gpu_print_newline (string);
}

#ifdef GDK_RENDERING_VULKAN
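/* The pass type determines the VkImageLayout the target ends up in
 * after the pass: images to be presented go to PRESENT_SRC_KHR,
 * offscreen results will be sampled by later passes, and exported
 * images are left in the GENERAL layout. */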
static VkImageLayout
gsk_gpu_render_pass_type_to_vk_image_layout (GskRenderPassType type)
{
  switch (type)
    {
    default:
      g_assert_not_reached ();
    case GSK_RENDER_PASS_PRESENT:
      return VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    case GSK_RENDER_PASS_OFFSCREEN:
      return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    case GSK_RENDER_PASS_EXPORT:
      return VK_IMAGE_LAYOUT_GENERAL;
    }
}

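/* Walk all ops recorded for this pass (everything up to the matching
 * END_PASS op) and transition the images that shader ops sample from
 * to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL. These barriers cannot
 * be recorded once vkCmdBeginRenderPass () has run (that would require
 * a subpass self-dependency), so they are all emitted up front. */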
static void
gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp    *self,
                                    GskVulkanCommandState *state)
{
  GskGpuShaderOp *shader;
  GskGpuOp *op;

  for (op = ((GskGpuOp *) self)->next;
       op->op_class->stage != GSK_GPU_STAGE_END_PASS;
       op = op->next)
    {
      if (op->op_class->stage != GSK_GPU_STAGE_SHADER)
        continue;

      shader = (GskGpuShaderOp *) op;

      if (shader->images[0])
        gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (shader->images[0]),
                                     state->semaphores,
                                     state->vk_command_buffer,
                                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                     VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                     VK_ACCESS_SHADER_READ_BIT);
      if (shader->images[1])
        gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (shader->images[1]),
                                     state->semaphores,
                                     state->vk_command_buffer,
                                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                                     VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
                                     VK_ACCESS_SHADER_READ_BIT);
    }
}

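/* GskGpuLoadOp maps 1:1 onto VkAttachmentLoadOp; the default case just
 * guards against values outside the enum. */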
static VkAttachmentLoadOp
gsk_gpu_load_op_to_vk_load_op (GskGpuLoadOp op)
{
  switch (op)
    {
    case GSK_GPU_LOAD_OP_LOAD:
      return VK_ATTACHMENT_LOAD_OP_LOAD;
    case GSK_GPU_LOAD_OP_CLEAR:
      return VK_ATTACHMENT_LOAD_OP_CLEAR;
    case GSK_GPU_LOAD_OP_DONT_CARE:
      return VK_ATTACHMENT_LOAD_OP_DONT_CARE;
    default:
      g_return_val_if_reached (VK_ATTACHMENT_LOAD_OP_DONT_CARE);
    }
}

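/* Record a whole render pass: emit the image barriers, fetch a
 * VkRenderPass matching the target's format, load op and layout
 * transition from the device, set the viewport to the full target,
 * begin the pass clipped to self->area, then dispatch every recorded
 * op up to and including the END_PASS op, which ends the pass. */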
static GskGpuOp *
gsk_gpu_render_pass_op_vk_command (GskGpuOp              *op,
                                   GskGpuFrame           *frame,
                                   GskVulkanCommandState *state)
{
  GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;

  /* nesting frame passes not allowed */
  g_assert (state->vk_render_pass == VK_NULL_HANDLE);

  gsk_gpu_render_pass_op_do_barriers (self, state);

  state->vk_format = gsk_vulkan_image_get_vk_format (GSK_VULKAN_IMAGE (self->target));
  state->vk_render_pass = gsk_vulkan_device_get_vk_render_pass (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
                                                                state->vk_format,
                                                                gsk_gpu_load_op_to_vk_load_op (self->load_op),
                                                                gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->target)),
                                                                gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type));

  vkCmdSetViewport (state->vk_command_buffer,
                    0,
                    1,
                    &(VkViewport) {
                        .x = 0,
                        .y = 0,
                        .width = gsk_gpu_image_get_width (self->target),
                        .height = gsk_gpu_image_get_height (self->target),
                        .minDepth = 0,
                        .maxDepth = 1
                    });

  vkCmdBeginRenderPass (state->vk_command_buffer,
                        &(VkRenderPassBeginInfo) {
                            .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
                            .renderPass = state->vk_render_pass,
                            .framebuffer = gsk_vulkan_image_get_vk_framebuffer (GSK_VULKAN_IMAGE (self->target),
                                                                                state->vk_render_pass),
                            .renderArea = {
                                { self->area.x, self->area.y },
                                { self->area.width, self->area.height }
                            },
                            .clearValueCount = self->load_op == GSK_GPU_LOAD_OP_CLEAR ? 1 : 0,
                            .pClearValues = self->load_op == GSK_GPU_LOAD_OP_CLEAR ? (VkClearValue [1]) {
                                {
                                    .color = {
                                        .float32 = {
                                            self->clear_color[0],
                                            self->clear_color[1],
                                            self->clear_color[2],
                                            self->clear_color[3]
                                        }
                                    }
                                }
                            } : NULL,
                        },
                        VK_SUBPASS_CONTENTS_INLINE);

  op = op->next;
  while (op->op_class->stage != GSK_GPU_STAGE_END_PASS)
    {
      op = gsk_gpu_op_vk_command (op, frame, state);
    }

  /* run the END_PASS op, too */
  op = gsk_gpu_op_vk_command (op, frame, state);

  return op;
}
#endif

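/* The GL equivalent of beginning a pass: bind the target's
 * framebuffer, record whether the target is y-flipped (flip_y is also
 * what the nesting assert below checks), set viewport and scissor,
 * optionally clear, then dispatch ops until the END_PASS op has run. */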
static GskGpuOp *
gsk_gpu_render_pass_op_gl_command (GskGpuOp          *op,
                                   GskGpuFrame       *frame,
                                   GskGLCommandState *state)
{
  GskGpuRenderPassOp *self = (GskGpuRenderPassOp *) op;

  /* nesting frame passes not allowed */
  g_assert (state->flip_y == 0);

  gsk_gl_image_bind_framebuffer (GSK_GL_IMAGE (self->target));

  if (gsk_gl_image_is_flipped (GSK_GL_IMAGE (self->target)))
    state->flip_y = gsk_gpu_image_get_height (self->target);
  else
    state->flip_y = 0;

  glViewport (0, 0,
              gsk_gpu_image_get_width (self->target),
              gsk_gpu_image_get_height (self->target));

  if (state->flip_y)
    glScissor (self->area.x, state->flip_y - self->area.y - self->area.height, self->area.width, self->area.height);
  else
    glScissor (self->area.x, self->area.y, self->area.width, self->area.height);

  if (self->load_op == GSK_GPU_LOAD_OP_CLEAR)
    {
      glClearColor (self->clear_color[0], self->clear_color[1], self->clear_color[2], self->clear_color[3]);
      glClear (GL_COLOR_BUFFER_BIT);
    }

  op = op->next;
  while (op->op_class->stage != GSK_GPU_STAGE_END_PASS)
    {
      op = gsk_gpu_op_gl_command (op, frame, state);
    }

  op = gsk_gpu_op_gl_command (op, frame, state);

  return op;
}

static const GskGpuOpClass GSK_GPU_RENDER_PASS_OP_CLASS = {
  GSK_GPU_OP_SIZE (GskGpuRenderPassOp),
  GSK_GPU_STAGE_BEGIN_PASS,
  gsk_gpu_render_pass_op_finish,
  gsk_gpu_render_pass_op_print,
#ifdef GDK_RENDERING_VULKAN
  gsk_gpu_render_pass_op_vk_command,
#endif
  gsk_gpu_render_pass_op_gl_command
};

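/* The op that ends a render pass. It only carries what ending the
 * pass needs: the target image and the pass type that selects the
 * final Vulkan image layout. */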
typedef struct _GskGpuFramePassEndOp GskGpuFramePassEndOp;

struct _GskGpuFramePassEndOp
{
  GskGpuOp op;

  GskGpuImage *target;
  GskRenderPassType pass_type;
};

static void
gsk_gpu_render_pass_end_op_finish (GskGpuOp *op)
{
  GskGpuFramePassEndOp *self = (GskGpuFramePassEndOp *) op;

  g_object_unref (self->target);
}

static void
gsk_gpu_render_pass_end_op_print (GskGpuOp    *op,
                                  GskGpuFrame *frame,
                                  GString     *string,
                                  guint        indent)
{
  GskGpuFramePassEndOp *self = (GskGpuFramePassEndOp *) op;

  gsk_gpu_print_op (string, indent, "end-render-pass");
  gsk_gpu_print_image (string, self->target);
  gsk_gpu_print_newline (string);
}

#ifdef GDK_RENDERING_VULKAN
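/* Ending the Vulkan pass: vkCmdEndRenderPass () transitions the
 * attachment to its final layout, but that presumably only covers the
 * mip level the framebuffer's image view references, so mipmappable
 * images get an explicit barrier moving mip levels >= 1 into the same
 * layout. Afterwards, the image's tracked layout is updated and the
 * per-pass command state is reset. */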
static GskGpuOp *
gsk_gpu_render_pass_end_op_vk_command (GskGpuOp              *op,
                                       GskGpuFrame           *frame,
                                       GskVulkanCommandState *state)
{
  GskGpuFramePassEndOp *self = (GskGpuFramePassEndOp *) op;

  vkCmdEndRenderPass (state->vk_command_buffer);

  if ((gsk_gpu_image_get_flags (self->target) & GSK_GPU_IMAGE_CAN_MIPMAP) &&
      (gsk_gpu_image_get_width (self->target) > 1 ||
       gsk_gpu_image_get_height (self->target) > 1))
    {
      vkCmdPipelineBarrier (state->vk_command_buffer,
                            gsk_vulkan_image_get_vk_pipeline_stage (GSK_VULKAN_IMAGE (self->target)),
                            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                            0,
                            0, NULL,
                            0, NULL,
                            1, &(VkImageMemoryBarrier) {
                                .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                .srcAccessMask = gsk_vulkan_image_get_vk_access (GSK_VULKAN_IMAGE (self->target)),
                                .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                                .oldLayout = gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->target)),
                                .newLayout = gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type),
                                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                                .image = gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->target)),
                                .subresourceRange = {
                                    .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                    .baseMipLevel = 1,
                                    .levelCount = VK_REMAINING_MIP_LEVELS,
                                    .baseArrayLayer = 0,
                                    .layerCount = 1
                                },
                            });
    }

  gsk_vulkan_image_set_vk_image_layout (GSK_VULKAN_IMAGE (self->target),
                                        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                                        gsk_gpu_render_pass_type_to_vk_image_layout (self->pass_type),
                                        VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);

  state->vk_render_pass = VK_NULL_HANDLE;
  state->vk_format = VK_FORMAT_UNDEFINED;

  return op->next;
}
#endif

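/* Ending a pass needs no GL commands; resetting flip_y clears the
 * per-pass state that the begin op's nesting assert checks. */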
static GskGpuOp *
gsk_gpu_render_pass_end_op_gl_command (GskGpuOp          *op,
                                       GskGpuFrame       *frame,
                                       GskGLCommandState *state)
{
  state->flip_y = 0;

  return op->next;
}

static const GskGpuOpClass GSK_GPU_RENDER_PASS_END_OP_CLASS = {
  GSK_GPU_OP_SIZE (GskGpuFramePassEndOp),
  GSK_GPU_STAGE_END_PASS,
  gsk_gpu_render_pass_end_op_finish,
  gsk_gpu_render_pass_end_op_print,
#ifdef GDK_RENDERING_VULKAN
  gsk_gpu_render_pass_end_op_vk_command,
#endif
  gsk_gpu_render_pass_end_op_gl_command
};

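/* Begin a render pass drawing into @image. @area is the region the
 * pass touches; @clear_color may be NULL unless @load_op is
 * GSK_GPU_LOAD_OP_CLEAR; @pass_type selects how the image will be
 * used once the pass has ended. Expected to be paired with a matching
 * gsk_gpu_render_pass_end_op () call. */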
void
gsk_gpu_render_pass_begin_op (GskGpuFrame                 *frame,
                              GskGpuImage                 *image,
                              const cairo_rectangle_int_t *area,
                              GskGpuLoadOp                 load_op,
                              float                        clear_color[4],
                              GskRenderPassType            pass_type)
{
  GskGpuRenderPassOp *self;

  g_assert (load_op != GSK_GPU_LOAD_OP_CLEAR || clear_color != NULL);

  self = (GskGpuRenderPassOp *) gsk_gpu_op_alloc (frame, &GSK_GPU_RENDER_PASS_OP_CLASS);

  self->target = g_object_ref (image);
  self->area = *area;
  self->load_op = load_op;
  if (self->load_op == GSK_GPU_LOAD_OP_CLEAR)
    gsk_gpu_vec4_to_float (clear_color, self->clear_color);
  self->pass_type = pass_type;
}

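/* End the render pass begun with gsk_gpu_render_pass_begin_op (). */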
void
gsk_gpu_render_pass_end_op (GskGpuFrame       *frame,
                            GskGpuImage       *image,
                            GskRenderPassType  pass_type)
{
  GskGpuFramePassEndOp *self;

  self = (GskGpuFramePassEndOp *) gsk_gpu_op_alloc (frame, &GSK_GPU_RENDER_PASS_END_OP_CLASS);

  self->target = g_object_ref (image);
  self->pass_type = pass_type;
}