mirror of
https://gitlab.gnome.org/GNOME/gtk.git
synced 2024-09-19 21:40:22 +00:00
gpu: Add outline of new GPU renderer
For now, it just renders using cairo, uploads the result to the GPU, blits it onto the framebuffer and then is happy. But it can do that using Vulkan and using GL (no idea which version). The most important thing still missing is shaders. It also has a bunch of copy/paste from the Vulkan renderer that isn't used yet. But I didn't want to rip it out and then try to copy it back later
This commit is contained in:
parent
e3c70645f9
commit
9ddae8aebc
@ -537,51 +537,3 @@ gsk_gl_renderer_try_compile_gl_shader (GskGLRenderer *renderer,
|
||||
return program != NULL;
|
||||
}
|
||||
|
||||
/* Legacy "NGL" renderer stub.
 *
 * The NGL renderer used to be a separate GL renderer implementation.
 * It is now only kept for API compatibility: realizing it always
 * fails and directs callers to the regular GL renderer instead.
 */
typedef struct {
  GskRenderer parent_instance;
} GskNglRenderer;

typedef struct {
  GskRendererClass parent_class;
} GskNglRendererClass;

G_DEFINE_TYPE (GskNglRenderer, gsk_ngl_renderer, GSK_TYPE_RENDERER)

static void
gsk_ngl_renderer_init (GskNglRenderer *renderer)
{
}

/* Always fails: this renderer is a compatibility shim only. */
static gboolean
gsk_ngl_renderer_realize (GskRenderer *renderer,
                          GdkSurface  *surface,
                          GError     **error)
{
  g_set_error_literal (error,
                       G_IO_ERROR, G_IO_ERROR_FAILED,
                       "please use the GL renderer instead");
  return FALSE;
}

static void
gsk_ngl_renderer_class_init (GskNglRendererClass *class)
{
  GSK_RENDERER_CLASS (class)->realize = gsk_ngl_renderer_realize;
}

/**
 * gsk_ngl_renderer_new:
 *
 * Same as gsk_gl_renderer_new().
 *
 * Returns: (transfer full): a new GL renderer
 *
 * Deprecated: 4.4: Use gsk_gl_renderer_new()
 */
GskRenderer *
gsk_ngl_renderer_new (void)
{
  G_GNUC_BEGIN_IGNORE_DEPRECATIONS
  return g_object_new (gsk_ngl_renderer_get_type (), NULL);
  G_GNUC_END_IGNORE_DEPRECATIONS
}
|
||||
|
127
gsk/gpu/gskgldevice.c
Normal file
127
gsk/gpu/gskgldevice.c
Normal file
@ -0,0 +1,127 @@
|
||||
#include "config.h"

#include "gskgldeviceprivate.h"
#include "gskglimageprivate.h"

#include "gdk/gdkdisplayprivate.h"
#include "gdk/gdkglcontextprivate.h"

#include <glib/gi18n-lib.h>

/* GskGLDevice:
 *
 * The GL implementation of GskGpuDevice. There is at most one device
 * per GdkDisplay; it is cached on the display via object data under
 * the key "-gsk-gl-device".
 */
struct _GskGLDevice
{
  GskGpuDevice parent_instance;
};

struct _GskGLDeviceClass
{
  GskGpuDeviceClass parent_class;
};

G_DEFINE_TYPE (GskGLDevice, gsk_gl_device, GSK_TYPE_GPU_DEVICE)

/* vfunc: offscreen render targets use the format implied by the depth. */
static GskGpuImage *
gsk_gl_device_create_offscreen_image (GskGpuDevice   *device,
                                      GdkMemoryDepth  depth,
                                      gsize           width,
                                      gsize           height)
{
  GskGLDevice *self = GSK_GL_DEVICE (device);

  return gsk_gl_image_new (self,
                           gdk_memory_depth_get_format (depth),
                           width,
                           height);
}

/* vfunc: upload targets use the caller-requested memory format directly. */
static GskGpuImage *
gsk_gl_device_create_upload_image (GskGpuDevice    *device,
                                   GdkMemoryFormat  format,
                                   gsize            width,
                                   gsize            height)
{
  GskGLDevice *self = GSK_GL_DEVICE (device);

  return gsk_gl_image_new (self,
                           format,
                           width,
                           height);
}

static void
gsk_gl_device_finalize (GObject *object)
{
  GskGLDevice *self = GSK_GL_DEVICE (object);
  GskGpuDevice *device = GSK_GPU_DEVICE (self);

  /* Remove the (unowned) cached pointer from the display so a later
   * gsk_gl_device_get_for_display() does not return a dangling device. */
  g_object_steal_data (G_OBJECT (gsk_gpu_device_get_display (device)), "-gsk-gl-device");

  G_OBJECT_CLASS (gsk_gl_device_parent_class)->finalize (object);
}

static void
gsk_gl_device_class_init (GskGLDeviceClass *klass)
{
  GskGpuDeviceClass *gpu_device_class = GSK_GPU_DEVICE_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  gpu_device_class->create_offscreen_image = gsk_gl_device_create_offscreen_image;
  gpu_device_class->create_upload_image = gsk_gl_device_create_upload_image;

  object_class->finalize = gsk_gl_device_finalize;
}

static void
gsk_gl_device_init (GskGLDevice *self)
{
}

/*
 * gsk_gl_device_get_for_display:
 * @display: the display to get the device for
 * @error: return location for an error
 *
 * Returns the per-display GL device, creating it on first use.
 * Fails if GL cannot be initialized on the display or if only
 * GLES 2 is available.
 *
 * Returns: (transfer full): the device, or %NULL on error
 */
GskGpuDevice *
gsk_gl_device_get_for_display (GdkDisplay  *display,
                               GError     **error)
{
  GskGLDevice *self;
  GdkGLContext *context;

  self = g_object_get_data (G_OBJECT (display), "-gsk-gl-device");
  if (self)
    return GSK_GPU_DEVICE (g_object_ref (self));

  if (!gdk_display_prepare_gl (display, error))
    return NULL;

  context = gdk_display_get_gl_context (display);

  /* GLES 2 is not supported: we require GL 3.0 or GLES 3.0. */
  if (!gdk_gl_context_check_version (context, "3.0", "3.0"))
    {
      g_set_error (error, GDK_GL_ERROR, GDK_GL_ERROR_NOT_AVAILABLE,
                   _("OpenGL ES 2.0 is not supported by this renderer."));
      return NULL;
    }

  self = g_object_new (GSK_TYPE_GL_DEVICE, NULL);

  gsk_gpu_device_setup (GSK_GPU_DEVICE (self), display);

  /* No destroy notify: the device unregisters itself in finalize(). */
  g_object_set_data (G_OBJECT (display), "-gsk-gl-device", self);

  return GSK_GPU_DEVICE (self);
}

/*
 * gsk_gl_device_find_gl_format:
 * @self: the device
 * @format: the requested memory format
 * @out_format: (out): the memory format actually usable — for now
 *   always equal to @format, but callers must use this value so a
 *   fallback format can be introduced later
 * @out_gl_internal_format: (out): GL internal format for glTexImage2D()
 * @out_gl_format: (out): GL pixel format
 * @out_gl_type: (out): GL pixel type
 * @out_swizzle: (out): texture swizzle to apply for this format
 *
 * Maps a GdkMemoryFormat to the GL upload triple plus swizzle.
 */
void
gsk_gl_device_find_gl_format (GskGLDevice     *self,
                              GdkMemoryFormat  format,
                              GdkMemoryFormat *out_format,
                              GLint           *out_gl_internal_format,
                              GLenum          *out_gl_format,
                              GLenum          *out_gl_type,
                              GLint            out_swizzle[4])
{
  /* Fix: *out_format was previously never written, leaving the out
   * parameter uninitialized for any caller that did not alias it with
   * the input. We always use the requested format, so report it back. */
  *out_format = format;

  gdk_memory_format_gl_format (format,
                               out_gl_internal_format,
                               out_gl_format,
                               out_gl_type,
                               out_swizzle);
}
|
||||
|
23
gsk/gpu/gskgldeviceprivate.h
Normal file
23
gsk/gpu/gskgldeviceprivate.h
Normal file
@ -0,0 +1,23 @@
|
||||
#pragma once

#include "gskgpudeviceprivate.h"

G_BEGIN_DECLS

#define GSK_TYPE_GL_DEVICE (gsk_gl_device_get_type ())

G_DECLARE_FINAL_TYPE (GskGLDevice, gsk_gl_device, GSK, GL_DEVICE, GskGpuDevice)

/* Returns the per-display GL device, creating it on first use.
 * Transfers a full reference; returns NULL (with @error set) when GL
 * is unavailable or too old. */
GskGpuDevice *          gsk_gl_device_get_for_display           (GdkDisplay             *display,
                                                                 GError                **error);

/* Maps @format to the GL upload triple (internal format / format /
 * type) plus swizzle; @out_format receives the memory format that
 * will actually be used. */
void                    gsk_gl_device_find_gl_format            (GskGLDevice            *self,
                                                                 GdkMemoryFormat         format,
                                                                 GdkMemoryFormat        *out_format,
                                                                 GLint                  *out_gl_internal_format,
                                                                 GLenum                 *out_gl_format,
                                                                 GLenum                 *out_gl_type,
                                                                 GLint                   out_swizzle[4]);

G_END_DECLS
|
51
gsk/gpu/gskglframe.c
Normal file
51
gsk/gpu/gskglframe.c
Normal file
@ -0,0 +1,51 @@
|
||||
#include "config.h"

#include "gskglframeprivate.h"

#include "gskgpuopprivate.h"

#include "gdk/gdkdisplayprivate.h"
#include "gdk/gdkglcontextprivate.h"

/* GskGLFrame:
 *
 * The GL implementation of GskGpuFrame. GL command submission is
 * synchronous here, so the frame carries no per-frame GPU state yet.
 */
struct _GskGLFrame
{
  GskGpuFrame parent_instance;
};

struct _GskGLFrameClass
{
  GskGpuFrameClass parent_class;
};

G_DEFINE_TYPE (GskGLFrame, gsk_gl_frame, GSK_TYPE_GPU_FRAME)

/* vfunc: GL submission completes before submit() returns, so a frame
 * is never busy afterwards. */
static gboolean
gsk_gl_frame_is_busy (GskGpuFrame *frame)
{
  return FALSE;
}

/* vfunc: execute the whole op list; each op's GL command returns the
 * next op to run. */
static void
gsk_gl_frame_submit (GskGpuFrame *frame,
                     GskGpuOp    *op)
{
  for (; op != NULL; op = gsk_gpu_op_gl_command (op, frame))
    ;
}

static void
gsk_gl_frame_class_init (GskGLFrameClass *klass)
{
  GskGpuFrameClass *frame_class = GSK_GPU_FRAME_CLASS (klass);

  frame_class->submit = gsk_gl_frame_submit;
  frame_class->is_busy = gsk_gl_frame_is_busy;
}

static void
gsk_gl_frame_init (GskGLFrame *self)
{
}
|
||||
|
12
gsk/gpu/gskglframeprivate.h
Normal file
12
gsk/gpu/gskglframeprivate.h
Normal file
@ -0,0 +1,12 @@
|
||||
#pragma once

#include "gskgpuframeprivate.h"

G_BEGIN_DECLS

#define GSK_TYPE_GL_FRAME (gsk_gl_frame_get_type ())

/* GskGLFrame: the GL backend's GskGpuFrame subclass. It adds no
 * public API beyond the type; everything happens through vfuncs. */
G_DECLARE_FINAL_TYPE (GskGLFrame, gsk_gl_frame, GSK, GL_FRAME, GskGpuFrame)

G_END_DECLS
|
227
gsk/gpu/gskglimage.c
Normal file
227
gsk/gpu/gskglimage.c
Normal file
@ -0,0 +1,227 @@
|
||||
#include "config.h"

#include "gskglimageprivate.h"

#include "gdk/gdkdisplayprivate.h"
#include "gdk/gdkglcontextprivate.h"

/* GskGLImage:
 *
 * The GL implementation of GskGpuImage. Wraps a GL texture — or the
 * window-system backbuffer when texture_id == 0 — plus a lazily
 * created framebuffer object for rendering into it.
 */
struct _GskGLImage
{
  GskGpuImage parent_instance;

  guint texture_id;      /* 0 means "the backbuffer", see gsk_gl_image_new_backbuffer() */
  guint framebuffer_id;  /* created on demand in bind_framebuffer_target() */

  /* GL upload triple matching the image's GdkMemoryFormat */
  GLint gl_internal_format;
  GLenum gl_format;
  GLenum gl_type;

  guint owns_texture : 1;  /* cleared by gsk_gl_image_steal_texture() */
};

struct _GskGLImageClass
{
  GskGpuImageClass parent_class;
};

G_DEFINE_TYPE (GskGLImage, gsk_gl_image, GSK_TYPE_GPU_IMAGE)

/* Deletes the GL objects.
 * NOTE(review): assumes a GL context that owns these objects is
 * current when the last reference drops — confirm at call sites. */
static void
gsk_gl_image_finalize (GObject *object)
{
  GskGLImage *self = GSK_GL_IMAGE (object);

  if (self->framebuffer_id)
    glDeleteFramebuffers (1, &self->framebuffer_id);

  if (self->owns_texture)
    glDeleteTextures (1, &self->texture_id);

  G_OBJECT_CLASS (gsk_gl_image_parent_class)->finalize (object);
}

static void
gsk_gl_image_class_init (GskGLImageClass *klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->finalize = gsk_gl_image_finalize;
}

static void
gsk_gl_image_init (GskGLImage *self)
{
}

/* Creates an image representing the backbuffer: no texture is
 * allocated (texture_id stays 0, which bind_framebuffer_target()
 * interprets as framebuffer 0). */
GskGpuImage *
gsk_gl_image_new_backbuffer (GskGLDevice     *device,
                             GdkMemoryFormat  format,
                             gsize            width,
                             gsize            height)
{
  GskGLImage *self;
  GLint swizzle[4];

  self = g_object_new (GSK_TYPE_GL_IMAGE, NULL);

  /* We only do this so these variables get initialized */
  gsk_gl_device_find_gl_format (device,
                                format,
                                &format,
                                &self->gl_internal_format,
                                &self->gl_format,
                                &self->gl_type,
                                swizzle);

  gsk_gpu_image_setup (GSK_GPU_IMAGE (self), format, width, height);

  /* texture_id == 0 means backbuffer */

  return GSK_GPU_IMAGE (self);
}

/* Creates an image backed by a freshly allocated GL texture of the
 * given size and format. Requires a current GL context. */
GskGpuImage *
gsk_gl_image_new (GskGLDevice     *device,
                  GdkMemoryFormat  format,
                  gsize            width,
                  gsize            height)
{
  GskGLImage *self;
  GLint swizzle[4];

  self = g_object_new (GSK_TYPE_GL_IMAGE, NULL);

  gsk_gl_device_find_gl_format (device,
                                format,
                                &format,
                                &self->gl_internal_format,
                                &self->gl_format,
                                &self->gl_type,
                                swizzle);

  gsk_gpu_image_setup (GSK_GPU_IMAGE (self), format, width, height);

  glGenTextures (1, &self->texture_id);
  self->owns_texture = TRUE;

  glActiveTexture (GL_TEXTURE0);
  glBindTexture (GL_TEXTURE_2D, self->texture_id);

  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  /* Allocate storage without uploading data (last argument NULL). */
  glTexImage2D (GL_TEXTURE_2D, 0, self->gl_internal_format, width, height, 0, self->gl_format, self->gl_type, NULL);

  /* Only apply swizzle if really needed, might not even be
   * supported if default values are set
   */
  if (swizzle[0] != GL_RED || swizzle[1] != GL_GREEN || swizzle[2] != GL_BLUE || swizzle[3] != GL_ALPHA)
    {
      /* Set each channel independently since GLES 3.0 doesn't support the iv method */
      glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_R, swizzle[0]);
      glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_G, swizzle[1]);
      glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_B, swizzle[2]);
      glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_A, swizzle[3]);
    }

  return GSK_GPU_IMAGE (self);
}

/* Binds the image's texture to the currently active texture unit. */
void
gsk_gl_image_bind_texture (GskGLImage *self)
{
  glBindTexture (GL_TEXTURE_2D, self->texture_id);
}

/* Binds the image as a framebuffer to @target (GL_FRAMEBUFFER,
 * GL_READ_FRAMEBUFFER or GL_DRAW_FRAMEBUFFER), creating and
 * completeness-checking the FBO on first use. */
void
gsk_gl_image_bind_framebuffer_target (GskGLImage *self,
                                      GLenum      target)
{
  GLenum status;

  if (self->framebuffer_id)
    {
      glBindFramebuffer (target, self->framebuffer_id);
      return;
    }

  /* We're the renderbuffer */
  if (self->texture_id == 0)
    {
      glBindFramebuffer (target, 0);
      return;
    }

  glGenFramebuffers (1, &self->framebuffer_id);
  glBindFramebuffer (target, self->framebuffer_id);
  glFramebufferTexture2D (target, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self->texture_id, 0);
  status = glCheckFramebufferStatus (target);

  /* Incompleteness is reported loudly but not treated as fatal. */
  switch (status)
    {
    case GL_FRAMEBUFFER_COMPLETE:
      break;

    case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
      g_critical ("glCheckFramebufferStatus() returned GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. Expect broken rendering.");
      break;

    case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS:
      g_critical ("glCheckFramebufferStatus() returned GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS. Expect broken rendering.");
      break;

    case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
      g_critical ("glCheckFramebufferStatus() returned GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT. Expect broken rendering.");
      break;

    case GL_FRAMEBUFFER_UNSUPPORTED:
      g_critical ("glCheckFramebufferStatus() returned GL_FRAMEBUFFER_UNSUPPORTED. Expect broken rendering.");
      break;

    default:
      g_critical ("glCheckFramebufferStatus() returned %u (0x%x). Expect broken rendering.", status, status);
      break;
    }
}

/* Convenience wrapper binding both read and draw targets. */
void
gsk_gl_image_bind_framebuffer (GskGLImage *self)
{
  gsk_gl_image_bind_framebuffer_target (self, GL_FRAMEBUFFER);
}

GLint
gsk_gl_image_get_gl_internal_format (GskGLImage *self)
{
  return self->gl_internal_format;
}

GLenum
gsk_gl_image_get_gl_format (GskGLImage *self)
{
  return self->gl_format;
}

GLenum
gsk_gl_image_get_gl_type (GskGLImage *self)
{
  return self->gl_type;
}

/* Transfers ownership of the GL texture to the caller: the image will
 * no longer delete it, and any FBO wrapping it is destroyed. Must only
 * be called on images that still own their texture. */
GLuint
gsk_gl_image_steal_texture (GskGLImage *self)
{
  g_assert (self->owns_texture);

  if (self->framebuffer_id)
    {
      glDeleteFramebuffers (1, &self->framebuffer_id);
      self->framebuffer_id = 0;
    }

  self->owns_texture = FALSE;

  return self->texture_id;
}
|
33
gsk/gpu/gskglimageprivate.h
Normal file
33
gsk/gpu/gskglimageprivate.h
Normal file
@ -0,0 +1,33 @@
|
||||
#pragma once

#include "gskgpuimageprivate.h"

#include "gskgldeviceprivate.h"

G_BEGIN_DECLS

#define GSK_TYPE_GL_IMAGE (gsk_gl_image_get_type ())

G_DECLARE_FINAL_TYPE (GskGLImage, gsk_gl_image, GSK, GL_IMAGE, GskGpuImage)

/* Backbuffer wrapper: no texture is allocated (texture id stays 0). */
GskGpuImage *           gsk_gl_image_new_backbuffer             (GskGLDevice    *device,
                                                                 GdkMemoryFormat format,
                                                                 gsize           width,
                                                                 gsize           height);
/* Allocates a GL texture of the given size/format; needs a current context. */
GskGpuImage *           gsk_gl_image_new                        (GskGLDevice    *device,
                                                                 GdkMemoryFormat format,
                                                                 gsize           width,
                                                                 gsize           height);

void                    gsk_gl_image_bind_texture               (GskGLImage     *self);
void                    gsk_gl_image_bind_framebuffer           (GskGLImage     *self);
/* Binds to GL_READ_FRAMEBUFFER / GL_DRAW_FRAMEBUFFER / GL_FRAMEBUFFER,
 * creating the FBO lazily. */
void                    gsk_gl_image_bind_framebuffer_target    (GskGLImage     *self,
                                                                 GLenum          target);

GLint                   gsk_gl_image_get_gl_internal_format     (GskGLImage     *self);
GLenum                  gsk_gl_image_get_gl_format              (GskGLImage     *self);
GLenum                  gsk_gl_image_get_gl_type                (GskGLImage     *self);

/* Transfers texture ownership to the caller; the image keeps using the
 * id but will no longer delete it. */
GLuint                  gsk_gl_image_steal_texture              (GskGLImage     *self);

G_END_DECLS
|
221
gsk/gpu/gskgpublitop.c
Normal file
221
gsk/gpu/gskgpublitop.c
Normal file
@ -0,0 +1,221 @@
|
||||
#include "config.h"

#include "gskgpublitopprivate.h"

#include "gskglimageprivate.h"
#include "gskgpuprintprivate.h"
#ifdef GDK_RENDERING_VULKAN
#include "gskvulkanimageprivate.h"
#endif

typedef struct _GskGpuBlitOp GskGpuBlitOp;

/* Op that copies (and possibly scales) a rectangle from one image to
 * another, on both the Vulkan and the GL path. */
struct _GskGpuBlitOp
{
  GskGpuOp op;

  GskGpuImage *src_image;          /* owned */
  GskGpuImage *dest_image;         /* owned */
  cairo_rectangle_int_t src_rect;
  cairo_rectangle_int_t dest_rect;
  GskGpuBlitFilter filter;         /* scaling filter when rects differ in size */
};

/* vfunc: drop the image references held since gsk_gpu_blit_op(). */
static void
gsk_gpu_blit_op_finish (GskGpuOp *op)
{
  GskGpuBlitOp *self = (GskGpuBlitOp *) op;

  g_object_unref (self->src_image);
  g_object_unref (self->dest_image);
}

static void
gsk_gpu_blit_op_print (GskGpuOp *op,
                       GString  *string,
                       guint     indent)
{
  GskGpuBlitOp *self = (GskGpuBlitOp *) op;

  gsk_gpu_print_op (string, indent, "blit");
  gsk_gpu_print_int_rect (string, &self->dest_rect);
  gsk_gpu_print_newline (string);
}

#ifdef GDK_RENDERING_VULKAN
/* vfunc: blits bind no descriptors, so nothing to reserve. */
static void
gsk_gpu_blit_op_vk_reserve_descriptor_sets (GskGpuOp    *op,
                                            GskGpuFrame *frame)
{
}

/* vfunc: records the Vulkan blit. Both images are transitioned into a
 * layout vkCmdBlitImage accepts unless they already are in one. */
static GskGpuOp *
gsk_gpu_blit_op_vk_command (GskGpuOp        *op,
                            GskGpuFrame     *frame,
                            VkRenderPass     render_pass,
                            VkCommandBuffer  command_buffer)
{
  GskGpuBlitOp *self = (GskGpuBlitOp *) op;
  VkImageLayout src_layout, dest_layout;
  VkFilter filter;

  src_layout = gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->src_image));
  if (src_layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR &&
      src_layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL &&
      src_layout != VK_IMAGE_LAYOUT_GENERAL)
    {
      gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->src_image),
                                   command_buffer,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                   VK_ACCESS_TRANSFER_READ_BIT);
      src_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    }

  dest_layout = gsk_vulkan_image_get_vk_image_layout (GSK_VULKAN_IMAGE (self->dest_image));
  if (dest_layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR &&
      dest_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL &&
      dest_layout != VK_IMAGE_LAYOUT_GENERAL)
    {
      gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->dest_image),
                                   command_buffer,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                   VK_ACCESS_TRANSFER_WRITE_BIT);
      dest_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    }

  switch (self->filter)
    {
    default:
      g_assert_not_reached ();
      G_GNUC_FALLTHROUGH;
    case GSK_GPU_BLIT_LINEAR:
      filter = VK_FILTER_LINEAR;
      break;

    case GSK_GPU_BLIT_NEAREST:
      filter = VK_FILTER_NEAREST;
      break;
    }

  vkCmdBlitImage (command_buffer,
                  gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->src_image)),
                  src_layout,
                  gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->dest_image)),
                  dest_layout,
                  1,
                  &(VkImageBlit) {
                      .srcSubresource = {
                          .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                          .mipLevel = 0,
                          .baseArrayLayer = 0,
                          .layerCount = 1
                      },
                      .srcOffsets = {
                          {
                              .x = self->src_rect.x,
                              .y = self->src_rect.y,
                              .z = 0,
                          },
                          {
                              .x = self->src_rect.x + self->src_rect.width,
                              .y = self->src_rect.y + self->src_rect.height,
                              .z = 1
                          }
                      },
                      .dstSubresource = {
                          .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                          .mipLevel = 0,
                          .baseArrayLayer = 0,
                          .layerCount = 1
                      },
                      .dstOffsets = {
                          {
                              .x = self->dest_rect.x,
                              .y = self->dest_rect.y,
                              .z = 0,
                          },
                          {
                              .x = self->dest_rect.x + self->dest_rect.width,
                              .y = self->dest_rect.y + self->dest_rect.height,
                              .z = 1,
                          }
                      },
                  },
                  filter);

  return op->next;
}
#endif

/* vfunc: GL path via glBlitFramebuffer between the two images' FBOs.
 * NOTE(review): temporarily disables the scissor test — assumes the
 * surrounding render state expects it enabled; confirm. */
static GskGpuOp *
gsk_gpu_blit_op_gl_command (GskGpuOp    *op,
                            GskGpuFrame *frame)
{
  GskGpuBlitOp *self = (GskGpuBlitOp *) op;
  GLenum filter;

  gsk_gl_image_bind_framebuffer_target (GSK_GL_IMAGE (self->src_image), GL_READ_FRAMEBUFFER);
  gsk_gl_image_bind_framebuffer_target (GSK_GL_IMAGE (self->dest_image), GL_DRAW_FRAMEBUFFER);

  switch (self->filter)
    {
    default:
      g_assert_not_reached ();
      G_GNUC_FALLTHROUGH;
    case GSK_GPU_BLIT_LINEAR:
      filter = GL_LINEAR;
      break;

    case GSK_GPU_BLIT_NEAREST:
      filter = GL_NEAREST;
      break;
    }

  glDisable (GL_SCISSOR_TEST);
  glBlitFramebuffer (self->src_rect.x,
                     self->src_rect.y,
                     self->src_rect.x + self->src_rect.width,
                     self->src_rect.y + self->src_rect.height,
                     self->dest_rect.x,
                     self->dest_rect.y,
                     self->dest_rect.x + self->dest_rect.width,
                     self->dest_rect.y + self->dest_rect.height,
                     GL_COLOR_BUFFER_BIT,
                     filter);
  glEnable (GL_SCISSOR_TEST);

  return op->next;
}

static const GskGpuOpClass GSK_GPU_BLIT_OP_CLASS = {
  GSK_GPU_OP_SIZE (GskGpuBlitOp),
  GSK_GPU_STAGE_PASS,
  gsk_gpu_blit_op_finish,
  gsk_gpu_blit_op_print,
#ifdef GDK_RENDERING_VULKAN
  gsk_gpu_blit_op_vk_reserve_descriptor_sets,
  gsk_gpu_blit_op_vk_command,
#endif
  gsk_gpu_blit_op_gl_command
};

/* Queues a blit of @src_rect in @src_image to @dest_rect in
 * @dest_image onto @frame's op list. Takes its own references on both
 * images; they are released in finish(). */
void
gsk_gpu_blit_op (GskGpuFrame                 *frame,
                 GskGpuImage                 *src_image,
                 GskGpuImage                 *dest_image,
                 const cairo_rectangle_int_t *src_rect,
                 const cairo_rectangle_int_t *dest_rect,
                 GskGpuBlitFilter             filter)
{
  GskGpuBlitOp *self;

  self = (GskGpuBlitOp *) gsk_gpu_op_alloc (frame, &GSK_GPU_BLIT_OP_CLASS);

  self->src_image = g_object_ref (src_image);
  self->dest_image = g_object_ref (dest_image);
  self->src_rect = *src_rect;
  self->dest_rect = *dest_rect;
  self->filter = filter;
}
|
20
gsk/gpu/gskgpublitopprivate.h
Normal file
20
gsk/gpu/gskgpublitopprivate.h
Normal file
@ -0,0 +1,20 @@
|
||||
#pragma once

#include "gskgpuopprivate.h"

G_BEGIN_DECLS

/* Scaling filter used when source and destination rects differ in size. */
typedef enum {
  GSK_GPU_BLIT_NEAREST,
  GSK_GPU_BLIT_LINEAR
} GskGpuBlitFilter;

/* Queues a blit op on @frame copying @src_rect of @src_image into
 * @dest_rect of @dest_image. */
void                    gsk_gpu_blit_op                         (GskGpuFrame                    *frame,
                                                                 GskGpuImage                    *src_image,
                                                                 GskGpuImage                    *dest_image,
                                                                 const cairo_rectangle_int_t    *src_rect,
                                                                 const cairo_rectangle_int_t    *dest_rect,
                                                                 GskGpuBlitFilter                filter);

G_END_DECLS
|
||||
|
74
gsk/gpu/gskgpudevice.c
Normal file
74
gsk/gpu/gskgpudevice.c
Normal file
@ -0,0 +1,74 @@
|
||||
#include "config.h"

#include "gskgpudeviceprivate.h"

#include "gdk/gdkdisplayprivate.h"

typedef struct _GskGpuDevicePrivate GskGpuDevicePrivate;

/* GskGpuDevice:
 *
 * Abstract base class for per-display GPU devices. Subclasses (GL,
 * Vulkan) implement image creation; this base only tracks the display.
 */
struct _GskGpuDevicePrivate
{
  GdkDisplay *display;  /* strong reference, set in gsk_gpu_device_setup() */
};

G_DEFINE_TYPE_WITH_PRIVATE (GskGpuDevice, gsk_gpu_device, G_TYPE_OBJECT)

static void
gsk_gpu_device_finalize (GObject *object)
{
  GskGpuDevice *self = GSK_GPU_DEVICE (object);
  GskGpuDevicePrivate *priv = gsk_gpu_device_get_instance_private (self);

  g_object_unref (priv->display);

  G_OBJECT_CLASS (gsk_gpu_device_parent_class)->finalize (object);
}

static void
gsk_gpu_device_class_init (GskGpuDeviceClass *klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->finalize = gsk_gpu_device_finalize;
}

static void
gsk_gpu_device_init (GskGpuDevice *self)
{
}

/* Second-stage construction: associates the device with its display.
 * Must be called exactly once, right after g_object_new(). */
void
gsk_gpu_device_setup (GskGpuDevice *self,
                      GdkDisplay   *display)
{
  GskGpuDevicePrivate *priv = gsk_gpu_device_get_instance_private (self);

  priv->display = g_object_ref (display);
}

/* Returns the display this device renders for. (transfer none) */
GdkDisplay *
gsk_gpu_device_get_display (GskGpuDevice *self)
{
  GskGpuDevicePrivate *priv = gsk_gpu_device_get_instance_private (self);

  return priv->display;
}

/* Dispatches to the subclass: create an image usable as a render
 * target with at least the precision implied by @depth. */
GskGpuImage *
gsk_gpu_device_create_offscreen_image (GskGpuDevice   *self,
                                       GdkMemoryDepth  depth,
                                       gsize           width,
                                       gsize           height)
{
  return GSK_GPU_DEVICE_GET_CLASS (self)->create_offscreen_image (self, depth, width, height);
}

/* Dispatches to the subclass: create an image suitable for uploading
 * pixel data in @format. */
GskGpuImage *
gsk_gpu_device_create_upload_image (GskGpuDevice    *self,
                                    GdkMemoryFormat  format,
                                    gsize            width,
                                    gsize            height)
{
  return GSK_GPU_DEVICE_GET_CLASS (self)->create_upload_image (self, format, width, height);
}
|
||||
|
53
gsk/gpu/gskgpudeviceprivate.h
Normal file
53
gsk/gpu/gskgpudeviceprivate.h
Normal file
@ -0,0 +1,53 @@
|
||||
#pragma once

#include "gskgputypesprivate.h"

G_BEGIN_DECLS

#define GSK_TYPE_GPU_DEVICE         (gsk_gpu_device_get_type ())
#define GSK_GPU_DEVICE(o)           (G_TYPE_CHECK_INSTANCE_CAST ((o), GSK_TYPE_GPU_DEVICE, GskGpuDevice))
#define GSK_GPU_DEVICE_CLASS(k)     (G_TYPE_CHECK_CLASS_CAST ((k), GSK_TYPE_GPU_DEVICE, GskGpuDeviceClass))
#define GSK_IS_GPU_DEVICE(o)        (G_TYPE_CHECK_INSTANCE_TYPE ((o), GSK_TYPE_GPU_DEVICE))
#define GSK_IS_GPU_DEVICE_CLASS(k)  (G_TYPE_CHECK_CLASS_TYPE ((k), GSK_TYPE_GPU_DEVICE))
#define GSK_GPU_DEVICE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GSK_TYPE_GPU_DEVICE, GskGpuDeviceClass))

typedef struct _GskGpuDeviceClass GskGpuDeviceClass;

/* Abstract, derivable base: instance data lives in the private struct. */
struct _GskGpuDevice
{
  GObject parent_instance;
};

struct _GskGpuDeviceClass
{
  GObjectClass parent_class;

  /* Create a render-target image with at least @depth precision. */
  GskGpuImage *         (* create_offscreen_image)              (GskGpuDevice           *self,
                                                                 GdkMemoryDepth          depth,
                                                                 gsize                   width,
                                                                 gsize                   height);
  /* Create an image suitable for uploading pixels in @format. */
  GskGpuImage *         (* create_upload_image)                 (GskGpuDevice           *self,
                                                                 GdkMemoryFormat         format,
                                                                 gsize                   width,
                                                                 gsize                   height);
};

GType                   gsk_gpu_device_get_type                 (void) G_GNUC_CONST;

/* Second-stage construction; takes a ref on @display. */
void                    gsk_gpu_device_setup                    (GskGpuDevice           *self,
                                                                 GdkDisplay             *display);

/* (transfer none): the display this device belongs to. */
GdkDisplay *            gsk_gpu_device_get_display              (GskGpuDevice           *self);

GskGpuImage *           gsk_gpu_device_create_offscreen_image   (GskGpuDevice           *self,
                                                                 GdkMemoryDepth          depth,
                                                                 gsize                   width,
                                                                 gsize                   height);
GskGpuImage *           gsk_gpu_device_create_upload_image      (GskGpuDevice           *self,
                                                                 GdkMemoryFormat         format,
                                                                 gsize                   width,
                                                                 gsize                   height);

G_DEFINE_AUTOPTR_CLEANUP_FUNC(GskGpuDevice, g_object_unref)

G_END_DECLS
|
278
gsk/gpu/gskgpudownloadop.c
Normal file
278
gsk/gpu/gskgpudownloadop.c
Normal file
@ -0,0 +1,278 @@
|
||||
#include "config.h"

#include "gskgpudownloadopprivate.h"

#include "gskgpuframeprivate.h"
#include "gskglimageprivate.h"
#include "gskgpuimageprivate.h"
#include "gskgpuprintprivate.h"
#ifdef GDK_RENDERING_VULKAN
#include "gskvulkanbufferprivate.h"
#include "gskvulkanimageprivate.h"
#endif

#include "gdk/gdkglcontextprivate.h"

typedef struct _GskGpuDownloadOp GskGpuDownloadOp;

/* Deferred texture creation hook: runs in finish(), after the GPU has
 * executed the copy, to turn the downloaded data into a GdkTexture. */
typedef void (* GdkGpuDownloadOpCreateFunc) (GskGpuDownloadOp *);

/* Op that downloads a GPU image into a GdkTexture and hands the
 * texture to a user callback when the op finishes. */
struct _GskGpuDownloadOp
{
  GskGpuOp op;

  GskGpuImage *image;                       /* owned: the image being downloaded */
  GdkGpuDownloadOpCreateFunc create_func;   /* NULL when texture was created during command recording */
  GskGpuDownloadFunc func;                  /* user callback receiving the texture */
  gpointer user_data;

  GdkTexture *texture;                      /* result, owned until finish() */
#ifdef GDK_RENDERING_VULKAN
  GskVulkanBuffer *buffer;                  /* staging buffer used by the Vulkan path */
#endif
};
|
||||
|
||||
/* vfunc: runs after GPU work completes. Creates the texture if that
 * was deferred (Vulkan path), invokes the user callback, and releases
 * everything the op owns. */
static void
gsk_gpu_download_op_finish (GskGpuOp *op)
{
  GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;

  if (self->create_func)
    self->create_func (self);

  self->func (self->user_data, self->texture);

  g_object_unref (self->texture);
  g_object_unref (self->image);
#ifdef GDK_RENDERING_VULKAN
  g_clear_pointer (&self->buffer, gsk_vulkan_buffer_free);
#endif
}
|
||||
|
||||
/* vfunc: debug dump of this op for GSK_DEBUG output. */
static void
gsk_gpu_download_op_print (GskGpuOp *op,
                           GString  *string,
                           guint     indent)
{
  GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;

  gsk_gpu_print_op (string, indent, "download");
  gsk_gpu_print_image (string, self->image);
  gsk_gpu_print_newline (string);
}
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
static void
|
||||
gsk_gpu_download_op_vk_reserve_descriptor_sets (GskGpuOp *op,
|
||||
GskGpuFrame *frame)
|
||||
{
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_download_op_vk_create (GskGpuDownloadOp *self)
|
||||
{
|
||||
GBytes *bytes;
|
||||
guchar *data;
|
||||
gsize width, height, stride;
|
||||
GdkMemoryFormat format;
|
||||
|
||||
data = gsk_vulkan_buffer_get_data (self->buffer);
|
||||
width = gsk_gpu_image_get_width (self->image);
|
||||
height = gsk_gpu_image_get_height (self->image);
|
||||
format = gsk_gpu_image_get_format (self->image);
|
||||
stride = width * gdk_memory_format_bytes_per_pixel (format);
|
||||
bytes = g_bytes_new (data, stride * height);
|
||||
self->texture = gdk_memory_texture_new (width,
|
||||
height,
|
||||
format,
|
||||
bytes,
|
||||
stride);
|
||||
g_bytes_unref (bytes);
|
||||
}
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_download_op_vk_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkRenderPass render_pass,
|
||||
VkCommandBuffer command_buffer)
|
||||
{
|
||||
GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;
|
||||
gsize width, height, stride;
|
||||
|
||||
width = gsk_gpu_image_get_width (self->image);
|
||||
height = gsk_gpu_image_get_height (self->image);
|
||||
stride = width * gdk_memory_format_bytes_per_pixel (gsk_gpu_image_get_format (self->image));
|
||||
self->buffer = gsk_vulkan_buffer_new_map (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
|
||||
height * stride,
|
||||
GSK_VULKAN_READ);
|
||||
|
||||
gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (self->image),
|
||||
command_buffer,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
||||
VK_ACCESS_TRANSFER_READ_BIT);
|
||||
|
||||
vkCmdCopyImageToBuffer (command_buffer,
|
||||
gsk_vulkan_image_get_vk_image (GSK_VULKAN_IMAGE (self->image)),
|
||||
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
||||
gsk_vulkan_buffer_get_buffer (self->buffer),
|
||||
1,
|
||||
(VkBufferImageCopy[1]) {
|
||||
{
|
||||
.bufferOffset = 0,
|
||||
.bufferRowLength = width,
|
||||
.bufferImageHeight = height,
|
||||
.imageSubresource = {
|
||||
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
|
||||
.mipLevel = 0,
|
||||
.baseArrayLayer = 0,
|
||||
.layerCount = 1
|
||||
},
|
||||
.imageOffset = {
|
||||
.x = 0,
|
||||
.y = 0,
|
||||
.z = 0
|
||||
},
|
||||
.imageExtent = {
|
||||
.width = width,
|
||||
.height = height,
|
||||
.depth = 1
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
vkCmdPipelineBarrier (command_buffer,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_PIPELINE_STAGE_HOST_BIT,
|
||||
0,
|
||||
0, NULL,
|
||||
1, &(VkBufferMemoryBarrier) {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
|
||||
.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
|
||||
.dstAccessMask = VK_ACCESS_HOST_READ_BIT,
|
||||
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.buffer = gsk_vulkan_buffer_get_buffer (self->buffer),
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE,
|
||||
},
|
||||
0, NULL);
|
||||
|
||||
self->create_func = gsk_gpu_download_op_vk_create;
|
||||
|
||||
return op->next;
|
||||
}
|
||||
#endif
|
||||
|
||||
/* Destroy payload for a GdkGLTexture built around a GskGpuImage's GL
 * texture: everything needed to release the GL resources once the
 * GdkTexture is finalized (see gsk_gl_texture_data_free()). */
typedef struct _GskGLTextureData GskGLTextureData;

struct _GskGLTextureData
{
  GdkGLContext *context;    /* context the texture lives in; holds a reference */
  GLuint texture_id;        /* GL texture name to delete */
  GLsync sync;              /* fence guarding GPU completion; may be NULL */
};
|
||||
|
||||
/* GDestroyNotify for GskGLTextureData: makes the owning GL context
 * current, then deletes the sync object and texture and drops the
 * context reference. Runs when the exported GdkTexture is finalized. */
static void
gsk_gl_texture_data_free (gpointer user_data)
{
  GskGLTextureData *data = user_data;

  /* GL object deletion must happen with the owning context current */
  gdk_gl_context_make_current (data->context);

  /* sync may be NULL (context without sync support); g_clear_pointer skips NULL */
  g_clear_pointer (&data->sync, glDeleteSync);
  glDeleteTextures (1, &data->texture_id);
  g_object_unref (data->context);

  g_free (data);
}
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_download_op_gl_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame)
|
||||
{
|
||||
GskGpuDownloadOp *self = (GskGpuDownloadOp *) op;
|
||||
GdkGLTextureBuilder *builder;
|
||||
GskGLTextureData *data;
|
||||
GdkGLContext *context;
|
||||
|
||||
context = GDK_GL_CONTEXT (gsk_gpu_frame_get_context (frame));
|
||||
|
||||
data = g_new (GskGLTextureData, 1);
|
||||
data->context = g_object_ref (context);
|
||||
data->texture_id = gsk_gl_image_steal_texture (GSK_GL_IMAGE (self->image));
|
||||
|
||||
if (gdk_gl_context_has_sync (context))
|
||||
data->sync = glFenceSync (GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
|
||||
builder = gdk_gl_texture_builder_new ();
|
||||
gdk_gl_texture_builder_set_context (builder, context);
|
||||
gdk_gl_texture_builder_set_id (builder, data->texture_id);
|
||||
gdk_gl_texture_builder_set_format (builder, gsk_gpu_image_get_format (self->image));
|
||||
gdk_gl_texture_builder_set_width (builder, gsk_gpu_image_get_width (self->image));
|
||||
gdk_gl_texture_builder_set_height (builder, gsk_gpu_image_get_height (self->image));
|
||||
gdk_gl_texture_builder_set_sync (builder, data->sync);
|
||||
|
||||
self->texture = gdk_gl_texture_builder_build (builder,
|
||||
gsk_gl_texture_data_free,
|
||||
data);
|
||||
|
||||
g_object_unref (builder);
|
||||
|
||||
return op->next;
|
||||
}
|
||||
|
||||
/* vtable for the download op; runs in the COMMAND stage after shaders
 * have produced the image contents */
static const GskGpuOpClass GSK_GPU_DOWNLOAD_OP_CLASS = {
  GSK_GPU_OP_SIZE (GskGpuDownloadOp),
  GSK_GPU_STAGE_COMMAND,
  gsk_gpu_download_op_finish,
  gsk_gpu_download_op_print,
#ifdef GDK_RENDERING_VULKAN
  gsk_gpu_download_op_vk_reserve_descriptor_sets,
  gsk_gpu_download_op_vk_command,
#endif
  gsk_gpu_download_op_gl_command
};
|
||||
|
||||
void
|
||||
gsk_gpu_download_op (GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
GskGpuDownloadFunc func,
|
||||
gpointer user_data)
|
||||
{
|
||||
GskGpuDownloadOp *self;
|
||||
|
||||
self = (GskGpuDownloadOp *) gsk_gpu_op_alloc (frame, &GSK_GPU_DOWNLOAD_OP_CLASS);
|
||||
|
||||
self->image = g_object_ref (image);
|
||||
self->func = func,
|
||||
self->user_data = user_data;
|
||||
}
|
||||
|
||||
/* GskGpuDownloadFunc used by gsk_gpu_download_png_op(): saves the
 * downloaded texture to the (heap-allocated) filename and frees it. */
static void
gsk_gpu_download_save_png_cb (gpointer    filename,
                              GdkTexture *texture)
{
  gdk_texture_save_to_png (texture, filename);

  /* filename was allocated by gsk_gpu_download_png_op(); we own it */
  g_free (filename);
}
|
||||
|
||||
/*
 * gsk_gpu_download_png_op:
 * @frame: the frame to record the op into
 * @image: image to dump
 * @filename_format: printf-style format for the target filename
 * @...: arguments for @filename_format
 *
 * Debug helper: queues a download of @image and saves the result as a
 * PNG file. The formatted filename is freed by the save callback.
 */
void
gsk_gpu_download_png_op (GskGpuFrame *frame,
                         GskGpuImage *image,
                         const char  *filename_format,
                         ...)
{
  va_list args;
  char *filename;

  va_start (args, filename_format);
  filename = g_strdup_vprintf (filename_format, args);
  va_end (args);

  /* ownership of filename passes to gsk_gpu_download_save_png_cb() */
  gsk_gpu_download_op (frame,
                       image,
                       gsk_gpu_download_save_png_cb,
                       filename);
}
|
21
gsk/gpu/gskgpudownloadopprivate.h
Normal file
21
gsk/gpu/gskgpudownloadopprivate.h
Normal file
@ -0,0 +1,21 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpuopprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
typedef void (* GskGpuDownloadFunc) (gpointer user_data,
|
||||
GdkTexture *texture);
|
||||
|
||||
void gsk_gpu_download_op (GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
GskGpuDownloadFunc func,
|
||||
gpointer user_data);
|
||||
|
||||
void gsk_gpu_download_png_op (GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
const char *filename_format,
|
||||
...) G_GNUC_PRINTF(3, 4);
|
||||
|
||||
G_END_DECLS
|
||||
|
388
gsk/gpu/gskgpuframe.c
Normal file
388
gsk/gpu/gskgpuframe.c
Normal file
@ -0,0 +1,388 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpuframeprivate.h"
|
||||
|
||||
#include "gskgpudeviceprivate.h"
|
||||
#include "gskgpudownloadopprivate.h"
|
||||
#include "gskgpuimageprivate.h"
|
||||
#include "gskgpunodeprocessorprivate.h"
|
||||
#include "gskgpuopprivate.h"
|
||||
#include "gskgpurendererprivate.h"
|
||||
|
||||
#include "gskdebugprivate.h"
|
||||
#include "gskrendererprivate.h"
|
||||
|
||||
#define GDK_ARRAY_NAME gsk_gpu_ops
|
||||
#define GDK_ARRAY_TYPE_NAME GskGpuOps
|
||||
#define GDK_ARRAY_ELEMENT_TYPE guchar
|
||||
#define GDK_ARRAY_BY_VALUE 1
|
||||
#include "gdk/gdkarrayimpl.c"
|
||||
|
||||
typedef struct _GskGpuFramePrivate GskGpuFramePrivate;

struct _GskGpuFramePrivate
{
  GskGpuRenderer *renderer;   /* owns us — no reference held (see gsk_gpu_frame_setup) */
  GskGpuDevice *device;       /* referenced */

  /* ops are recorded as variable-sized structs packed back-to-back in
   * this byte array; they are linked into a list by seal_ops() */
  GskGpuOps ops;
  GskGpuOp *first_op;         /* head of the linked op list after sealing/sorting */
};
|
||||
|
||||
G_DEFINE_TYPE_WITH_PRIVATE (GskGpuFrame, gsk_gpu_frame, G_TYPE_OBJECT)
|
||||
|
||||
/* default setup vfunc: nothing to do; backends override as needed */
static void
gsk_gpu_frame_default_setup (GskGpuFrame *self)
{
}
|
||||
|
||||
/* default cleanup vfunc: finish every recorded op and reset the ops
 * array. Ops are packed back-to-back, so iteration steps by each op's
 * declared size. */
static void
gsk_gpu_frame_default_cleanup (GskGpuFrame *self)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
  GskGpuOp *op;
  gsize i;

  /* op is assigned in the body before the i += increment is evaluated */
  for (i = 0; i < gsk_gpu_ops_get_size (&priv->ops); i += op->op_class->size)
    {
      op = (GskGpuOp *) gsk_gpu_ops_index (&priv->ops, i);

      gsk_gpu_op_finish (op);
    }
  gsk_gpu_ops_set_size (&priv->ops, 0);
}
|
||||
|
||||
/* dispatch to the backend's cleanup vfunc (chains up to default_cleanup) */
static void
gsk_gpu_frame_cleanup (GskGpuFrame *self)
{
  GSK_GPU_FRAME_GET_CLASS (self)->cleanup (self);
}
|
||||
|
||||
static void
gsk_gpu_frame_dispose (GObject *object)
{
  GskGpuFrame *self = GSK_GPU_FRAME (object);

  /* release op resources early; dispose may run more than once, but
   * cleanup is idempotent (the ops array ends up empty) */
  gsk_gpu_frame_cleanup (self);

  G_OBJECT_CLASS (gsk_gpu_frame_parent_class)->dispose (object);
}
|
||||
|
||||
static void
gsk_gpu_frame_finalize (GObject *object)
{
  GskGpuFrame *self = GSK_GPU_FRAME (object);
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);

  gsk_gpu_ops_clear (&priv->ops);

  /* priv->renderer is deliberately not unreffed — we never owned it */
  g_object_unref (priv->device);

  G_OBJECT_CLASS (gsk_gpu_frame_parent_class)->finalize (object);
}
|
||||
|
||||
static void
gsk_gpu_frame_class_init (GskGpuFrameClass *klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  /* default vfuncs; is_busy and submit have no default and must be
   * provided by every backend subclass */
  klass->setup = gsk_gpu_frame_default_setup;
  klass->cleanup = gsk_gpu_frame_default_cleanup;

  object_class->dispose = gsk_gpu_frame_dispose;
  object_class->finalize = gsk_gpu_frame_finalize;
}
|
||||
|
||||
static void
gsk_gpu_frame_init (GskGpuFrame *self)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);

  gsk_gpu_ops_init (&priv->ops);
}
|
||||
|
||||
/* One-time initialization after construction; called by the renderer
 * that created this frame. Chains into the backend's setup vfunc. */
void
gsk_gpu_frame_setup (GskGpuFrame    *self,
                     GskGpuRenderer *renderer,
                     GskGpuDevice   *device)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);

  /* no reference, the renderer owns us */
  priv->renderer = renderer;
  priv->device = g_object_ref (device);

  GSK_GPU_FRAME_GET_CLASS (self)->setup (self);
}
|
||||
|
||||
GskGpuDevice *
|
||||
gsk_gpu_frame_get_device (GskGpuFrame *self)
|
||||
{
|
||||
GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
|
||||
|
||||
return priv->device;
|
||||
}
|
||||
|
||||
GdkDrawContext *
|
||||
gsk_gpu_frame_get_context (GskGpuFrame *self)
|
||||
{
|
||||
GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
|
||||
|
||||
return gsk_gpu_renderer_get_context (priv->renderer);
|
||||
}
|
||||
|
||||
/* Dump the current op list when GSK_DEBUG=verbose is set. Ops between a
 * BEGIN_PASS and its matching END_PASS are indented one extra level. */
static void
gsk_gpu_frame_verbose_print (GskGpuFrame *self,
                             const char  *heading)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);

  if (GSK_RENDERER_DEBUG_CHECK (GSK_RENDERER (priv->renderer), VERBOSE))
    {
      GskGpuOp *op;
      guint indent = 1;
      GString *string = g_string_new (heading);
      g_string_append (string, ":\n");

      for (op = priv->first_op; op; op = op->next)
        {
          /* dedent before printing END_PASS so it lines up with its BEGIN_PASS */
          if (op->op_class->stage == GSK_GPU_STAGE_END_PASS)
            indent--;
          gsk_gpu_op_print (op, string, indent);
          if (op->op_class->stage == GSK_GPU_STAGE_BEGIN_PASS)
            indent++;
        }

      gdk_debug_message ("%s", string->str);
      g_string_free (string, TRUE);
    }
}
|
||||
|
||||
static void
|
||||
gsk_gpu_frame_seal_ops (GskGpuFrame *self)
|
||||
{
|
||||
GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
|
||||
GskGpuOp *last, *op;
|
||||
guint i;
|
||||
|
||||
priv->first_op = (GskGpuOp *) gsk_gpu_ops_index (&priv->ops, 0);
|
||||
|
||||
last = priv->first_op;
|
||||
for (i = last->op_class->size; i < gsk_gpu_ops_get_size (&priv->ops); i += op->op_class->size)
|
||||
{
|
||||
op = (GskGpuOp *) gsk_gpu_ops_index (&priv->ops, i);
|
||||
|
||||
last->next = op;
|
||||
last = op;
|
||||
}
|
||||
}
|
||||
|
||||
/* Accumulator for gsk_gpu_frame_sort_render_pass(): two partial lists,
 * one of upload ops (hoisted to run first) and one of command/shader
 * ops, each tracked by head and tail for O(1) appends. */
typedef struct
{
  struct {
    GskGpuOp *first;
    GskGpuOp *last;
  } upload, command;
} SortData;
|
||||
|
||||
/* Walk the op list starting at @op, splitting it into @sort_data's
 * upload and command lists. Recurses on BEGIN_PASS; a nested pass's ops
 * are PREPENDED to the parent's lists so that work a pass depends on is
 * executed before the pass that consumes it. Returns the op after the
 * matching END_PASS (or NULL when the list is exhausted). */
static GskGpuOp *
gsk_gpu_frame_sort_render_pass (GskGpuFrame *self,
                                GskGpuOp    *op,
                                SortData    *sort_data)
{
  while (op)
    {
      switch (op->op_class->stage)
        {
        case GSK_GPU_STAGE_UPLOAD:
          /* append to the upload list */
          if (sort_data->upload.first == NULL)
            sort_data->upload.first = op;
          else
            sort_data->upload.last->next = op;
          sort_data->upload.last = op;
          op = op->next;
          break;

        case GSK_GPU_STAGE_COMMAND:
        case GSK_GPU_STAGE_SHADER:
          /* append to the command list */
          if (sort_data->command.first == NULL)
            sort_data->command.first = op;
          else
            sort_data->command.last->next = op;
          sort_data->command.last = op;
          op = op->next;
          break;

        case GSK_GPU_STAGE_PASS:
          /* same handling as COMMAND/SHADER; kept separate for clarity */
          if (sort_data->command.first == NULL)
            sort_data->command.first = op;
          else
            sort_data->command.last->next = op;
          sort_data->command.last = op;
          op = op->next;
          break;

        case GSK_GPU_STAGE_BEGIN_PASS:
          {
            /* the BEGIN_PASS op itself seeds the nested command list */
            SortData pass_data = { { NULL, NULL }, { op, op } };

            op = gsk_gpu_frame_sort_render_pass (self, op->next, &pass_data);

            /* splice the nested pass's uploads BEFORE ours */
            if (pass_data.upload.first)
              {
                if (sort_data->upload.last == NULL)
                  sort_data->upload.last = pass_data.upload.last;
                else
                  pass_data.upload.last->next = sort_data->upload.first;
                sort_data->upload.first = pass_data.upload.first;
              }
            /* and the nested pass's commands BEFORE ours, too */
            if (sort_data->command.last == NULL)
              sort_data->command.last = pass_data.command.last;
            else
              pass_data.command.last->next = sort_data->command.first;
            sort_data->command.first = pass_data.command.first;
          }
          break;

        case GSK_GPU_STAGE_END_PASS:
          /* close the current pass: END_PASS terminates this recursion level */
          sort_data->command.last->next = op;
          sort_data->command.last = op;
          return op->next;

        default:
          g_assert_not_reached ();
          break;
        }
    }

  return op;
}
|
||||
|
||||
/* Reorder the sealed op list so that all upload ops run before all
 * command ops, then terminate the list. Relies on
 * gsk_gpu_frame_sort_render_pass() for the actual partitioning. */
static void
gsk_gpu_frame_sort_ops (GskGpuFrame *self)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
  SortData sort_data = { { NULL, }, };

  gsk_gpu_frame_sort_render_pass (self, priv->first_op, &sort_data);

  /* chain uploads (if any) in front of the command list */
  if (sort_data.upload.first)
    {
      sort_data.upload.last->next = sort_data.command.first;
      priv->first_op = sort_data.upload.first;
    }
  else
    priv->first_op = sort_data.command.first;
  /* the old tail may still point into the pre-sort order; cut it off */
  if (sort_data.command.last)
    sort_data.command.last->next = NULL;
}
|
||||
|
||||
/* Grow the packed ops array by @size bytes and return a pointer to the
 * new (uninitialized) space. NOTE(review): the returned pointer is only
 * valid until the next allocation — the array may realloc; ops are
 * re-linked afterwards by seal_ops(). */
gpointer
gsk_gpu_frame_alloc_op (GskGpuFrame *self,
                        gsize        size)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);
  gsize pos;

  pos = gsk_gpu_ops_get_size (&priv->ops);

  /* insert @size unset bytes at the end */
  gsk_gpu_ops_splice (&priv->ops,
                      pos,
                      0, FALSE,
                      NULL,
                      size);

  return gsk_gpu_ops_index (&priv->ops, pos);
}
|
||||
|
||||
/* TRUE while the GPU is still executing this frame's previous submission;
 * implemented by the backend. */
gboolean
gsk_gpu_frame_is_busy (GskGpuFrame *self)
{
  return GSK_GPU_FRAME_GET_CLASS (self)->is_busy (self);
}
|
||||
|
||||
static void
|
||||
copy_texture (gpointer user_data,
|
||||
GdkTexture *texture)
|
||||
{
|
||||
GdkTexture **target = (GdkTexture **) user_data;
|
||||
|
||||
*target = g_object_ref (texture);
|
||||
}
|
||||
|
||||
/* Record all ops needed to render @node into @target. @clip limits the
 * area to redraw (NULL means the whole target). If @texture is non-NULL
 * a download op is appended so the caller receives the result as a
 * GdkTexture (used by render_texture). */
static void
gsk_gpu_frame_record (GskGpuFrame           *self,
                      GskGpuImage           *target,
                      const cairo_region_t  *clip,
                      GskRenderNode         *node,
                      const graphene_rect_t *viewport,
                      GdkTexture           **texture)
{
  cairo_rectangle_int_t extents;

  if (clip)
    {
      cairo_region_get_extents (clip, &extents);
    }
  else
    {
      /* no clip: cover the full target */
      extents = (cairo_rectangle_int_t) {
                    0, 0,
                    gsk_gpu_image_get_width (target),
                    gsk_gpu_image_get_height (target)
                };
    }

  /* render pass begin/end ops are not implemented yet (no shaders) */
#if 0
  gsk_gpu_render_pass_begin_op (self,
                                target,
                                &extents,
                                VK_IMAGE_LAYOUT_UNDEFINED,
                                VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
#endif

  gsk_gpu_node_processor_process (self,
                                  target,
                                  &extents,
                                  node,
                                  viewport);

#if 0
  gsk_gpu_render_pass_end_op (self,
                              target,
                              VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
#endif

  if (texture)
    gsk_gpu_download_op (self, target, copy_texture, texture);
}
|
||||
|
||||
/* Finalize the recorded ops (link, sort uploads first) and hand the
 * resulting list to the backend's submit vfunc for execution. */
static void
gsk_gpu_frame_submit (GskGpuFrame *self)
{
  GskGpuFramePrivate *priv = gsk_gpu_frame_get_instance_private (self);

  gsk_gpu_frame_seal_ops (self);
  gsk_gpu_frame_verbose_print (self, "start of frame");
  gsk_gpu_frame_sort_ops (self);
  gsk_gpu_frame_verbose_print (self, "after sort");

  GSK_GPU_FRAME_GET_CLASS (self)->submit (self, priv->first_op);
}
|
||||
|
||||
/* Entry point for rendering one frame: cleans up leftovers from the
 * previous use of this frame object, records ops for @node, submits.
 * @texture, when non-NULL, receives the rendered result (new reference)
 * once the download op's callback runs. */
void
gsk_gpu_frame_render (GskGpuFrame           *self,
                      GskGpuImage           *target,
                      const cairo_region_t  *region,
                      GskRenderNode         *node,
                      const graphene_rect_t *viewport,
                      GdkTexture           **texture)
{
  gsk_gpu_frame_cleanup (self);

  gsk_gpu_frame_record (self, target, region, node, viewport, texture);

  gsk_gpu_frame_submit (self);
}
|
||||
|
||||
|
57
gsk/gpu/gskgpuframeprivate.h
Normal file
57
gsk/gpu/gskgpuframeprivate.h
Normal file
@ -0,0 +1,57 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpurenderer.h"
|
||||
#include "gskgputypesprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_TYPE_GPU_FRAME (gsk_gpu_frame_get_type ())
|
||||
#define GSK_GPU_FRAME(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GSK_TYPE_GPU_FRAME, GskGpuFrame))
|
||||
#define GSK_GPU_FRAME_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), GSK_TYPE_GPU_FRAME, GskGpuFrameClass))
|
||||
#define GSK_IS_GPU_FRAME(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GSK_TYPE_GPU_FRAME))
|
||||
#define GSK_IS_GPU_FRAME_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), GSK_TYPE_GPU_FRAME))
|
||||
#define GSK_GPU_FRAME_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GSK_TYPE_GPU_FRAME, GskGpuFrameClass))
|
||||
|
||||
typedef struct _GskGpuFrameClass GskGpuFrameClass;
|
||||
|
||||
struct _GskGpuFrame
|
||||
{
|
||||
GObject parent_instance;
|
||||
};
|
||||
|
||||
struct _GskGpuFrameClass
|
||||
{
|
||||
GObjectClass parent_class;
|
||||
|
||||
gboolean (* is_busy) (GskGpuFrame *self);
|
||||
void (* setup) (GskGpuFrame *self);
|
||||
void (* cleanup) (GskGpuFrame *self);
|
||||
void (* submit) (GskGpuFrame *self,
|
||||
GskGpuOp *op);
|
||||
};
|
||||
|
||||
GType gsk_gpu_frame_get_type (void) G_GNUC_CONST;
|
||||
|
||||
|
||||
void gsk_gpu_frame_setup (GskGpuFrame *self,
|
||||
GskGpuRenderer *renderer,
|
||||
GskGpuDevice *device);
|
||||
|
||||
GdkDrawContext * gsk_gpu_frame_get_context (GskGpuFrame *self);
|
||||
GskGpuDevice * gsk_gpu_frame_get_device (GskGpuFrame *self);
|
||||
|
||||
gpointer gsk_gpu_frame_alloc_op (GskGpuFrame *self,
|
||||
gsize size);
|
||||
|
||||
gboolean gsk_gpu_frame_is_busy (GskGpuFrame *self);
|
||||
|
||||
void gsk_gpu_frame_render (GskGpuFrame *self,
|
||||
GskGpuImage *target,
|
||||
const cairo_region_t *region,
|
||||
GskRenderNode *node,
|
||||
const graphene_rect_t *viewport,
|
||||
GdkTexture **texture);
|
||||
|
||||
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GskGpuFrame, g_object_unref)
|
||||
|
||||
G_END_DECLS
|
62
gsk/gpu/gskgpuimage.c
Normal file
62
gsk/gpu/gskgpuimage.c
Normal file
@ -0,0 +1,62 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpuimageprivate.h"
|
||||
|
||||
typedef struct _GskGpuImagePrivate GskGpuImagePrivate;

/* Backend-independent image properties; set once via gsk_gpu_image_setup() */
struct _GskGpuImagePrivate
{
  GdkMemoryFormat format;
  gsize width;     /* in pixels */
  gsize height;    /* in pixels */
};
|
||||
|
||||
G_DEFINE_TYPE_WITH_PRIVATE (GskGpuImage, gsk_gpu_image, G_TYPE_OBJECT)
|
||||
|
||||
/* nothing to set up; GskGpuImage is an abstract data holder for subclasses */
static void
gsk_gpu_image_class_init (GskGpuImageClass *klass)
{
}
|
||||
|
||||
static void
gsk_gpu_image_init (GskGpuImage *self)
{
}
|
||||
|
||||
/* Initialize the common image properties; called once by subclass
 * constructors after allocating the backend resources. */
void
gsk_gpu_image_setup (GskGpuImage     *self,
                     GdkMemoryFormat  format,
                     gsize            width,
                     gsize            height)
{
  GskGpuImagePrivate *priv = gsk_gpu_image_get_instance_private (self);

  priv->format = format;
  priv->width = width;
  priv->height = height;
}
|
||||
|
||||
GdkMemoryFormat
|
||||
gsk_gpu_image_get_format (GskGpuImage *self)
|
||||
{
|
||||
GskGpuImagePrivate *priv = gsk_gpu_image_get_instance_private (self);
|
||||
|
||||
return priv->format;
|
||||
}
|
||||
|
||||
gsize
|
||||
gsk_gpu_image_get_width (GskGpuImage *self)
|
||||
{
|
||||
GskGpuImagePrivate *priv = gsk_gpu_image_get_instance_private (self);
|
||||
|
||||
return priv->width;
|
||||
}
|
||||
|
||||
gsize
|
||||
gsk_gpu_image_get_height (GskGpuImage *self)
|
||||
{
|
||||
GskGpuImagePrivate *priv = gsk_gpu_image_get_instance_private (self);
|
||||
|
||||
return priv->height;
|
||||
}
|
||||
|
40
gsk/gpu/gskgpuimageprivate.h
Normal file
40
gsk/gpu/gskgpuimageprivate.h
Normal file
@ -0,0 +1,40 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgputypesprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_TYPE_GPU_IMAGE (gsk_gpu_image_get_type ())
|
||||
#define GSK_GPU_IMAGE(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GSK_TYPE_GPU_IMAGE, GskGpuImage))
|
||||
#define GSK_GPU_IMAGE_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), GSK_TYPE_GPU_IMAGE, GskGpuImageClass))
|
||||
#define GSK_IS_GPU_IMAGE(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GSK_TYPE_GPU_IMAGE))
|
||||
#define GSK_IS_GPU_IMAGE_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), GSK_TYPE_GPU_IMAGE))
|
||||
#define GSK_GPU_IMAGE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GSK_TYPE_GPU_IMAGE, GskGpuImageClass))
|
||||
|
||||
typedef struct _GskGpuImageClass GskGpuImageClass;
|
||||
|
||||
struct _GskGpuImage
|
||||
{
|
||||
GObject parent_instance;
|
||||
};
|
||||
|
||||
struct _GskGpuImageClass
|
||||
{
|
||||
GObjectClass parent_class;
|
||||
};
|
||||
|
||||
GType gsk_gpu_image_get_type (void) G_GNUC_CONST;
|
||||
|
||||
void gsk_gpu_image_setup (GskGpuImage *self,
|
||||
GdkMemoryFormat format,
|
||||
gsize width,
|
||||
gsize height);
|
||||
|
||||
GdkMemoryFormat gsk_gpu_image_get_format (GskGpuImage *self);
|
||||
gsize gsk_gpu_image_get_width (GskGpuImage *self);
|
||||
gsize gsk_gpu_image_get_height (GskGpuImage *self);
|
||||
|
||||
|
||||
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GskGpuImage, g_object_unref)
|
||||
|
||||
G_END_DECLS
|
110
gsk/gpu/gskgpunodeprocessor.c
Normal file
110
gsk/gpu/gskgpunodeprocessor.c
Normal file
@ -0,0 +1,110 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpunodeprocessorprivate.h"
|
||||
|
||||
#include "gskgpublitopprivate.h"
|
||||
#include "gskgpuimageprivate.h"
|
||||
#include "gskgpuuploadopprivate.h"
|
||||
|
||||
#include "gskrendernodeprivate.h"
|
||||
|
||||
#define ORTHO_NEAR_PLANE -10000
|
||||
#define ORTHO_FAR_PLANE 10000
|
||||
|
||||
typedef struct _GskGpuNodeProcessor GskGpuNodeProcessor;

/* Per-render-pass state while walking the node tree. Lives on the
 * stack of gsk_gpu_node_processor_process(). */
struct _GskGpuNodeProcessor
{
  GskGpuFrame *frame;               /* frame that ops are recorded into */
  cairo_rectangle_int_t scissor;    /* current scissor rect in target pixels */
  graphene_point_t offset;          /* translation applied to node coordinates */
  graphene_vec2_t scale;            /* target pixels per viewport unit (x, y) */
  GskTransform *modelview;          /* extra transform; NULL for identity */
  graphene_matrix_t projection;     /* ortho projection onto the target */
  /* GskGpuClip clip; */
};
|
||||
|
||||
static void
|
||||
gsk_gpu_node_processor_add_node (GskGpuNodeProcessor *self,
|
||||
GskGpuImage *target,
|
||||
GskRenderNode *node);
|
||||
|
||||
/* counterpart to _init(); nothing to release yet (no clip state) */
static void
gsk_gpu_node_processor_finish (GskGpuNodeProcessor *self)
{
}
|
||||
|
||||
/* Set up the processor to render a @viewport-sized scene into a
 * @width x @height target, clipped to @clip (in target pixels). */
static void
gsk_gpu_node_processor_init (GskGpuNodeProcessor         *self,
                             GskGpuFrame                 *frame,
                             gsize                        width,
                             gsize                        height,
                             const cairo_rectangle_int_t *clip,
                             const graphene_rect_t       *viewport)
{
  self->frame = frame;

  self->scissor = *clip;
  //gsk_vulkan_clip_init_empty (&state.clip, &GRAPHENE_RECT_INIT (0, 0, viewport->size.width, viewport->size.height));

  self->modelview = NULL;
  /* near plane of 2*NEAR - FAR mirrors the Vulkan renderer's setup;
   * NOTE(review): presumably chosen so z=0 maps into the depth range — confirm */
  graphene_matrix_init_ortho (&self->projection,
                              0, width,
                              0, height,
                              2 * ORTHO_NEAR_PLANE - ORTHO_FAR_PLANE,
                              ORTHO_FAR_PLANE);
  /* scale converts viewport units to target pixels on each axis */
  graphene_vec2_init (&self->scale, width / viewport->size.width,
                                    height / viewport->size.height);
  self->offset = GRAPHENE_POINT_INIT (-viewport->origin.x,
                                      -viewport->origin.y);

}
|
||||
|
||||
/* Public entry point: render @node into @target. Wraps init / add_node /
 * finish of a stack-allocated processor. */
void
gsk_gpu_node_processor_process (GskGpuFrame                 *frame,
                                GskGpuImage                 *target,
                                const cairo_rectangle_int_t *clip,
                                GskRenderNode               *node,
                                const graphene_rect_t       *viewport)
{
  GskGpuNodeProcessor self;

  gsk_gpu_node_processor_init (&self,
                               frame,
                               gsk_gpu_image_get_width (target),
                               gsk_gpu_image_get_height (target),
                               clip,
                               viewport);

  gsk_gpu_node_processor_add_node (&self, target, node);

  gsk_gpu_node_processor_finish (&self);
}
|
||||
|
||||
/* Placeholder implementation (no shaders yet): rasterize the whole node
 * with cairo, upload the result, and blit it onto the target. The blit
 * copies the full uploaded image at 1:1 into the target's top-left. */
static void
gsk_gpu_node_processor_add_node (GskGpuNodeProcessor *self,
                                 GskGpuImage         *target,
                                 GskRenderNode       *node)
{
  GskGpuImage *image;

  image = gsk_gpu_upload_cairo_op (self->frame,
                                   node,
                                   &self->scale,
                                   &node->bounds);

  gsk_gpu_blit_op (self->frame,
                   image,
                   target,
                   &(cairo_rectangle_int_t) {
                       0, 0,
                       gsk_gpu_image_get_width (image),
                       gsk_gpu_image_get_height (image)
                   },
                   &(cairo_rectangle_int_t) {
                       0, 0,
                       gsk_gpu_image_get_width (image),
                       gsk_gpu_image_get_height (image)
                   },
                   GSK_GPU_BLIT_LINEAR);
}
|
14
gsk/gpu/gskgpunodeprocessorprivate.h
Normal file
14
gsk/gpu/gskgpunodeprocessorprivate.h
Normal file
@ -0,0 +1,14 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgputypesprivate.h"
|
||||
#include "gsktypes.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
void gsk_gpu_node_processor_process (GskGpuFrame *frame,
|
||||
GskGpuImage *target,
|
||||
const cairo_rectangle_int_t *clip,
|
||||
GskRenderNode *node,
|
||||
const graphene_rect_t *viewport);
|
||||
|
||||
G_END_DECLS
|
57
gsk/gpu/gskgpuop.c
Normal file
57
gsk/gpu/gskgpuop.c
Normal file
@ -0,0 +1,57 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpuopprivate.h"
|
||||
|
||||
#include "gskgpuframeprivate.h"
|
||||
|
||||
/* Allocate space for a new op of @op_class in @frame's packed ops array
 * and install its class pointer. The rest of the op struct is left for
 * the caller to fill in. */
GskGpuOp *
gsk_gpu_op_alloc (GskGpuFrame         *frame,
                  const GskGpuOpClass *op_class)
{
  GskGpuOp *op;

  op = gsk_gpu_frame_alloc_op (frame, op_class->size);
  op->op_class = op_class;

  return op;
}
|
||||
|
||||
/* release op-owned resources; dispatches to the op's finish vfunc */
void
gsk_gpu_op_finish (GskGpuOp *op)
{
  op->op_class->finish (op);
}
|
||||
|
||||
/* append a one-line debug description of @op to @string at @indent */
void
gsk_gpu_op_print (GskGpuOp *op,
                  GString  *string,
                  guint     indent)
{
  op->op_class->print (op, string, indent);
}
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
void
|
||||
gsk_gpu_op_vk_reserve_descriptor_sets (GskGpuOp *op,
|
||||
GskGpuFrame *frame)
|
||||
{
|
||||
op->op_class->vk_reserve_descriptor_sets (op, frame);
|
||||
}
|
||||
|
||||
GskGpuOp *
|
||||
gsk_gpu_op_vk_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkRenderPass render_pass,
|
||||
VkCommandBuffer command_buffer)
|
||||
{
|
||||
return op->op_class->vk_command (op, frame, render_pass, command_buffer);
|
||||
}
|
||||
#endif
|
||||
|
||||
/* execute @op using GL; returns the next op to process */
GskGpuOp *
gsk_gpu_op_gl_command (GskGpuOp    *op,
                       GskGpuFrame *frame)
{
  return op->op_class->gl_command (op, frame);
}
|
||||
|
73
gsk/gpu/gskgpuopprivate.h
Normal file
73
gsk/gpu/gskgpuopprivate.h
Normal file
@ -0,0 +1,73 @@
|
||||
#pragma once
|
||||
|
||||
#include <gdk/gdk.h>
|
||||
|
||||
#include "gskgputypesprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
typedef enum
|
||||
{
|
||||
GSK_GPU_STAGE_UPLOAD,
|
||||
GSK_GPU_STAGE_PASS,
|
||||
GSK_GPU_STAGE_COMMAND,
|
||||
GSK_GPU_STAGE_SHADER,
|
||||
/* magic ones */
|
||||
GSK_GPU_STAGE_BEGIN_PASS,
|
||||
GSK_GPU_STAGE_END_PASS
|
||||
} GskGpuStage;
|
||||
|
||||
struct _GskGpuOp
|
||||
{
|
||||
const GskGpuOpClass *op_class;
|
||||
|
||||
GskGpuOp *next;
|
||||
};
|
||||
|
||||
struct _GskGpuOpClass
|
||||
{
|
||||
gsize size;
|
||||
GskGpuStage stage;
|
||||
|
||||
void (* finish) (GskGpuOp *op);
|
||||
|
||||
void (* print) (GskGpuOp *op,
|
||||
GString *string,
|
||||
guint indent);
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
void (* vk_reserve_descriptor_sets) (GskGpuOp *op,
|
||||
GskGpuFrame *frame);
|
||||
GskGpuOp * (* vk_command) (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkRenderPass render_pass,
|
||||
VkCommandBuffer command_buffer);
|
||||
#endif
|
||||
GskGpuOp * (* gl_command) (GskGpuOp *op,
|
||||
GskGpuFrame *frame);
|
||||
};
|
||||
|
||||
/* ensures alignment of ops to multipes of 16 bytes - and that makes graphene happy */
|
||||
#define GSK_GPU_OP_SIZE(struct_name) ((sizeof(struct_name) + 15) & ~15)
|
||||
|
||||
GskGpuOp * gsk_gpu_op_alloc (GskGpuFrame *frame,
|
||||
const GskGpuOpClass *op_class);
|
||||
void gsk_gpu_op_finish (GskGpuOp *op);
|
||||
|
||||
void gsk_gpu_op_print (GskGpuOp *op,
|
||||
GString *string,
|
||||
guint indent);
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
void gsk_gpu_op_vk_reserve_descriptor_sets (GskGpuOp *op,
|
||||
GskGpuFrame *frame);
|
||||
GskGpuOp * gsk_gpu_op_vk_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkRenderPass render_pass,
|
||||
VkCommandBuffer command_buffer);
|
||||
#endif
|
||||
GskGpuOp * gsk_gpu_op_gl_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame);
|
||||
|
||||
G_END_DECLS
|
||||
|
104
gsk/gpu/gskgpuprint.c
Normal file
104
gsk/gpu/gskgpuprint.c
Normal file
@ -0,0 +1,104 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpuprintprivate.h"
|
||||
|
||||
#include "gskgpuimageprivate.h"
|
||||
|
||||
void
|
||||
gsk_gpu_print_indent (GString *string,
|
||||
guint indent)
|
||||
{
|
||||
g_string_append_printf (string, "%*s", 2 * indent, "");
|
||||
}
|
||||
|
||||
void
|
||||
gsk_gpu_print_op (GString *string,
|
||||
guint indent,
|
||||
const char *op_name)
|
||||
{
|
||||
gsk_gpu_print_indent (string, indent);
|
||||
g_string_append (string, op_name);
|
||||
g_string_append_c (string, ' ');
|
||||
}
|
||||
|
||||
void
|
||||
gsk_gpu_print_rect (GString *string,
|
||||
const float rect[4])
|
||||
{
|
||||
g_string_append_printf (string, "%g %g %g %g ",
|
||||
rect[0], rect[1],
|
||||
rect[2], rect[3]);
|
||||
}
|
||||
|
||||
/* Append "x y width height " for an integer rectangle. */
void
gsk_gpu_print_int_rect (GString                     *string,
                        const cairo_rectangle_int_t *rect)
{
  g_string_append_printf (string, "%d %d %d %d ",
                          rect->x, rect->y,
                          rect->width, rect->height);
}
|
||||
|
||||
/* Print a rounded rect given as 12 floats: bounds in rect[0..3] and the
 * 4 corner radii as (width, height) pairs in rect[4..11], laid out
 * widths at even and heights at odd indices. Prints the shortest
 * unambiguous form: nothing extra for sharp corners, a single radius
 * for uniform circular corners, four radii otherwise. */
void
gsk_gpu_print_rounded_rect (GString     *string,
                            const float  rect[12])
{
  gsk_gpu_print_rect (string, (const float *) rect);

  /* all radii zero: plain rectangle, nothing more to print */
  if (rect[4] == 0.0 && rect[5] == 0.0 &&
      rect[6] == 0.0 && rect[7] == 0.0 &&
      rect[8] == 0.0 && rect[9] == 0.0 &&
      rect[10] == 0.0 && rect[11] == 0.0)
    return;

  g_string_append (string, "/ ");

  /* any corner elliptical (width != height)? then details are elided */
  if (rect[4] != rect[5] ||
      rect[6] != rect[7] ||
      rect[8] != rect[9] ||
      rect[10] != rect[11])
    {
      g_string_append (string, "variable ");
    }
  /* circular corners but different per corner: print all four widths */
  else if (rect[4] != rect[6] ||
           rect[4] != rect[8] ||
           rect[4] != rect[10])
    {
      g_string_append_printf (string, "%g %g %g %g ",
                              rect[4], rect[6],
                              rect[8], rect[10]);
    }
  else
    {
      /* one uniform circular radius */
      g_string_append_printf (string, "%g ", rect[4]);
    }
}
|
||||
|
||||
/* Append a CSS-style color string ("rgba(...)" or "#rrggbb") for the
 * given r,g,b,a components, followed by a space. */
void
gsk_gpu_print_rgba (GString     *string,
                    const float  rgba[4])
{
  GdkRGBA color = { rgba[0], rgba[1], rgba[2], rgba[3] };
  char *s = gdk_rgba_to_string (&color);
  g_string_append (string, s);
  g_string_append_c (string, ' ');
  g_free (s);
}
|
||||
|
||||
void
|
||||
gsk_gpu_print_newline (GString *string)
|
||||
{
|
||||
if (string->len && string->str[string->len - 1] == ' ')
|
||||
string->str[string->len - 1] = '\n';
|
||||
else
|
||||
g_string_append_c (string, '\n');
|
||||
}
|
||||
|
||||
/* Append "WIDTHxHEIGHT " describing @image's pixel size. */
void
gsk_gpu_print_image (GString     *string,
                     GskGpuImage *image)
{
  g_string_append_printf (string, "%zux%zu ",
                          gsk_gpu_image_get_width (image),
                          gsk_gpu_image_get_height (image));
}
|
27
gsk/gpu/gskgpuprintprivate.h
Normal file
27
gsk/gpu/gskgpuprintprivate.h
Normal file
@ -0,0 +1,27 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgputypesprivate.h"
|
||||
|
||||
#include "gskroundedrect.h"
|
||||
|
||||
#include <cairo.h>
|
||||
#include <graphene.h>
|
||||
|
||||
void gsk_gpu_print_indent (GString *string,
|
||||
guint indent);
|
||||
void gsk_gpu_print_op (GString *string,
|
||||
guint indent,
|
||||
const char *op_name);
|
||||
|
||||
void gsk_gpu_print_newline (GString *string);
|
||||
|
||||
void gsk_gpu_print_rect (GString *string,
|
||||
const float rect[4]);
|
||||
void gsk_gpu_print_int_rect (GString *string,
|
||||
const cairo_rectangle_int_t *rect);
|
||||
void gsk_gpu_print_rounded_rect (GString *string,
|
||||
const float rect[12]);
|
||||
void gsk_gpu_print_rgba (GString *string,
|
||||
const float rgba[4]);
|
||||
void gsk_gpu_print_image (GString *string,
|
||||
GskGpuImage *image);
|
286
gsk/gpu/gskgpurenderer.c
Normal file
286
gsk/gpu/gskgpurenderer.c
Normal file
@ -0,0 +1,286 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpurendererprivate.h"
|
||||
|
||||
#include "gskdebugprivate.h"
|
||||
#include "gskgpudeviceprivate.h"
|
||||
#include "gskgpuframeprivate.h"
|
||||
#include "gskprivate.h"
|
||||
#include "gskrendererprivate.h"
|
||||
#include "gskrendernodeprivate.h"
|
||||
#include "gskgpuimageprivate.h"
|
||||
|
||||
#include "gdk/gdkdisplayprivate.h"
|
||||
#include "gdk/gdkdrawcontextprivate.h"
|
||||
#include "gdk/gdkprofilerprivate.h"
|
||||
#include "gdk/gdktextureprivate.h"
|
||||
#include "gdk/gdkdrawcontextprivate.h"
|
||||
|
||||
#include <graphene.h>
|
||||
|
||||
#define GSK_GPU_MAX_FRAMES 4
|
||||
|
||||
typedef struct _GskGpuRendererPrivate GskGpuRendererPrivate;

struct _GskGpuRendererPrivate
{
  GskGpuDevice *device;      /* shared GPU device */
  GdkDrawContext *context;   /* GL or Vulkan context for the surface */

  /* pool of in-flight frames, reused round-robin once idle */
  GskGpuFrame *frames[GSK_GPU_MAX_FRAMES];
};
|
||||
|
||||
G_DEFINE_TYPE_WITH_PRIVATE (GskGpuRenderer, gsk_gpu_renderer, GSK_TYPE_RENDERER)
|
||||
|
||||
static void
|
||||
gsk_gpu_renderer_make_current (GskGpuRenderer *self)
|
||||
{
|
||||
GSK_GPU_RENDERER_GET_CLASS (self)->make_current (self);
|
||||
}
|
||||
|
||||
static cairo_region_t *
|
||||
get_render_region (GskGpuRenderer *self)
|
||||
{
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
const cairo_region_t *damage;
|
||||
cairo_region_t *scaled_damage;
|
||||
GdkSurface *surface;
|
||||
double scale;
|
||||
|
||||
surface = gdk_draw_context_get_surface (priv->context);
|
||||
scale = gdk_surface_get_scale (surface);
|
||||
|
||||
damage = gdk_draw_context_get_frame_region (priv->context);
|
||||
scaled_damage = cairo_region_create ();
|
||||
for (int i = 0; i < cairo_region_num_rectangles (damage); i++)
|
||||
{
|
||||
cairo_rectangle_int_t rect;
|
||||
cairo_region_get_rectangle (damage, i, &rect);
|
||||
cairo_region_union_rectangle (scaled_damage, &(cairo_rectangle_int_t) {
|
||||
.x = (int) floor (rect.x * scale),
|
||||
.y = (int) floor (rect.y * scale),
|
||||
.width = (int) ceil ((rect.x + rect.width) * scale) - floor (rect.x * scale),
|
||||
.height = (int) ceil ((rect.y + rect.height) * scale) - floor (rect.y * scale),
|
||||
});
|
||||
}
|
||||
|
||||
return scaled_damage;
|
||||
}
|
||||
|
||||
static GskGpuFrame *
|
||||
gsk_gpu_renderer_create_frame (GskGpuRenderer *self)
|
||||
{
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
GskGpuFrame *result;
|
||||
|
||||
result = g_object_new (GSK_GPU_RENDERER_GET_CLASS (self)->frame_type, NULL);
|
||||
|
||||
gsk_gpu_frame_setup (result, self, priv->device);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static GskGpuFrame *
|
||||
gsk_gpu_renderer_get_frame (GskGpuRenderer *self)
|
||||
{
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
guint i;
|
||||
|
||||
while (TRUE)
|
||||
{
|
||||
for (i = 0; i < G_N_ELEMENTS (priv->frames); i++)
|
||||
{
|
||||
if (priv->frames[i] == NULL)
|
||||
{
|
||||
priv->frames[i] = gsk_gpu_renderer_create_frame (self);
|
||||
return priv->frames[i];
|
||||
}
|
||||
|
||||
if (!gsk_gpu_frame_is_busy (priv->frames[i]))
|
||||
return priv->frames[i];
|
||||
}
|
||||
|
||||
GSK_GPU_RENDERER_GET_CLASS (self)->wait (self, priv->frames, GSK_GPU_MAX_FRAMES);
|
||||
}
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gsk_gpu_renderer_realize (GskRenderer *renderer,
|
||||
GdkSurface *surface,
|
||||
GError **error)
|
||||
{
|
||||
GskGpuRenderer *self = GSK_GPU_RENDERER (renderer);
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
GdkDisplay *display;
|
||||
|
||||
if (surface)
|
||||
display = gdk_surface_get_display (surface);
|
||||
else
|
||||
display = gdk_display_get_default ();
|
||||
|
||||
priv->device = GSK_GPU_RENDERER_GET_CLASS (self)->get_device (display, error);
|
||||
if (priv->device == NULL)
|
||||
return FALSE;
|
||||
|
||||
priv->context = GSK_GPU_RENDERER_GET_CLASS (self)->create_context (self, display, surface, error);
|
||||
if (priv->context == NULL)
|
||||
{
|
||||
g_clear_object (&priv->device);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_renderer_unrealize (GskRenderer *renderer)
|
||||
{
|
||||
GskGpuRenderer *self = GSK_GPU_RENDERER (renderer);
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
gsize i, j;
|
||||
|
||||
gsk_gpu_renderer_make_current (self);
|
||||
|
||||
while (TRUE)
|
||||
{
|
||||
for (i = 0, j = 0; i < G_N_ELEMENTS (priv->frames); i++)
|
||||
{
|
||||
if (priv->frames[i] == NULL)
|
||||
break;
|
||||
if (gsk_gpu_frame_is_busy (priv->frames[i]))
|
||||
{
|
||||
if (i > j)
|
||||
{
|
||||
priv->frames[j] = priv->frames[i];
|
||||
priv->frames[i] = NULL;
|
||||
}
|
||||
j++;
|
||||
continue;
|
||||
}
|
||||
g_clear_object (&priv->frames[i]);
|
||||
}
|
||||
if (j == 0)
|
||||
break;
|
||||
GSK_GPU_RENDERER_GET_CLASS (self)->wait (self, priv->frames, j);
|
||||
}
|
||||
|
||||
g_clear_object (&priv->context);
|
||||
g_clear_object (&priv->device);
|
||||
}
|
||||
|
||||
static GdkTexture *
|
||||
gsk_gpu_renderer_render_texture (GskRenderer *renderer,
|
||||
GskRenderNode *root,
|
||||
const graphene_rect_t *viewport)
|
||||
{
|
||||
GskGpuRenderer *self = GSK_GPU_RENDERER (renderer);
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
GskGpuFrame *frame;
|
||||
GskGpuImage *image;
|
||||
GdkTexture *texture;
|
||||
graphene_rect_t rounded_viewport;
|
||||
|
||||
gsk_gpu_renderer_make_current (self);
|
||||
|
||||
frame = gsk_gpu_renderer_create_frame (self);
|
||||
|
||||
rounded_viewport = GRAPHENE_RECT_INIT (viewport->origin.x,
|
||||
viewport->origin.y,
|
||||
ceil (viewport->size.width),
|
||||
ceil (viewport->size.height));
|
||||
image = gsk_gpu_device_create_offscreen_image (priv->device,
|
||||
gsk_render_node_get_preferred_depth (root),
|
||||
rounded_viewport.size.width,
|
||||
rounded_viewport.size.height);
|
||||
|
||||
texture = NULL;
|
||||
gsk_gpu_frame_render (frame,
|
||||
image,
|
||||
NULL,
|
||||
root,
|
||||
&rounded_viewport,
|
||||
&texture);
|
||||
|
||||
g_object_unref (frame);
|
||||
g_object_unref (image);
|
||||
|
||||
/* check that callback setting texture was actually called, as its technically async */
|
||||
g_assert (texture);
|
||||
|
||||
return texture;
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_renderer_render (GskRenderer *renderer,
|
||||
GskRenderNode *root,
|
||||
const cairo_region_t *region)
|
||||
{
|
||||
GskGpuRenderer *self = GSK_GPU_RENDERER (renderer);
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
GskGpuFrame *frame;
|
||||
GskGpuImage *backbuffer;
|
||||
cairo_region_t *render_region;
|
||||
GdkSurface *surface;
|
||||
|
||||
gdk_draw_context_begin_frame_full (priv->context,
|
||||
gsk_render_node_get_preferred_depth (root),
|
||||
region);
|
||||
|
||||
gsk_gpu_renderer_make_current (self);
|
||||
|
||||
backbuffer = GSK_GPU_RENDERER_GET_CLASS (self)->get_backbuffer (self);
|
||||
|
||||
frame = gsk_gpu_renderer_get_frame (self);
|
||||
render_region = get_render_region (self);
|
||||
surface = gdk_draw_context_get_surface (priv->context);
|
||||
|
||||
gsk_gpu_frame_render (frame,
|
||||
backbuffer,
|
||||
render_region,
|
||||
root,
|
||||
&GRAPHENE_RECT_INIT (
|
||||
0, 0,
|
||||
gdk_surface_get_width (surface),
|
||||
gdk_surface_get_height (surface)
|
||||
),
|
||||
NULL);
|
||||
|
||||
gdk_draw_context_end_frame (priv->context);
|
||||
|
||||
g_clear_pointer (&render_region, cairo_region_destroy);
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_renderer_class_init (GskGpuRendererClass *klass)
|
||||
{
|
||||
GskRendererClass *renderer_class = GSK_RENDERER_CLASS (klass);
|
||||
|
||||
renderer_class->realize = gsk_gpu_renderer_realize;
|
||||
renderer_class->unrealize = gsk_gpu_renderer_unrealize;
|
||||
renderer_class->render = gsk_gpu_renderer_render;
|
||||
renderer_class->render_texture = gsk_gpu_renderer_render_texture;
|
||||
|
||||
gsk_ensure_resources ();
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_renderer_init (GskGpuRenderer *self)
|
||||
{
|
||||
}
|
||||
|
||||
GdkDrawContext *
|
||||
gsk_gpu_renderer_get_context (GskGpuRenderer *self)
|
||||
{
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
|
||||
return priv->context;
|
||||
}
|
||||
|
||||
GskGpuDevice *
|
||||
gsk_gpu_renderer_get_device (GskGpuRenderer *self)
|
||||
{
|
||||
GskGpuRendererPrivate *priv = gsk_gpu_renderer_get_instance_private (self);
|
||||
|
||||
return priv->device;
|
||||
}
|
||||
|
38
gsk/gpu/gskgpurenderer.h
Normal file
38
gsk/gpu/gskgpurenderer.h
Normal file
@ -0,0 +1,38 @@
|
||||
/*
|
||||
* Copyright © 2016 Benjamin Otte
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gdk/gdk.h>
|
||||
#include <gsk/gskrenderer.h>
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_TYPE_GPU_RENDERER (gsk_gpu_renderer_get_type ())
|
||||
|
||||
#define GSK_GPU_RENDERER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GSK_TYPE_GPU_RENDERER, GskGpuRenderer))
|
||||
#define GSK_IS_GPU_RENDERER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GSK_TYPE_GPU_RENDERER))
|
||||
|
||||
typedef struct _GskGpuRenderer GskGpuRenderer;
|
||||
|
||||
GDK_AVAILABLE_IN_ALL
|
||||
GType gsk_gpu_renderer_get_type (void) G_GNUC_CONST;
|
||||
|
||||
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GskGpuRenderer, g_object_unref)
|
||||
|
||||
G_END_DECLS
|
||||
|
45
gsk/gpu/gskgpurendererprivate.h
Normal file
45
gsk/gpu/gskgpurendererprivate.h
Normal file
@ -0,0 +1,45 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpurenderer.h"
|
||||
|
||||
#include "gskgputypesprivate.h"
|
||||
#include "gskrendererprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_GPU_RENDERER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GSK_TYPE_GPU_RENDERER, GskGpuRendererClass))
|
||||
#define GSK_IS_GPU_RENDERER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GSK_TYPE_GPU_RENDERER))
|
||||
#define GSK_GPU_RENDERER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GSK_TYPE_GPU_RENDERER, GskGpuRendererClass))
|
||||
|
||||
typedef struct _GskGpuRendererClass GskGpuRendererClass;
|
||||
|
||||
struct _GskGpuRenderer
|
||||
{
|
||||
GskRenderer parent_instance;
|
||||
};
|
||||
|
||||
struct _GskGpuRendererClass
|
||||
{
|
||||
GskRendererClass parent_class;
|
||||
|
||||
GType frame_type;
|
||||
|
||||
GskGpuDevice * (* get_device) (GdkDisplay *display,
|
||||
GError **error);
|
||||
GdkDrawContext * (* create_context) (GskGpuRenderer *self,
|
||||
GdkDisplay *display,
|
||||
GdkSurface *surface,
|
||||
GError **error);
|
||||
|
||||
void (* make_current) (GskGpuRenderer *self);
|
||||
GskGpuImage * (* get_backbuffer) (GskGpuRenderer *self);
|
||||
void (* wait) (GskGpuRenderer *self,
|
||||
GskGpuFrame **frame,
|
||||
gsize n_frames);
|
||||
};
|
||||
|
||||
GdkDrawContext * gsk_gpu_renderer_get_context (GskGpuRenderer *self);
|
||||
GskGpuDevice * gsk_gpu_renderer_get_device (GskGpuRenderer *self);
|
||||
|
||||
G_END_DECLS
|
||||
|
12
gsk/gpu/gskgputypesprivate.h
Normal file
12
gsk/gpu/gskgputypesprivate.h
Normal file
@ -0,0 +1,12 @@
|
||||
#pragma once
|
||||
|
||||
#include <gdk/gdk.h>
|
||||
|
||||
#include "gdk/gdkmemoryformatprivate.h"
|
||||
|
||||
typedef struct _GskGpuDevice GskGpuDevice;
|
||||
typedef struct _GskGpuFrame GskGpuFrame;
|
||||
typedef struct _GskGpuImage GskGpuImage;
|
||||
typedef struct _GskGpuOp GskGpuOp;
|
||||
typedef struct _GskGpuOpClass GskGpuOpClass;
|
||||
|
346
gsk/gpu/gskgpuuploadop.c
Normal file
346
gsk/gpu/gskgpuuploadop.c
Normal file
@ -0,0 +1,346 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskgpuuploadopprivate.h"
|
||||
|
||||
#include "gskgpuframeprivate.h"
|
||||
#include "gskgpuimageprivate.h"
|
||||
#include "gskgpuprintprivate.h"
|
||||
#include "gskgldeviceprivate.h"
|
||||
#include "gskglimageprivate.h"
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
#include "gskvulkanbufferprivate.h"
|
||||
#include "gskvulkanimageprivate.h"
|
||||
#endif
|
||||
|
||||
#include "gdk/gdkglcontextprivate.h"
|
||||
#include "gsk/gskrendernodeprivate.h"
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_op_gl_command_with_area (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
const cairo_rectangle_int_t *area,
|
||||
void (* draw_func) (GskGpuOp *, guchar *, gsize))
|
||||
{
|
||||
GskGLImage *gl_image = GSK_GL_IMAGE (image);
|
||||
GdkMemoryFormat format;
|
||||
GdkGLContext *context;
|
||||
gsize stride, bpp;
|
||||
guchar *data;
|
||||
guint gl_format, gl_type;
|
||||
|
||||
context = GDK_GL_CONTEXT (gsk_gpu_frame_get_context (frame));
|
||||
format = gsk_gpu_image_get_format (image);
|
||||
bpp = gdk_memory_format_bytes_per_pixel (format);
|
||||
stride = area->width * bpp;
|
||||
data = g_malloc (area->height * stride);
|
||||
|
||||
draw_func (op, data, stride);
|
||||
|
||||
gl_format = gsk_gl_image_get_gl_format (gl_image);
|
||||
gl_type = gsk_gl_image_get_gl_type (gl_image);
|
||||
|
||||
glActiveTexture (GL_TEXTURE0);
|
||||
gsk_gl_image_bind_texture (gl_image);
|
||||
|
||||
glPixelStorei (GL_UNPACK_ALIGNMENT, gdk_memory_format_alignment (format));
|
||||
|
||||
/* GL_UNPACK_ROW_LENGTH is available on desktop GL, OpenGL ES >= 3.0, or if
|
||||
* the GL_EXT_unpack_subimage extension for OpenGL ES 2.0 is available
|
||||
*/
|
||||
if (stride == gsk_gpu_image_get_width (image) * bpp)
|
||||
{
|
||||
glTexSubImage2D (GL_TEXTURE_2D, 0, area->x, area->y, area->width, area->height, gl_format, gl_type, data);
|
||||
}
|
||||
else if (stride % bpp == 0 && gdk_gl_context_has_unpack_subimage (context))
|
||||
{
|
||||
glPixelStorei (GL_UNPACK_ROW_LENGTH, stride / bpp);
|
||||
|
||||
glTexSubImage2D (GL_TEXTURE_2D, 0, area->x, area->y, area->width, area->height, gl_format, gl_type, data);
|
||||
|
||||
glPixelStorei (GL_UNPACK_ROW_LENGTH, 0);
|
||||
}
|
||||
else
|
||||
{
|
||||
gsize i;
|
||||
for (i = 0; i < area->height; i++)
|
||||
glTexSubImage2D (GL_TEXTURE_2D, 0, area->x, area->y + i, area->width, 1, gl_format, gl_type, data + (i * stride));
|
||||
}
|
||||
|
||||
glPixelStorei (GL_UNPACK_ALIGNMENT, 4);
|
||||
|
||||
g_free (data);
|
||||
|
||||
return op->next;
|
||||
}
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_op_gl_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
void (* draw_func) (GskGpuOp *, guchar *, gsize))
|
||||
{
|
||||
return gsk_gpu_upload_op_gl_command_with_area (op,
|
||||
frame,
|
||||
image,
|
||||
&(cairo_rectangle_int_t) {
|
||||
0, 0,
|
||||
gsk_gpu_image_get_width (image),
|
||||
gsk_gpu_image_get_height (image)
|
||||
},
|
||||
draw_func);
|
||||
}
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
static void
|
||||
gsk_gpu_upload_op_vk_reserve_descriptor_sets (GskGpuOp *op,
|
||||
GskGpuFrame *frame)
|
||||
{
|
||||
}
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_op_vk_command_with_area (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkCommandBuffer command_buffer,
|
||||
GskVulkanImage *image,
|
||||
const cairo_rectangle_int_t *area,
|
||||
void (* draw_func) (GskGpuOp *, guchar *, gsize),
|
||||
GskVulkanBuffer **buffer)
|
||||
{
|
||||
gsize stride;
|
||||
guchar *data;
|
||||
|
||||
stride = area->width * gdk_memory_format_bytes_per_pixel (gsk_gpu_image_get_format (GSK_GPU_IMAGE (image)));
|
||||
*buffer = gsk_vulkan_buffer_new_map (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
|
||||
area->height * stride,
|
||||
GSK_VULKAN_WRITE);
|
||||
data = gsk_vulkan_buffer_get_data (*buffer);
|
||||
|
||||
draw_func (op, data, stride);
|
||||
|
||||
vkCmdPipelineBarrier (command_buffer,
|
||||
VK_PIPELINE_STAGE_HOST_BIT,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
0,
|
||||
0, NULL,
|
||||
1, &(VkBufferMemoryBarrier) {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
|
||||
.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT,
|
||||
.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
|
||||
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.buffer = gsk_vulkan_buffer_get_buffer (*buffer),
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE,
|
||||
},
|
||||
0, NULL);
|
||||
gsk_vulkan_image_transition (image,
|
||||
command_buffer,
|
||||
VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
||||
VK_ACCESS_TRANSFER_WRITE_BIT);
|
||||
|
||||
vkCmdCopyBufferToImage (command_buffer,
|
||||
gsk_vulkan_buffer_get_buffer (*buffer),
|
||||
gsk_vulkan_image_get_vk_image (image),
|
||||
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
||||
1,
|
||||
(VkBufferImageCopy[1]) {
|
||||
{
|
||||
.bufferOffset = 0,
|
||||
.bufferRowLength = area->width,
|
||||
.bufferImageHeight = area->height,
|
||||
.imageSubresource = {
|
||||
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
|
||||
.mipLevel = 0,
|
||||
.baseArrayLayer = 0,
|
||||
.layerCount = 1
|
||||
},
|
||||
.imageOffset = {
|
||||
.x = area->x,
|
||||
.y = area->y,
|
||||
.z = 0
|
||||
},
|
||||
.imageExtent = {
|
||||
.width = area->width,
|
||||
.height = area->height,
|
||||
.depth = 1
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return op->next;
|
||||
}
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_op_vk_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkCommandBuffer command_buffer,
|
||||
GskVulkanImage *image,
|
||||
void (* draw_func) (GskGpuOp *, guchar *, gsize),
|
||||
GskVulkanBuffer **buffer)
|
||||
{
|
||||
gsize stride;
|
||||
guchar *data;
|
||||
|
||||
data = gsk_vulkan_image_get_data (image, &stride);
|
||||
if (data)
|
||||
{
|
||||
draw_func (op, data, stride);
|
||||
|
||||
*buffer = NULL;
|
||||
|
||||
return op->next;
|
||||
}
|
||||
|
||||
return gsk_gpu_upload_op_vk_command_with_area (op,
|
||||
frame,
|
||||
command_buffer,
|
||||
image,
|
||||
&(cairo_rectangle_int_t) {
|
||||
0, 0,
|
||||
gsk_gpu_image_get_width (GSK_GPU_IMAGE (image)),
|
||||
gsk_gpu_image_get_height (GSK_GPU_IMAGE (image)),
|
||||
},
|
||||
draw_func,
|
||||
buffer);
|
||||
|
||||
}
|
||||
#endif
|
||||
|
||||
typedef struct _GskGpuUploadCairoOp GskGpuUploadCairoOp;
|
||||
|
||||
struct _GskGpuUploadCairoOp
|
||||
{
|
||||
GskGpuOp op;
|
||||
|
||||
GskGpuImage *image;
|
||||
GskRenderNode *node;
|
||||
graphene_rect_t viewport;
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
GskVulkanBuffer *buffer;
|
||||
#endif
|
||||
};
|
||||
|
||||
static void
|
||||
gsk_gpu_upload_cairo_op_finish (GskGpuOp *op)
|
||||
{
|
||||
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
|
||||
|
||||
g_object_unref (self->image);
|
||||
gsk_render_node_unref (self->node);
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
g_clear_pointer (&self->buffer, gsk_vulkan_buffer_free);
|
||||
#endif
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_upload_cairo_op_print (GskGpuOp *op,
|
||||
GString *string,
|
||||
guint indent)
|
||||
{
|
||||
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
|
||||
|
||||
gsk_gpu_print_op (string, indent, "upload-cairo");
|
||||
gsk_gpu_print_image (string, self->image);
|
||||
gsk_gpu_print_newline (string);
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_gpu_upload_cairo_op_draw (GskGpuOp *op,
|
||||
guchar *data,
|
||||
gsize stride)
|
||||
{
|
||||
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
|
||||
cairo_surface_t *surface;
|
||||
cairo_t *cr;
|
||||
int width, height;
|
||||
|
||||
width = gsk_gpu_image_get_width (self->image);
|
||||
height = gsk_gpu_image_get_height (self->image);
|
||||
|
||||
surface = cairo_image_surface_create_for_data (data,
|
||||
CAIRO_FORMAT_ARGB32,
|
||||
width, height,
|
||||
stride);
|
||||
cairo_surface_set_device_scale (surface,
|
||||
width / self->viewport.size.width,
|
||||
height / self->viewport.size.height);
|
||||
cr = cairo_create (surface);
|
||||
cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);
|
||||
cairo_paint (cr);
|
||||
cairo_set_operator (cr, CAIRO_OPERATOR_OVER);
|
||||
cairo_translate (cr, -self->viewport.origin.x, -self->viewport.origin.y);
|
||||
|
||||
gsk_render_node_draw_fallback (self->node, cr);
|
||||
|
||||
cairo_destroy (cr);
|
||||
|
||||
cairo_surface_finish (surface);
|
||||
cairo_surface_destroy (surface);
|
||||
}
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_cairo_op_vk_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame,
|
||||
VkRenderPass render_pass,
|
||||
VkCommandBuffer command_buffer)
|
||||
{
|
||||
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
|
||||
|
||||
return gsk_gpu_upload_op_vk_command (op,
|
||||
frame,
|
||||
command_buffer,
|
||||
GSK_VULKAN_IMAGE (self->image),
|
||||
gsk_gpu_upload_cairo_op_draw,
|
||||
&self->buffer);
|
||||
}
|
||||
#endif
|
||||
|
||||
static GskGpuOp *
|
||||
gsk_gpu_upload_cairo_op_gl_command (GskGpuOp *op,
|
||||
GskGpuFrame *frame)
|
||||
{
|
||||
GskGpuUploadCairoOp *self = (GskGpuUploadCairoOp *) op;
|
||||
|
||||
return gsk_gpu_upload_op_gl_command (op,
|
||||
frame,
|
||||
self->image,
|
||||
gsk_gpu_upload_cairo_op_draw);
|
||||
}
|
||||
|
||||
static const GskGpuOpClass GSK_GPU_UPLOAD_CAIRO_OP_CLASS = {
|
||||
GSK_GPU_OP_SIZE (GskGpuUploadCairoOp),
|
||||
GSK_GPU_STAGE_UPLOAD,
|
||||
gsk_gpu_upload_cairo_op_finish,
|
||||
gsk_gpu_upload_cairo_op_print,
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
gsk_gpu_upload_op_vk_reserve_descriptor_sets,
|
||||
gsk_gpu_upload_cairo_op_vk_command,
|
||||
#endif
|
||||
gsk_gpu_upload_cairo_op_gl_command
|
||||
};
|
||||
|
||||
GskGpuImage *
|
||||
gsk_gpu_upload_cairo_op (GskGpuFrame *frame,
|
||||
GskRenderNode *node,
|
||||
const graphene_vec2_t *scale,
|
||||
const graphene_rect_t *viewport)
|
||||
{
|
||||
GskGpuUploadCairoOp *self;
|
||||
|
||||
self = (GskGpuUploadCairoOp *) gsk_gpu_op_alloc (frame, &GSK_GPU_UPLOAD_CAIRO_OP_CLASS);
|
||||
|
||||
self->node = gsk_render_node_ref (node);
|
||||
self->image = gsk_gpu_device_create_upload_image (gsk_gpu_frame_get_device (frame),
|
||||
GDK_MEMORY_DEFAULT,
|
||||
ceil (graphene_vec2_get_x (scale) * viewport->size.width),
|
||||
ceil (graphene_vec2_get_y (scale) * viewport->size.height));
|
||||
/* g_assert (gsk_gpu_image_get_postprocess (self->image) == 0); */
|
||||
self->viewport = *viewport;
|
||||
|
||||
return self->image;
|
||||
}
|
||||
|
25
gsk/gpu/gskgpuuploadopprivate.h
Normal file
25
gsk/gpu/gskgpuuploadopprivate.h
Normal file
@ -0,0 +1,25 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpuopprivate.h"
|
||||
|
||||
#include "gsktypes.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
GskGpuImage * gsk_gpu_upload_texture_op (GskGpuFrame *frame,
|
||||
GdkTexture *texture);
|
||||
|
||||
GskGpuImage * gsk_gpu_upload_cairo_op (GskGpuFrame *frame,
|
||||
GskRenderNode *node,
|
||||
const graphene_vec2_t *scale,
|
||||
const graphene_rect_t *viewport);
|
||||
|
||||
void gsk_gpu_upload_glyph_op (GskGpuFrame *frame,
|
||||
GskGpuImage *image,
|
||||
cairo_rectangle_int_t *area,
|
||||
PangoFont *font,
|
||||
PangoGlyphInfo *glyph_info,
|
||||
float scale);
|
||||
|
||||
G_END_DECLS
|
||||
|
122
gsk/gpu/gsknglrenderer.c
Normal file
122
gsk/gpu/gsknglrenderer.c
Normal file
@ -0,0 +1,122 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gsknglrendererprivate.h"
|
||||
|
||||
#include "gskgpuimageprivate.h"
|
||||
#include "gskgpurendererprivate.h"
|
||||
#include "gskgldeviceprivate.h"
|
||||
#include "gskglframeprivate.h"
|
||||
#include "gskglimageprivate.h"
|
||||
|
||||
struct _GskNglRenderer
|
||||
{
|
||||
GskGpuRenderer parent_instance;
|
||||
|
||||
GskGpuImage *backbuffer;
|
||||
};
|
||||
|
||||
struct _GskNglRendererClass
|
||||
{
|
||||
GskGpuRendererClass parent_class;
|
||||
};
|
||||
|
||||
G_DEFINE_TYPE (GskNglRenderer, gsk_ngl_renderer, GSK_TYPE_GPU_RENDERER)
|
||||
|
||||
static GdkDrawContext *
|
||||
gsk_ngl_renderer_create_context (GskGpuRenderer *renderer,
|
||||
GdkDisplay *display,
|
||||
GdkSurface *surface,
|
||||
GError **error)
|
||||
{
|
||||
GdkGLContext *context;
|
||||
|
||||
if (surface)
|
||||
context = gdk_surface_create_gl_context (surface, error);
|
||||
else
|
||||
context = gdk_display_create_gl_context (display, error);
|
||||
|
||||
if (context == NULL)
|
||||
return NULL;
|
||||
|
||||
/* GLES 2 is not supported */
|
||||
gdk_gl_context_set_required_version (context, 3, 0);
|
||||
|
||||
if (!gdk_gl_context_realize (context, error))
|
||||
{
|
||||
g_object_unref (context);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return GDK_DRAW_CONTEXT (context);
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_ngl_renderer_make_current (GskGpuRenderer *renderer)
|
||||
{
|
||||
gdk_gl_context_make_current (GDK_GL_CONTEXT (gsk_gpu_renderer_get_context (renderer)));
|
||||
}
|
||||
|
||||
static GskGpuImage *
|
||||
gsk_ngl_renderer_get_backbuffer (GskGpuRenderer *renderer)
|
||||
{
|
||||
GskNglRenderer *self = GSK_NGL_RENDERER (renderer);
|
||||
GdkDrawContext *context;
|
||||
GdkSurface *surface;
|
||||
float scale;
|
||||
|
||||
context = gsk_gpu_renderer_get_context (renderer);
|
||||
surface = gdk_draw_context_get_surface (context);
|
||||
scale = gdk_surface_get_scale (surface);
|
||||
|
||||
if (self->backbuffer == NULL ||
|
||||
gsk_gpu_image_get_width (self->backbuffer) != ceil (gdk_surface_get_width (surface) * scale) ||
|
||||
gsk_gpu_image_get_height (self->backbuffer) != ceil (gdk_surface_get_height (surface) * scale))
|
||||
{
|
||||
g_clear_object (&self->backbuffer);
|
||||
self->backbuffer = gsk_gl_image_new_backbuffer (GSK_GL_DEVICE (gsk_gpu_renderer_get_device (renderer)),
|
||||
GDK_MEMORY_DEFAULT /* FIXME */,
|
||||
ceil (gdk_surface_get_width (surface) * scale),
|
||||
ceil (gdk_surface_get_height (surface) * scale));
|
||||
}
|
||||
|
||||
return self->backbuffer;
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_ngl_renderer_wait (GskGpuRenderer *self,
|
||||
GskGpuFrame **frame,
|
||||
gsize n_frames)
|
||||
{
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_ngl_renderer_class_init (GskNglRendererClass *klass)
|
||||
{
|
||||
GskGpuRendererClass *gpu_renderer_class = GSK_GPU_RENDERER_CLASS (klass);
|
||||
|
||||
gpu_renderer_class->frame_type = GSK_TYPE_GL_FRAME;
|
||||
|
||||
gpu_renderer_class->get_device = gsk_gl_device_get_for_display;
|
||||
gpu_renderer_class->create_context = gsk_ngl_renderer_create_context;
|
||||
gpu_renderer_class->make_current = gsk_ngl_renderer_make_current;
|
||||
gpu_renderer_class->get_backbuffer = gsk_ngl_renderer_get_backbuffer;
|
||||
gpu_renderer_class->wait = gsk_ngl_renderer_wait;
|
||||
}
|
||||
|
||||
static void
|
||||
gsk_ngl_renderer_init (GskNglRenderer *self)
|
||||
{
|
||||
}
|
||||
|
||||
/**
|
||||
* gsk_ngl_renderer_new:
|
||||
*
|
||||
* Creates an instance of the new experimental GL renderer.
|
||||
*
|
||||
* Returns: (transfer full): a new GL renderer
|
||||
*/
|
||||
GskRenderer *
|
||||
gsk_ngl_renderer_new (void)
|
||||
{
|
||||
return g_object_new (GSK_TYPE_NGL_RENDERER, NULL);
|
||||
}
|
16
gsk/gpu/gsknglrendererprivate.h
Normal file
16
gsk/gpu/gsknglrendererprivate.h
Normal file
@ -0,0 +1,16 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpurendererprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_TYPE_NGL_RENDERER (gsk_ngl_renderer_get_type ())
|
||||
|
||||
GDK_AVAILABLE_IN_ALL
|
||||
G_DECLARE_FINAL_TYPE (GskNglRenderer, gsk_ngl_renderer, GSK, NGL_RENDERER, GskGpuRenderer)
|
||||
|
||||
GDK_AVAILABLE_IN_ALL
|
||||
GskRenderer *gsk_ngl_renderer_new (void);
|
||||
|
||||
G_END_DECLS
|
||||
|
120
gsk/gpu/gskvulkanbuffer.c
Normal file
120
gsk/gpu/gskvulkanbuffer.c
Normal file
@ -0,0 +1,120 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskvulkanbufferprivate.h"
|
||||
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
#include "gskvulkanmemoryprivate.h"
|
||||
|
||||
struct _GskVulkanBuffer
|
||||
{
|
||||
GskVulkanDevice *device;
|
||||
|
||||
VkBuffer vk_buffer;
|
||||
|
||||
GskVulkanAllocator *allocator;
|
||||
GskVulkanAllocation allocation;
|
||||
};
|
||||
|
||||
static GskVulkanBuffer *
|
||||
gsk_vulkan_buffer_new_internal (GskVulkanDevice *device,
|
||||
gsize size,
|
||||
VkBufferUsageFlags usage)
|
||||
{
|
||||
VkMemoryRequirements requirements;
|
||||
GskVulkanBuffer *self;
|
||||
|
||||
self = g_new0 (GskVulkanBuffer, 1);
|
||||
|
||||
self->device = g_object_ref (device);
|
||||
|
||||
GSK_VK_CHECK (vkCreateBuffer, gsk_vulkan_device_get_vk_device (device),
|
||||
&(VkBufferCreateInfo) {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
|
||||
.size = size,
|
||||
.flags = 0,
|
||||
.usage = usage,
|
||||
.sharingMode = VK_SHARING_MODE_EXCLUSIVE
|
||||
},
|
||||
NULL,
|
||||
&self->vk_buffer);
|
||||
|
||||
vkGetBufferMemoryRequirements (gsk_vulkan_device_get_vk_device (device),
|
||||
self->vk_buffer,
|
||||
&requirements);
|
||||
|
||||
self->allocator = gsk_vulkan_device_find_allocator (device,
|
||||
requirements.memoryTypeBits,
|
||||
GSK_VULKAN_MEMORY_MAPPABLE,
|
||||
GSK_VULKAN_MEMORY_MAPPABLE);
|
||||
gsk_vulkan_alloc (self->allocator,
|
||||
requirements.size,
|
||||
requirements.alignment,
|
||||
&self->allocation);
|
||||
|
||||
GSK_VK_CHECK (vkBindBufferMemory, gsk_vulkan_device_get_vk_device (device),
|
||||
self->vk_buffer,
|
||||
self->allocation.vk_memory,
|
||||
self->allocation.offset);
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
GskVulkanBuffer *
|
||||
gsk_vulkan_buffer_new (GskVulkanDevice *device,
|
||||
gsize size)
|
||||
{
|
||||
return gsk_vulkan_buffer_new_internal (device, size,
|
||||
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
|
||||
| VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
|
||||
}
|
||||
|
||||
GskVulkanBuffer *
|
||||
gsk_vulkan_buffer_new_storage (GskVulkanDevice *device,
|
||||
gsize size)
|
||||
{
|
||||
return gsk_vulkan_buffer_new_internal (device, size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
|
||||
}
|
||||
|
||||
GskVulkanBuffer *
|
||||
gsk_vulkan_buffer_new_map (GskVulkanDevice *device,
|
||||
gsize size,
|
||||
GskVulkanMapMode mode)
|
||||
{
|
||||
return gsk_vulkan_buffer_new_internal (device,
|
||||
size,
|
||||
(mode & GSK_VULKAN_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0) |
|
||||
(mode & GSK_VULKAN_WRITE ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0));
|
||||
}
|
||||
|
||||
void
|
||||
gsk_vulkan_buffer_free (GskVulkanBuffer *self)
|
||||
{
|
||||
vkDestroyBuffer (gsk_vulkan_device_get_vk_device (self->device),
|
||||
self->vk_buffer,
|
||||
NULL);
|
||||
|
||||
gsk_vulkan_free (self->allocator, &self->allocation);
|
||||
|
||||
g_object_unref (self->device);
|
||||
|
||||
g_free (self);
|
||||
}
|
||||
|
||||
VkBuffer
|
||||
gsk_vulkan_buffer_get_buffer (GskVulkanBuffer *self)
|
||||
{
|
||||
return self->vk_buffer;
|
||||
}
|
||||
|
||||
gsize
|
||||
gsk_vulkan_buffer_get_size (GskVulkanBuffer *self)
|
||||
{
|
||||
return self->allocation.size;
|
||||
}
|
||||
|
||||
guchar *
|
||||
gsk_vulkan_buffer_get_data (GskVulkanBuffer *self)
|
||||
{
|
||||
return self->allocation.map;
|
||||
}
|
||||
|
30
gsk/gpu/gskvulkanbufferprivate.h
Normal file
30
gsk/gpu/gskvulkanbufferprivate.h
Normal file
@ -0,0 +1,30 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
typedef struct _GskVulkanBuffer GskVulkanBuffer;
|
||||
|
||||
typedef enum
|
||||
{
|
||||
GSK_VULKAN_READ = (1 << 0),
|
||||
GSK_VULKAN_WRITE = (1 << 1),
|
||||
GSK_VULKAN_READWRITE = GSK_VULKAN_READ | GSK_VULKAN_WRITE
|
||||
} GskVulkanMapMode;
|
||||
|
||||
GskVulkanBuffer * gsk_vulkan_buffer_new (GskVulkanDevice *device,
|
||||
gsize size);
|
||||
GskVulkanBuffer * gsk_vulkan_buffer_new_storage (GskVulkanDevice *device,
|
||||
gsize size);
|
||||
GskVulkanBuffer * gsk_vulkan_buffer_new_map (GskVulkanDevice *device,
|
||||
gsize size,
|
||||
GskVulkanMapMode mode);
|
||||
void gsk_vulkan_buffer_free (GskVulkanBuffer *buffer);
|
||||
|
||||
VkBuffer gsk_vulkan_buffer_get_buffer (GskVulkanBuffer *self);
|
||||
gsize gsk_vulkan_buffer_get_size (GskVulkanBuffer *self);
|
||||
guchar * gsk_vulkan_buffer_get_data (GskVulkanBuffer *self);
|
||||
|
||||
G_END_DECLS
|
||||
|
210
gsk/gpu/gskvulkandevice.c
Normal file
210
gsk/gpu/gskvulkandevice.c
Normal file
@ -0,0 +1,210 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
|
||||
#include "gskvulkanimageprivate.h"
|
||||
|
||||
#include "gdk/gdkdisplayprivate.h"
|
||||
#include "gdk/gdkvulkancontextprivate.h"
|
||||
|
||||
/* Per-display GPU device for the Vulkan backend. */
struct _GskVulkanDevice
{
  GskGpuDevice parent_instance;

  /* One allocator per Vulkan memory type, created lazily in
   * gsk_vulkan_device_get_allocator(). */
  GskVulkanAllocator *allocators[VK_MAX_MEMORY_TYPES];

  /* Command pool all frames allocate their command buffers from. */
  VkCommandPool vk_command_pool;
};

struct _GskVulkanDeviceClass
{
  GskGpuDeviceClass parent_class;
};

G_DEFINE_TYPE (GskVulkanDevice, gsk_vulkan_device, GSK_TYPE_GPU_DEVICE)
|
||||
|
||||
/* GskGpuDevice::create_offscreen_image vfunc.
 *
 * NOTE(review): the @depth argument is currently ignored; offscreens are
 * always R8G8B8A8 premultiplied. Presumably a placeholder until
 * depth-dependent format selection is implemented — confirm. */
static GskGpuImage *
gsk_vulkan_device_create_offscreen_image (GskGpuDevice   *device,
                                          GdkMemoryDepth  depth,
                                          gsize           width,
                                          gsize           height)
{
  GskVulkanDevice *self = GSK_VULKAN_DEVICE (device);

  return gsk_vulkan_image_new_for_offscreen (self,
                                             GDK_MEMORY_R8G8B8A8_PREMULTIPLIED,
                                             width,
                                             height);
}
|
||||
|
||||
static GskGpuImage *
|
||||
gsk_vulkan_device_create_upload_image (GskGpuDevice *device,
|
||||
GdkMemoryFormat format,
|
||||
gsize width,
|
||||
gsize height)
|
||||
{
|
||||
GskVulkanDevice *self = GSK_VULKAN_DEVICE (device);
|
||||
|
||||
return gsk_vulkan_image_new_for_upload (self,
|
||||
format,
|
||||
width,
|
||||
height);
|
||||
}
|
||||
|
||||
static void
gsk_vulkan_device_finalize (GObject *object)
{
  GskVulkanDevice *self = GSK_VULKAN_DEVICE (object);
  GskGpuDevice *device = GSK_GPU_DEVICE (self);
  GdkDisplay *display;
  gsize i;

  /* Drop the per-display cache entry first so
   * gsk_vulkan_device_get_for_display() cannot hand out this dying object. */
  g_object_steal_data (G_OBJECT (gsk_gpu_device_get_display (device)), "-gsk-vulkan-device");

  display = gsk_gpu_device_get_display (device);

  vkDestroyCommandPool (display->vk_device,
                        self->vk_command_pool,
                        NULL);

  /* Allocators are created lazily, so some slots may still be NULL;
   * g_clear_pointer() handles that. */
  for (i = 0; i < VK_MAX_MEMORY_TYPES; i++)
    g_clear_pointer (&self->allocators[i], gsk_vulkan_allocator_unref);

  /* Pairs with gdk_display_init_vulkan() in
   * gsk_vulkan_device_get_for_display(). Must come after all Vulkan
   * objects above have been destroyed. */
  gdk_display_unref_vulkan (display);

  G_OBJECT_CLASS (gsk_vulkan_device_parent_class)->finalize (object);
}
|
||||
|
||||
/* Wires up the GskGpuDevice vfuncs and the GObject finalizer. */
static void
gsk_vulkan_device_class_init (GskVulkanDeviceClass *klass)
{
  GskGpuDeviceClass *gpu_device_class = GSK_GPU_DEVICE_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  gpu_device_class->create_offscreen_image = gsk_vulkan_device_create_offscreen_image;
  gpu_device_class->create_upload_image = gsk_vulkan_device_create_upload_image;

  object_class->finalize = gsk_vulkan_device_finalize;
}
|
||||
|
||||
/* No per-instance initialization; the real setup happens in
 * gsk_vulkan_device_setup() once the display is known. */
static void
gsk_vulkan_device_init (GskVulkanDevice *self)
{
}
|
||||
|
||||
/* One-time setup after the display is attached: creates the command
 * pool shared by all frames. RESET_COMMAND_BUFFER lets individual
 * command buffers be reset without resetting the whole pool. */
static void
gsk_vulkan_device_setup (GskVulkanDevice *self)
{
  GdkDisplay *display;

  display = gsk_gpu_device_get_display (GSK_GPU_DEVICE (self));

  GSK_VK_CHECK (vkCreateCommandPool, display->vk_device,
                                     &(const VkCommandPoolCreateInfo) {
                                         .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
                                         .queueFamilyIndex = display->vk_queue_family_index,
                                         .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                                     },
                                     NULL,
                                     &self->vk_command_pool);
}
|
||||
|
||||
/* Returns the per-display GskVulkanDevice singleton, creating it (and
 * initializing Vulkan on @display) on first use.
 *
 * Returns: (transfer full): the device, or NULL with @error set if
 *   Vulkan could not be initialized. */
GskGpuDevice *
gsk_vulkan_device_get_for_display (GdkDisplay  *display,
                                   GError     **error)
{
  GskVulkanDevice *self;

  self = g_object_get_data (G_OBJECT (display), "-gsk-vulkan-device");
  if (self)
    return GSK_GPU_DEVICE (g_object_ref (self));

  if (!gdk_display_init_vulkan (display, error))
    return NULL;

  self = g_object_new (GSK_TYPE_VULKAN_DEVICE, NULL);

  gsk_gpu_device_setup (GSK_GPU_DEVICE (self), display);
  gsk_vulkan_device_setup (self);

  /* Cached WITHOUT an extra ref; gsk_vulkan_device_finalize() steals
   * the data again, so the cache never outlives the object. */
  g_object_set_data (G_OBJECT (display), "-gsk-vulkan-device", self);

  return GSK_GPU_DEVICE (self);
}
|
||||
|
||||
/* Returns the VkDevice of the display this device was created for. */
VkDevice
gsk_vulkan_device_get_vk_device (GskVulkanDevice *self)
{
  return gsk_gpu_device_get_display (GSK_GPU_DEVICE (self))->vk_device;
}
|
||||
|
||||
/* Returns the VkPhysicalDevice backing the display's Vulkan device. */
VkPhysicalDevice
gsk_vulkan_device_get_vk_physical_device (GskVulkanDevice *self)
{
  return gsk_gpu_device_get_display (GSK_GPU_DEVICE (self))->vk_physical_device;
}
|
||||
|
||||
/* Returns the VkQueue that command buffers are submitted to. */
VkQueue
gsk_vulkan_device_get_vk_queue (GskVulkanDevice *self)
{
  return gsk_gpu_device_get_display (GSK_GPU_DEVICE (self))->vk_queue;
}
|
||||
|
||||
/* Returns the command pool created in gsk_vulkan_device_setup(). */
VkCommandPool
gsk_vulkan_device_get_vk_command_pool (GskVulkanDevice *self)
{
  return self->vk_command_pool;
}
|
||||
|
||||
/* Lazily creates the allocator for memory type @index: a direct
 * allocator wrapped in a buddy allocator that carves 1 MiB slabs. */
static GskVulkanAllocator *
gsk_vulkan_device_get_allocator (GskVulkanDevice    *self,
                                 gsize               index,
                                 const VkMemoryType *type)
{
  if (self->allocators[index] == NULL)
    {
      self->allocators[index] = gsk_vulkan_direct_allocator_new (gsk_vulkan_device_get_vk_device (self),
                                                                 index,
                                                                 type);
      /* The buddy allocator takes ownership of the direct allocator. */
      self->allocators[index] = gsk_vulkan_buddy_allocator_new (self->allocators[index],
                                                                1024 * 1024);
      //allocators[index] = gsk_vulkan_stats_allocator_new (allocators[index]);
    }

  return self->allocators[index];
}
|
||||
|
||||
/* following code found in
|
||||
* https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties.html */
|
||||
GskVulkanAllocator *
|
||||
gsk_vulkan_device_find_allocator (GskVulkanDevice *self,
|
||||
uint32_t allowed_types,
|
||||
VkMemoryPropertyFlags required_flags,
|
||||
VkMemoryPropertyFlags desired_flags)
|
||||
{
|
||||
VkPhysicalDeviceMemoryProperties properties;
|
||||
uint32_t i, found;
|
||||
|
||||
vkGetPhysicalDeviceMemoryProperties (gsk_vulkan_device_get_vk_physical_device (self),
|
||||
&properties);
|
||||
|
||||
found = properties.memoryTypeCount;
|
||||
for (i = 0; i < properties.memoryTypeCount; i++)
|
||||
{
|
||||
if (!(allowed_types & (1 << i)))
|
||||
continue;
|
||||
|
||||
if ((properties.memoryTypes[i].propertyFlags & required_flags) != required_flags)
|
||||
continue;
|
||||
|
||||
found = MIN (i, found);
|
||||
|
||||
if ((properties.memoryTypes[i].propertyFlags & desired_flags) == desired_flags)
|
||||
break;
|
||||
}
|
||||
|
||||
g_assert (found < properties.memoryTypeCount);
|
||||
|
||||
return gsk_vulkan_allocator_ref (gsk_vulkan_device_get_allocator (self, i, &properties.memoryTypes[i]));
|
||||
}
|
||||
|
42
gsk/gpu/gskvulkandeviceprivate.h
Normal file
42
gsk/gpu/gskvulkandeviceprivate.h
Normal file
@ -0,0 +1,42 @@
|
||||
#pragma once

#include "gskgpudeviceprivate.h"

#include "gskdebugprivate.h"
#include "gskvulkanmemoryprivate.h"

#include <gdk/gdkvulkancontext.h>

G_BEGIN_DECLS

#define GSK_TYPE_VULKAN_DEVICE (gsk_vulkan_device_get_type ())

G_DECLARE_FINAL_TYPE(GskVulkanDevice, gsk_vulkan_device, GSK, VULKAN_DEVICE, GskGpuDevice)

/* Returns (transfer full) the per-display device singleton, or NULL
 * with @error set when Vulkan cannot be initialized. */
GskGpuDevice *          gsk_vulkan_device_get_for_display               (GdkDisplay             *display,
                                                                         GError                **error);

VkDevice                gsk_vulkan_device_get_vk_device                 (GskVulkanDevice        *self);
VkPhysicalDevice        gsk_vulkan_device_get_vk_physical_device       (GskVulkanDevice        *self);
VkQueue                 gsk_vulkan_device_get_vk_queue                  (GskVulkanDevice        *self);
VkCommandPool           gsk_vulkan_device_get_vk_command_pool           (GskVulkanDevice        *self);

/* Returns a new ref on the allocator for a memory type matching the
 * given flag constraints. */
GskVulkanAllocator *    gsk_vulkan_device_find_allocator                (GskVulkanDevice        *self,
                                                                         uint32_t                allowed_types,
                                                                         VkMemoryPropertyFlags   required_flags,
                                                                         VkMemoryPropertyFlags   desired_flags);

/* Logs non-success VkResults (under the VULKAN debug flag) and passes
 * the result through unchanged; used via GSK_VK_CHECK below. */
static inline VkResult
gsk_vulkan_handle_result (VkResult    res,
                          const char *called_function)
{
  if (res != VK_SUCCESS)
    {
      GSK_DEBUG (VULKAN, "%s(): %s (%d)", called_function, gdk_vulkan_strerror (res), res);
    }
  return res;
}

/* Calls a Vulkan function and logs its result with the function name. */
#define GSK_VK_CHECK(func, ...) gsk_vulkan_handle_result (func (__VA_ARGS__), G_STRINGIFY (func))


G_END_DECLS
|
164
gsk/gpu/gskvulkanframe.c
Normal file
164
gsk/gpu/gskvulkanframe.c
Normal file
@ -0,0 +1,164 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskvulkanframeprivate.h"
|
||||
|
||||
#include "gskgpuopprivate.h"
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
|
||||
#include "gdk/gdkdisplayprivate.h"
|
||||
|
||||
/* One in-flight frame for the Vulkan backend. */
struct _GskVulkanFrame
{
  GskGpuFrame parent_instance;

  /* Signaled when this frame's submitted GPU work completes. */
  VkFence vk_fence;
  /* Primary command buffer all of this frame's ops are recorded into. */
  VkCommandBuffer vk_command_buffer;
};

struct _GskVulkanFrameClass
{
  GskGpuFrameClass parent_class;
};

G_DEFINE_TYPE (GskVulkanFrame, gsk_vulkan_frame, GSK_TYPE_GPU_FRAME)
|
||||
|
||||
static gboolean
|
||||
gsk_vulkan_frame_is_busy (GskGpuFrame *frame)
|
||||
{
|
||||
GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
|
||||
VkDevice device;
|
||||
|
||||
device = gsk_vulkan_device_get_vk_device (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));
|
||||
|
||||
return vkGetFenceStatus (device, self->vk_fence) == VK_NOT_READY;
|
||||
}
|
||||
|
||||
/* GskGpuFrame::setup vfunc: allocates this frame's primary command
 * buffer and its fence. */
static void
gsk_vulkan_frame_setup (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  GskVulkanDevice *device;
  VkDevice vk_device;
  VkCommandPool vk_command_pool;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame));
  vk_device = gsk_vulkan_device_get_vk_device (device);
  vk_command_pool = gsk_vulkan_device_get_vk_command_pool (device);

  GSK_VK_CHECK (vkAllocateCommandBuffers, vk_device,
                                          &(VkCommandBufferAllocateInfo) {
                                              .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
                                              .commandPool = vk_command_pool,
                                              .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
                                              .commandBufferCount = 1,
                                          },
                                          &self->vk_command_buffer);

  /* Created signaled so the first cleanup()'s vkWaitForFences returns
   * immediately instead of deadlocking on a never-submitted frame. */
  GSK_VK_CHECK (vkCreateFence, vk_device,
                               &(VkFenceCreateInfo) {
                                   .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
                                   .flags = VK_FENCE_CREATE_SIGNALED_BIT
                               },
                               NULL,
                               &self->vk_fence);
}
|
||||
|
||||
/* GskGpuFrame::cleanup vfunc: waits for the previous submission to
 * finish, then resets the fence and command buffer for reuse. */
static void
gsk_vulkan_frame_cleanup (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  VkDevice device;

  device = gsk_vulkan_device_get_vk_device (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));

  /* INT64_MAX nanoseconds (~292 years) is effectively an infinite wait. */
  GSK_VK_CHECK (vkWaitForFences, device,
                                 1,
                                 &self->vk_fence,
                                 VK_TRUE,
                                 INT64_MAX);

  GSK_VK_CHECK (vkResetFences, device,
                               1,
                               &self->vk_fence);

  GSK_VK_CHECK (vkResetCommandBuffer, self->vk_command_buffer,
                                      0);

  GSK_GPU_FRAME_CLASS (gsk_vulkan_frame_parent_class)->cleanup (frame);
}
|
||||
|
||||
/* GskGpuFrame::submit vfunc: records the op chain into this frame's
 * command buffer and submits it, with the frame's fence signaled on
 * completion. */
static void
gsk_vulkan_frame_submit (GskGpuFrame *frame,
                         GskGpuOp    *op)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);

  GSK_VK_CHECK (vkBeginCommandBuffer, self->vk_command_buffer,
                                      &(VkCommandBufferBeginInfo) {
                                          .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                                          .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
                                      });

  /* Each op records its commands and returns the next op to process
   * (ops may consume more than one list entry). */
  while (op)
    {
      op = gsk_gpu_op_vk_command (op, frame, VK_NULL_HANDLE, self->vk_command_buffer);
    }

  GSK_VK_CHECK (vkEndCommandBuffer, self->vk_command_buffer);

  GSK_VK_CHECK (vkQueueSubmit, gsk_vulkan_device_get_vk_queue (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame))),
                               1,
                               &(VkSubmitInfo) {
                                   .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                   .commandBufferCount = 1,
                                   .pCommandBuffers = &self->vk_command_buffer,
                               },
                               self->vk_fence);
}
|
||||
|
||||
/* Frees the frame's command buffer and fence.
 * NOTE(review): does not wait on the fence itself — presumably cleanup()
 * has always run before finalization; confirm the frame lifecycle. */
static void
gsk_vulkan_frame_finalize (GObject *object)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (object);
  GskVulkanDevice *device;
  VkDevice vk_device;
  VkCommandPool vk_command_pool;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (GSK_GPU_FRAME (self)));
  vk_device = gsk_vulkan_device_get_vk_device (device);
  vk_command_pool = gsk_vulkan_device_get_vk_command_pool (device);

  vkFreeCommandBuffers (vk_device,
                        vk_command_pool,
                        1, &self->vk_command_buffer);
  vkDestroyFence (vk_device,
                  self->vk_fence,
                  NULL);

  G_OBJECT_CLASS (gsk_vulkan_frame_parent_class)->finalize (object);
}
|
||||
|
||||
/* Wires up the GskGpuFrame vfuncs and the GObject finalizer. */
static void
gsk_vulkan_frame_class_init (GskVulkanFrameClass *klass)
{
  GskGpuFrameClass *gpu_frame_class = GSK_GPU_FRAME_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  gpu_frame_class->is_busy = gsk_vulkan_frame_is_busy;
  gpu_frame_class->setup = gsk_vulkan_frame_setup;
  gpu_frame_class->cleanup = gsk_vulkan_frame_cleanup;
  gpu_frame_class->submit = gsk_vulkan_frame_submit;

  object_class->finalize = gsk_vulkan_frame_finalize;
}
|
||||
|
||||
/* No per-instance initialization; Vulkan objects are created in the
 * setup() vfunc once the device is known. */
static void
gsk_vulkan_frame_init (GskVulkanFrame *self)
{
}
|
||||
|
||||
/* Returns the fence that signals completion of this frame's GPU work. */
VkFence
gsk_vulkan_frame_get_vk_fence (GskVulkanFrame *self)
{
  return self->vk_fence;
}
|
13
gsk/gpu/gskvulkanframeprivate.h
Normal file
13
gsk/gpu/gskvulkanframeprivate.h
Normal file
@ -0,0 +1,13 @@
|
||||
#pragma once

#include "gskgpuframeprivate.h"

G_BEGIN_DECLS

#define GSK_TYPE_VULKAN_FRAME (gsk_vulkan_frame_get_type ())

G_DECLARE_FINAL_TYPE (GskVulkanFrame, gsk_vulkan_frame, GSK, VULKAN_FRAME, GskGpuFrame)

/* Fence signaled when the frame's submitted GPU work has completed. */
VkFence                 gsk_vulkan_frame_get_vk_fence                   (GskVulkanFrame         *self) G_GNUC_PURE;

G_END_DECLS
|
943
gsk/gpu/gskvulkanimage.c
Normal file
943
gsk/gpu/gskvulkanimage.c
Normal file
@ -0,0 +1,943 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskvulkanimageprivate.h"
|
||||
|
||||
#include "gskvulkanbufferprivate.h"
|
||||
#include "gskvulkanmemoryprivate.h"
|
||||
|
||||
#include "gdk/gdkdisplayprivate.h"
|
||||
#include "gdk/gdkvulkancontextprivate.h"
|
||||
#include "gdk/gdkmemoryformatprivate.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
/* A GPU image backed by a VkImage, used for uploads, offscreens and
 * swapchain targets. */
struct _GskVulkanImage
{
  GskGpuImage parent_instance;

  /* Display whose Vulkan instance owns all handles below; holds a
   * display ref plus a gdk_display_ref_vulkan() ref. */
  GdkDisplay *display;

  VkFormat vk_format;
  VkImageTiling vk_tiling;
  VkImageUsageFlags vk_usage;
  VkImage vk_image;
  VkImageView vk_image_view;
  VkFramebuffer vk_framebuffer;
  /* Postprocessing (e.g. premultiplication) the chosen format requires. */
  GskVulkanImagePostprocess postprocess;

  /* Current pipeline-barrier state tracking for layout transitions. */
  VkPipelineStageFlags vk_pipeline_stage;
  VkImageLayout vk_image_layout;
  VkAccessFlags vk_access;

  /* Memory backing; unused for swapchain images. */
  GskVulkanAllocator *allocator;
  GskVulkanAllocation allocation;
};

G_DEFINE_TYPE (GskVulkanImage, gsk_vulkan_image, GSK_TYPE_GPU_IMAGE)

typedef struct _GskMemoryFormatInfo GskMemoryFormatInfo;

/* One candidate VkFormat (with component swizzle and required
 * postprocessing) for representing a GdkMemoryFormat. */
struct _GskMemoryFormatInfo
{
  VkFormat format;
  VkComponentMapping components;
  GskVulkanImagePostprocess postprocess;
};
|
||||
|
||||
/* Returns the candidate VkFormats (in order of preference, terminated by
 * VK_FORMAT_UNDEFINED) that can represent @format, each with the swizzle
 * and postprocessing needed to make it behave like @format. */
static const GskMemoryFormatInfo *
gsk_memory_format_get_vk_format_infos (GdkMemoryFormat format)
{
#define SWIZZLE(a, b, c, d) { VK_COMPONENT_SWIZZLE_ ## a, VK_COMPONENT_SWIZZLE_ ## b, VK_COMPONENT_SWIZZLE_ ## c, VK_COMPONENT_SWIZZLE_ ## d }
#define DEFAULT_SWIZZLE SWIZZLE (R, G, B, A)
  switch (format)
    {
      /* --- premultiplied 8-bit RGBA orderings --- */
      case GDK_MEMORY_A8B8G8R8_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(A, B, G, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }
      case GDK_MEMORY_B8G8R8A8_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_B8G8R8A8_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(B, G, R, A), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A8R8G8B8_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(G, B, A, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R8G8B8A8_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      /* --- straight-alpha variants: need a premultiply postprocess --- */
      case GDK_MEMORY_B8G8R8A8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_B8G8R8A8_UNORM, DEFAULT_SWIZZLE, GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(B, G, R, A), GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A8R8G8B8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(G, B, A, R), GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R8G8B8A8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, DEFAULT_SWIZZLE, GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A8B8G8R8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(A, B, G, R), GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      /* --- opaque (X) variants: force alpha to 1 via swizzle --- */
      case GDK_MEMORY_X8B8G8R8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(A, B, G, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }
      case GDK_MEMORY_B8G8R8X8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_B8G8R8A8_UNORM, SWIZZLE(R, G, B, ONE), 0 },
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(B, G, R, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_X8R8G8B8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(G, B, A, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R8G8B8X8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8A8_UNORM, SWIZZLE(R, G, B, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      /* --- 3-channel formats --- */
      case GDK_MEMORY_R8G8B8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8B8_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_B8G8R8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_B8G8R8_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_R8G8B8_UNORM, SWIZZLE(B, G, R, A), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R16G16B16:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      /* --- 16-bit and float formats --- */
      case GDK_MEMORY_R16G16B16A16_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16A16_UNORM, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R16G16B16A16:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16A16_UNORM, DEFAULT_SWIZZLE, GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R16G16B16_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16_SFLOAT, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R16G16B16A16_FLOAT_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16A16_SFLOAT, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R16G16B16A16_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16B16A16_SFLOAT, DEFAULT_SWIZZLE, GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R32G32B32_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R32G32B32_SFLOAT, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R32G32B32A32_SFLOAT, DEFAULT_SWIZZLE, 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_R32G32B32A32_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R32G32B32A32_SFLOAT, DEFAULT_SWIZZLE, GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      /* --- gray and alpha-only formats: replicate channels via swizzle --- */
      case GDK_MEMORY_G8A8_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8_UNORM, SWIZZLE (R, R, R, G), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_G8A8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8G8_UNORM, SWIZZLE (R, R, R, G), GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_G8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8_UNORM, SWIZZLE (R, R, R, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_G16A16_PREMULTIPLIED:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16_UNORM, SWIZZLE (R, R, R, G), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_G16A16:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16G16_UNORM, SWIZZLE (R, R, R, G), GSK_VULKAN_IMAGE_PREMULTIPLY },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_G16:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16_UNORM, SWIZZLE (R, R, R, ONE), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A8:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R8_UNORM, SWIZZLE (R, R, R, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A16:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16_UNORM, SWIZZLE (R, R, R, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A16_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R16_SFLOAT, SWIZZLE (R, R, R, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_A32_FLOAT:
        {
          static const GskMemoryFormatInfo info[] = {
            { VK_FORMAT_R32_SFLOAT, SWIZZLE (R, R, R, R), 0 },
            { VK_FORMAT_UNDEFINED }
          };
          return info;
        }

      case GDK_MEMORY_N_FORMATS:
      default:
        g_assert_not_reached ();
        return NULL;
    }
#undef DEFAULT_SWIZZLE
#undef SWIZZLE
}
|
||||
|
||||
static gboolean
|
||||
gsk_memory_format_info_is_framebuffer_compatible (const GskMemoryFormatInfo *format)
|
||||
{
|
||||
if (format->postprocess)
|
||||
return FALSE;
|
||||
|
||||
if (format->components.r != VK_COMPONENT_SWIZZLE_R ||
|
||||
format->components.g != VK_COMPONENT_SWIZZLE_G ||
|
||||
format->components.b != VK_COMPONENT_SWIZZLE_B ||
|
||||
format->components.a != VK_COMPONENT_SWIZZLE_A)
|
||||
return FALSE;
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
/* Returns the next format to try when no VkFormat candidate for @format
 * is supported. Repeated application converges on
 * GDK_MEMORY_R8G8B8A8_PREMULTIPLIED (see the cycle through the
 * float-premultiplied cases), which every implementation must support. */
static GdkMemoryFormat
gsk_memory_format_get_fallback (GdkMemoryFormat format)
{
  switch (format)
    {
      case GDK_MEMORY_A8B8G8R8_PREMULTIPLIED:
      case GDK_MEMORY_B8G8R8A8_PREMULTIPLIED:
      case GDK_MEMORY_A8R8G8B8_PREMULTIPLIED:
      case GDK_MEMORY_R8G8B8A8_PREMULTIPLIED:
      case GDK_MEMORY_B8G8R8A8:
      case GDK_MEMORY_A8R8G8B8:
      case GDK_MEMORY_R8G8B8A8:
      case GDK_MEMORY_A8B8G8R8:
      case GDK_MEMORY_R8G8B8:
        return GDK_MEMORY_R8G8B8A8_PREMULTIPLIED;

      case GDK_MEMORY_B8G8R8X8:
      case GDK_MEMORY_X8R8G8B8:
      case GDK_MEMORY_X8B8G8R8:
      case GDK_MEMORY_R8G8B8X8:
      case GDK_MEMORY_B8G8R8:
        return GDK_MEMORY_R8G8B8;

      case GDK_MEMORY_R16G16B16A16_PREMULTIPLIED:
        return GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED;

      case GDK_MEMORY_R16G16B16:
      case GDK_MEMORY_R16G16B16A16:
        return GDK_MEMORY_R16G16B16A16_PREMULTIPLIED;

      case GDK_MEMORY_R16G16B16A16_FLOAT_PREMULTIPLIED:
        return GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED;

      case GDK_MEMORY_R16G16B16_FLOAT:
      case GDK_MEMORY_R16G16B16A16_FLOAT:
        return GDK_MEMORY_R16G16B16A16_FLOAT_PREMULTIPLIED;

      case GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED:
        return GDK_MEMORY_R8G8B8A8_PREMULTIPLIED;

      case GDK_MEMORY_R32G32B32_FLOAT:
      case GDK_MEMORY_R32G32B32A32_FLOAT:
        return GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED;

      case GDK_MEMORY_G8A8_PREMULTIPLIED:
      case GDK_MEMORY_G8A8:
        return GDK_MEMORY_R8G8B8A8_PREMULTIPLIED;

      case GDK_MEMORY_G8:
        return GDK_MEMORY_R8G8B8;

      case GDK_MEMORY_G16A16_PREMULTIPLIED:
      case GDK_MEMORY_G16A16:
        return GDK_MEMORY_R16G16B16A16_PREMULTIPLIED;

      case GDK_MEMORY_G16:
        return GDK_MEMORY_R16G16B16;

      case GDK_MEMORY_A8:
        return GDK_MEMORY_R8G8B8A8_PREMULTIPLIED;
      case GDK_MEMORY_A16:
        return GDK_MEMORY_R16G16B16A16_PREMULTIPLIED;
      case GDK_MEMORY_A16_FLOAT:
        return GDK_MEMORY_R16G16B16A16_FLOAT_PREMULTIPLIED;
      case GDK_MEMORY_A32_FLOAT:
        return GDK_MEMORY_R32G32B32A32_FLOAT_PREMULTIPLIED;

      case GDK_MEMORY_N_FORMATS:
      default:
        return GDK_MEMORY_R8G8B8A8_PREMULTIPLIED;
    }
}
|
||||
|
||||
static gboolean
|
||||
gsk_vulkan_device_supports_format (GskVulkanDevice *device,
|
||||
VkFormat format,
|
||||
VkImageTiling tiling,
|
||||
VkImageUsageFlags usage,
|
||||
gsize width,
|
||||
gsize height)
|
||||
{
|
||||
VkPhysicalDevice vk_phys_device;
|
||||
VkFormatProperties properties;
|
||||
VkImageFormatProperties image_properties;
|
||||
VkFormatFeatureFlags features, required;
|
||||
VkResult res;
|
||||
|
||||
vk_phys_device = gsk_vulkan_device_get_vk_physical_device (device);
|
||||
|
||||
vkGetPhysicalDeviceFormatProperties (vk_phys_device,
|
||||
format,
|
||||
&properties);
|
||||
|
||||
switch ((int) tiling)
|
||||
{
|
||||
case VK_IMAGE_TILING_OPTIMAL:
|
||||
features = properties.optimalTilingFeatures;
|
||||
break;
|
||||
case VK_IMAGE_TILING_LINEAR:
|
||||
features = properties.optimalTilingFeatures;
|
||||
break;
|
||||
default:
|
||||
return FALSE;
|
||||
}
|
||||
required = 0;
|
||||
if (usage & VK_IMAGE_USAGE_SAMPLED_BIT)
|
||||
required |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
|
||||
if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
|
||||
required |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT;
|
||||
|
||||
if ((features & required) != required)
|
||||
return FALSE;
|
||||
|
||||
res = vkGetPhysicalDeviceImageFormatProperties (vk_phys_device,
|
||||
format,
|
||||
VK_IMAGE_TYPE_2D,
|
||||
tiling,
|
||||
usage,
|
||||
0,
|
||||
&image_properties);
|
||||
if (res != VK_SUCCESS)
|
||||
return FALSE;
|
||||
|
||||
if (image_properties.maxExtent.width < width ||
|
||||
image_properties.maxExtent.height < height)
|
||||
return FALSE;
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
/* Creates the 2D image view for @self using @format's VkFormat and
 * component swizzle; stores it in self->vk_image_view. */
static void
gsk_vulkan_image_create_view (GskVulkanImage            *self,
                              const GskMemoryFormatInfo *format)
{
  GSK_VK_CHECK (vkCreateImageView, self->display->vk_device,
                                   &(VkImageViewCreateInfo) {
                                       .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                                       .image = self->vk_image,
                                       .viewType = VK_IMAGE_VIEW_TYPE_2D,
                                       .format = format->format,
                                       .components = format->components,
                                       .subresourceRange = {
                                           .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                           .baseMipLevel = 0,
                                           .levelCount = VK_REMAINING_MIP_LEVELS,
                                           .baseArrayLayer = 0,
                                           .layerCount = 1,
                                       },
                                   },
                                   NULL,
                                   &self->vk_image_view);
}
|
||||
|
||||
/* Common constructor: picks a supported VkFormat for @format (falling
 * back to ever simpler GdkMemoryFormats until one works), creates the
 * VkImage, binds memory for it, and creates the image view.
 *
 * NOTE(review): the @memory parameter is currently unused — mappability
 * is derived from @tiling instead; presumably intentional for now,
 * confirm before removing. */
static GskVulkanImage *
gsk_vulkan_image_new (GskVulkanDevice           *device,
                      GdkMemoryFormat            format,
                      gsize                      width,
                      gsize                      height,
                      GskVulkanImagePostprocess  allowed_postprocess,
                      VkImageTiling              tiling,
                      VkImageUsageFlags          usage,
                      VkPipelineStageFlags       stage,
                      VkImageLayout              layout,
                      VkAccessFlags              access,
                      VkMemoryPropertyFlags      memory)
{
  VkMemoryRequirements requirements;
  GskVulkanImage *self;
  VkDevice vk_device;
  const GskMemoryFormatInfo *vk_format;

  g_assert (width > 0 && height > 0);

  /* Outer loop walks the GdkMemoryFormat fallback chain; inner loop
   * tries each VkFormat candidate for the current format. */
  while (TRUE)
    {
      for (vk_format = gsk_memory_format_get_vk_format_infos (format);
           vk_format->format != VK_FORMAT_UNDEFINED;
           vk_format++)
        {
          /* Skip candidates needing postprocessing the caller forbids. */
          if (vk_format->postprocess & ~allowed_postprocess)
            continue;

          /* Render targets must be identity-swizzled and postprocess-free. */
          if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT &&
              !gsk_memory_format_info_is_framebuffer_compatible (vk_format))
            continue;

          if (gsk_vulkan_device_supports_format (device,
                                                 vk_format->format,
                                                 tiling, usage,
                                                 width, height))
            break;

          /* A linear-tiling candidate may still work with optimal
           * tiling; accept it and switch tiling. */
          if (tiling != VK_IMAGE_TILING_OPTIMAL &&
              gsk_vulkan_device_supports_format (device,
                                                 vk_format->format,
                                                 VK_IMAGE_TILING_OPTIMAL, usage,
                                                 width, height))
            {
              tiling = VK_IMAGE_TILING_OPTIMAL;
              break;
            }
        }
      if (vk_format->format != VK_FORMAT_UNDEFINED)
        break;

      format = gsk_memory_format_get_fallback (format);
    }

  vk_device = gsk_vulkan_device_get_vk_device (device);

  self = g_object_new (GSK_TYPE_VULKAN_IMAGE, NULL);

  /* Hold both a display ref and a Vulkan-init ref so the VkDevice
   * outlives this image. */
  self->display = g_object_ref (gsk_gpu_device_get_display (GSK_GPU_DEVICE (device)));
  gdk_display_ref_vulkan (self->display);
  self->vk_format = vk_format->format;
  self->postprocess = vk_format->postprocess;
  self->vk_tiling = tiling;
  self->vk_usage = usage;
  self->vk_pipeline_stage = stage;
  self->vk_image_layout = layout;
  self->vk_access = access;

  gsk_gpu_image_setup (GSK_GPU_IMAGE (self), format, width, height);

  GSK_VK_CHECK (vkCreateImage, vk_device,
                               &(VkImageCreateInfo) {
                                   .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
                                   .flags = 0,
                                   .imageType = VK_IMAGE_TYPE_2D,
                                   .format = vk_format->format,
                                   .extent = { width, height, 1 },
                                   .mipLevels = 1,
                                   .arrayLayers = 1,
                                   .samples = VK_SAMPLE_COUNT_1_BIT,
                                   .tiling = tiling,
                                   .usage = usage,
                                   .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
                                   .initialLayout = self->vk_image_layout,
                               },
                               NULL,
                               &self->vk_image);

  vkGetImageMemoryRequirements (vk_device,
                                self->vk_image,
                                &requirements);

  /* Linear images want host-mappable memory so the CPU can write pixels. */
  self->allocator = gsk_vulkan_device_find_allocator (device,
                                                      requirements.memoryTypeBits,
                                                      0,
                                                      tiling == VK_IMAGE_TILING_LINEAR ? GSK_VULKAN_MEMORY_MAPPABLE : 0);
  gsk_vulkan_alloc (self->allocator,
                    requirements.size,
                    requirements.alignment,
                    &self->allocation);

  GSK_VK_CHECK (vkBindImageMemory, vk_device,
                                   self->vk_image,
                                   self->allocation.vk_memory,
                                   self->allocation.offset);

  gsk_vulkan_image_create_view (self, vk_format);

  return self;
}
|
||||
|
||||
/* Creates an image suitable for uploading pixel data from the CPU:
 * linear tiling (so it can be mapped) with transfer and sampling usage. */
GskGpuImage *
gsk_vulkan_image_new_for_upload (GskVulkanDevice *device,
                                 GdkMemoryFormat  format,
                                 gsize            width,
                                 gsize            height)
{
  GskVulkanImage *self;

  /* -1 == all bits set: any postprocessing step is acceptable here. */
  self = gsk_vulkan_image_new (device,
                               format,
                               width,
                               height,
                               -1,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                               VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                               VK_IMAGE_USAGE_SAMPLED_BIT,
                               VK_PIPELINE_STAGE_TRANSFER_BIT,
                               VK_IMAGE_LAYOUT_PREINITIALIZED,
                               VK_ACCESS_TRANSFER_WRITE_BIT,
                               VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

  return GSK_GPU_IMAGE (self);
}
|
||||
|
||||
static gboolean
|
||||
gsk_vulkan_image_can_map (GskVulkanImage *self)
|
||||
{
|
||||
if (GSK_DEBUG_CHECK (STAGING))
|
||||
return FALSE;
|
||||
|
||||
if (self->vk_tiling != VK_IMAGE_TILING_LINEAR)
|
||||
return FALSE;
|
||||
|
||||
if (self->vk_image_layout != VK_IMAGE_LAYOUT_PREINITIALIZED &&
|
||||
self->vk_image_layout != VK_IMAGE_LAYOUT_GENERAL)
|
||||
return FALSE;
|
||||
|
||||
return self->allocation.map != NULL;
|
||||
}
|
||||
|
||||
/* Returns a pointer to the image's pixel data and stores the row stride
 * in @out_stride, or returns NULL when the image cannot be mapped
 * (see gsk_vulkan_image_can_map()). The pointer remains owned by @self. */
guchar *
gsk_vulkan_image_get_data (GskVulkanImage *self,
                           gsize          *out_stride)
{
  VkImageSubresource image_res;
  VkSubresourceLayout image_layout;

  if (!gsk_vulkan_image_can_map (self))
    return NULL;

  image_res.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  image_res.mipLevel = 0;
  image_res.arrayLayer = 0;

  /* ask Vulkan where mip level 0 / layer 0 lives inside the linear allocation */
  vkGetImageSubresourceLayout (self->display->vk_device,
                               self->vk_image, &image_res, &image_layout);

  *out_stride = image_layout.rowPitch;

  return self->allocation.map + image_layout.offset;
}
|
||||
|
||||
/* Wraps a swapchain-owned VkImage. We do NOT own @image, so no allocator
 * or memory is set up (the finalize path skips vkDestroyImage when
 * self->allocator is NULL). Layout starts UNDEFINED; the first transition
 * will move it into a usable state. */
GskGpuImage *
gsk_vulkan_image_new_for_swapchain (GskVulkanDevice *device,
                                    VkImage          image,
                                    VkFormat         format,
                                    gsize            width,
                                    gsize            height)
{
  GskVulkanImage *self;

  self = g_object_new (GSK_TYPE_VULKAN_IMAGE, NULL);

  self->display = g_object_ref (gsk_gpu_device_get_display (GSK_GPU_DEVICE (device)));
  gdk_display_ref_vulkan (self->display);
  self->vk_tiling = VK_IMAGE_TILING_OPTIMAL;
  self->vk_image = image;
  self->vk_format = format;
  self->vk_pipeline_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  self->vk_image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
  self->vk_access = 0;

  gsk_gpu_image_setup (GSK_GPU_IMAGE (self), GDK_MEMORY_DEFAULT, width, height);

  /* identity swizzle: the swapchain format is used as-is */
  gsk_vulkan_image_create_view (self,
                                &(GskMemoryFormatInfo) {
                                  format,
                                  { VK_COMPONENT_SWIZZLE_R,
                                    VK_COMPONENT_SWIZZLE_G,
                                    VK_COMPONENT_SWIZZLE_B,
                                    VK_COMPONENT_SWIZZLE_A
                                  }
                                });

  return GSK_GPU_IMAGE (self);
}
|
||||
|
||||
/* Creates a device-local, optimally-tiled image for use as a glyph/texture
 * atlas: written via transfer commands, read by the fragment shader. */
GskGpuImage *
gsk_vulkan_image_new_for_atlas (GskVulkanDevice *device,
                                gsize            width,
                                gsize            height)
{
  GskVulkanImage *self;

  self = gsk_vulkan_image_new (device,
                               GDK_MEMORY_DEFAULT,
                               width,
                               height,
                               0,
                               VK_IMAGE_TILING_OPTIMAL,
                               VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               0,
                               VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

  return GSK_GPU_IMAGE (self);
}
|
||||
|
||||
/* Creates a device-local render target for offscreen rendering: usable as
 * a color attachment, sampleable afterwards, and transferable both ways
 * (e.g. for downloads). @preferred_format may be adjusted by
 * gsk_vulkan_image_new() if unsupported — not visible here; confirm. */
GskGpuImage *
gsk_vulkan_image_new_for_offscreen (GskVulkanDevice *device,
                                    GdkMemoryFormat  preferred_format,
                                    gsize            width,
                                    gsize            height)
{
  GskVulkanImage *self;

  self = gsk_vulkan_image_new (device,
                               preferred_format,
                               width,
                               height,
                               0,
                               VK_IMAGE_TILING_OPTIMAL,
                               VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                               VK_IMAGE_USAGE_SAMPLED_BIT |
                               VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                               VK_IMAGE_USAGE_TRANSFER_DST_BIT,
                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
                               VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

  return GSK_GPU_IMAGE (self);
}
|
||||
|
||||
/* Releases all Vulkan objects owned by the image, then drops the display's
 * Vulkan reference and the display itself. */
static void
gsk_vulkan_image_finalize (GObject *object)
{
  GskVulkanImage *self = GSK_VULKAN_IMAGE (object);
  VkDevice device;

  device = self->display->vk_device;

  if (self->vk_framebuffer != VK_NULL_HANDLE)
    vkDestroyFramebuffer (device, self->vk_framebuffer, NULL);

  if (self->vk_image_view != VK_NULL_HANDLE)
    vkDestroyImageView (device, self->vk_image_view, NULL);

  /* allocator is NULL for for_swapchain() images, where we don't own
   * the VkImage and must not destroy or free it */
  if (self->allocator)
    {
      vkDestroyImage (device, self->vk_image, NULL);
      gsk_vulkan_free (self->allocator, &self->allocation);
    }

  gdk_display_unref_vulkan (self->display);
  g_object_unref (self->display);

  G_OBJECT_CLASS (gsk_vulkan_image_parent_class)->finalize (object);
}
|
||||
|
||||
static void
gsk_vulkan_image_class_init (GskVulkanImageClass *klass)
{
  G_OBJECT_CLASS (klass)->finalize = gsk_vulkan_image_finalize;
}

/* Nothing to do: all members are zero-initialized by GObject. */
static void
gsk_vulkan_image_init (GskVulkanImage *self)
{
}
|
||||
|
||||
/* Returns a framebuffer wrapping the image view for @render_pass, creating
 * it lazily on first call. It is destroyed in finalize().
 *
 * NOTE(review): the framebuffer is cached against the FIRST render pass it
 * was created for; later calls with a different render pass return the
 * cached one. Presumably all callers use compatible render passes — confirm. */
VkFramebuffer
gsk_vulkan_image_get_framebuffer (GskVulkanImage *self,
                                  VkRenderPass    render_pass)
{
  if (self->vk_framebuffer)
    return self->vk_framebuffer;

  GSK_VK_CHECK (vkCreateFramebuffer, self->display->vk_device,
                                     &(VkFramebufferCreateInfo) {
                                         .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
                                         .renderPass = render_pass,
                                         .attachmentCount = 1,
                                         .pAttachments = (VkImageView[1]) {
                                             self->vk_image_view,
                                         },
                                         .width = gsk_gpu_image_get_width (GSK_GPU_IMAGE (self)),
                                         .height = gsk_gpu_image_get_height (GSK_GPU_IMAGE (self)),
                                         .layers = 1
                                     },
                                     NULL,
                                     &self->vk_framebuffer);

  return self->vk_framebuffer;
}
|
||||
|
||||
/* Simple accessors for the image's cached Vulkan state. */

/* Postprocessing steps required before the image can be used (see
 * GskVulkanImagePostprocess in the header). */
GskVulkanImagePostprocess
gsk_vulkan_image_get_postprocess (GskVulkanImage *self)
{
  return self->postprocess;
}

VkImage
gsk_vulkan_image_get_vk_image (GskVulkanImage *self)
{
  return self->vk_image;
}

VkImageView
gsk_vulkan_image_get_image_view (GskVulkanImage *self)
{
  return self->vk_image_view;
}

/* Pipeline stage of the image's last recorded use. */
VkPipelineStageFlags
gsk_vulkan_image_get_vk_pipeline_stage (GskVulkanImage *self)
{
  return self->vk_pipeline_stage;
}

/* Layout the image is (or will be) in after previously recorded commands. */
VkImageLayout
gsk_vulkan_image_get_vk_image_layout (GskVulkanImage *self)
{
  return self->vk_image_layout;
}

/* Access mask of the image's last recorded use. */
VkAccessFlags
gsk_vulkan_image_get_vk_access (GskVulkanImage *self)
{
  return self->vk_access;
}
|
||||
|
||||
/* Records the stage/layout/access the image will be in after externally
 * recorded commands complete. This only updates bookkeeping — it does NOT
 * emit a barrier; use gsk_vulkan_image_transition() for that. */
void
gsk_vulkan_image_set_vk_image_layout (GskVulkanImage       *self,
                                      VkPipelineStageFlags  stage,
                                      VkImageLayout         image_layout,
                                      VkAccessFlags         access)
{
  self->vk_pipeline_stage = stage;
  self->vk_image_layout = image_layout;
  self->vk_access = access;
}
|
||||
|
||||
/* Records an image memory barrier on @command_buffer that transitions the
 * image from its currently tracked stage/layout/access to the given ones,
 * then updates the tracked state. No-op if the state already matches. */
void
gsk_vulkan_image_transition (GskVulkanImage       *self,
                             VkCommandBuffer       command_buffer,
                             VkPipelineStageFlags  stage,
                             VkImageLayout         image_layout,
                             VkAccessFlags         access)
{
  if (self->vk_pipeline_stage == stage &&
      self->vk_image_layout == image_layout &&
      self->vk_access == access)
    return;

  vkCmdPipelineBarrier (command_buffer,
                        self->vk_pipeline_stage,
                        stage,
                        0,
                        0, NULL,
                        0, NULL,
                        1, &(VkImageMemoryBarrier) {
                            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                            .srcAccessMask = self->vk_access,
                            .dstAccessMask = access,
                            .oldLayout = self->vk_image_layout,
                            .newLayout = image_layout,
                            /* no queue family ownership transfer */
                            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                            .image = self->vk_image,
                            .subresourceRange = {
                                .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                .baseMipLevel = 0,
                                .levelCount = VK_REMAINING_MIP_LEVELS,
                                .baseArrayLayer = 0,
                                .layerCount = 1
                            },
                        });

  gsk_vulkan_image_set_vk_image_layout (self, stage, image_layout, access);
}
|
||||
|
||||
/* The VkFormat the image was created with. */
VkFormat
gsk_vulkan_image_get_vk_format (GskVulkanImage *self)
{
  return self->vk_format;
}
|
||||
|
64
gsk/gpu/gskvulkanimageprivate.h
Normal file
64
gsk/gpu/gskvulkanimageprivate.h
Normal file
@ -0,0 +1,64 @@
|
||||
#pragma once
|
||||
|
||||
#include "gskgpuimageprivate.h"
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
/* required postprocessing steps before the image can be used */
|
||||
typedef enum
|
||||
{
|
||||
GSK_VULKAN_IMAGE_PREMULTIPLY = (1 << 0),
|
||||
} GskVulkanImagePostprocess;
|
||||
|
||||
#define GSK_TYPE_VULKAN_IMAGE (gsk_vulkan_image_get_type ())
|
||||
|
||||
G_DECLARE_FINAL_TYPE (GskVulkanImage, gsk_vulkan_image, GSK, VULKAN_IMAGE, GskGpuImage)
|
||||
|
||||
GskGpuImage * gsk_vulkan_image_new_for_swapchain (GskVulkanDevice *device,
|
||||
VkImage image,
|
||||
VkFormat format,
|
||||
gsize width,
|
||||
gsize height);
|
||||
|
||||
GskGpuImage * gsk_vulkan_image_new_for_atlas (GskVulkanDevice *device,
|
||||
gsize width,
|
||||
gsize height);
|
||||
GskGpuImage * gsk_vulkan_image_new_for_offscreen (GskVulkanDevice *device,
|
||||
GdkMemoryFormat preferred_format,
|
||||
gsize width,
|
||||
gsize height);
|
||||
|
||||
GskGpuImage * gsk_vulkan_image_new_for_upload (GskVulkanDevice *device,
|
||||
GdkMemoryFormat format,
|
||||
gsize width,
|
||||
gsize height);
|
||||
guchar * gsk_vulkan_image_get_data (GskVulkanImage *self,
|
||||
gsize *out_stride);
|
||||
|
||||
GskVulkanImagePostprocess
|
||||
gsk_vulkan_image_get_postprocess (GskVulkanImage *self);
|
||||
VkPipelineStageFlags gsk_vulkan_image_get_vk_pipeline_stage (GskVulkanImage *self);
|
||||
VkImageLayout gsk_vulkan_image_get_vk_image_layout (GskVulkanImage *self);
|
||||
VkAccessFlags gsk_vulkan_image_get_vk_access (GskVulkanImage *self);
|
||||
void gsk_vulkan_image_set_vk_image_layout (GskVulkanImage *self,
|
||||
VkPipelineStageFlags stage,
|
||||
VkImageLayout image_layout,
|
||||
VkAccessFlags access);
|
||||
void gsk_vulkan_image_transition (GskVulkanImage *self,
|
||||
VkCommandBuffer command_buffer,
|
||||
VkPipelineStageFlags stage,
|
||||
VkImageLayout image_layout,
|
||||
VkAccessFlags access);
|
||||
/* Convenience wrapper: transition @image so it can be sampled from a
 * fragment shader.
 *
 * NOTE(review): gsk_vulkan_image_transition() takes 5 arguments
 * (self, command_buffer, stage, layout, access), but the original macro
 * expanded to a 4-argument call — it could never have compiled at a use
 * site. A command buffer parameter has been added; since no valid caller
 * could exist, this is backward-safe. */
#define gdk_vulkan_image_transition_shader(image, commandbuffer) \
  gsk_vulkan_image_transition ((image), (commandbuffer), \
                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, \
                               VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_ACCESS_SHADER_READ_BIT)
|
||||
|
||||
VkImage gsk_vulkan_image_get_vk_image (GskVulkanImage *self);
|
||||
VkImageView gsk_vulkan_image_get_image_view (GskVulkanImage *self);
|
||||
VkFormat gsk_vulkan_image_get_vk_format (GskVulkanImage *self);
|
||||
VkFramebuffer gsk_vulkan_image_get_framebuffer (GskVulkanImage *self,
|
||||
VkRenderPass pass);
|
||||
|
||||
G_END_DECLS
|
||||
|
374
gsk/gpu/gskvulkanmemory.c
Normal file
374
gsk/gpu/gskvulkanmemory.c
Normal file
@ -0,0 +1,374 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gskvulkanmemoryprivate.h"
|
||||
|
||||
/* for GSK_VK_CHECK */
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
|
||||
/* {{{ direct allocator ***/
|
||||
|
||||
typedef struct _GskVulkanDirectAllocator GskVulkanDirectAllocator;
|
||||
|
||||
struct _GskVulkanDirectAllocator
|
||||
{
|
||||
GskVulkanAllocator allocator_class;
|
||||
|
||||
VkDevice device; /* no reference held */
|
||||
uint32_t vk_memory_type_index;
|
||||
VkMemoryType vk_memory_type;
|
||||
};
|
||||
|
||||
static void
gsk_vulkan_direct_allocator_free_allocator (GskVulkanAllocator *allocator)
{
  g_free (allocator);
}

/* Allocates a dedicated VkDeviceMemory per request via vkAllocateMemory.
 * If the memory type is host-mappable, the whole allocation is mapped
 * immediately and alloc->map points at it; otherwise map is NULL.
 * The @alignment argument is not used explicitly — vkAllocateMemory
 * returns memory suitable for any resource of this type, and offset is 0. */
static void
gsk_vulkan_direct_allocator_alloc (GskVulkanAllocator  *allocator,
                                   VkDeviceSize         size,
                                   VkDeviceSize         alignment,
                                   GskVulkanAllocation *alloc)
{
  GskVulkanDirectAllocator *self = (GskVulkanDirectAllocator *) allocator;

  GSK_VK_CHECK (vkAllocateMemory, self->device,
                                  &(VkMemoryAllocateInfo) {
                                      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
                                      .allocationSize = size,
                                      .memoryTypeIndex = self->vk_memory_type_index
                                  },
                                  NULL,
                                  &alloc->vk_memory);

  /* only map when ALL mappability bits (visible|coherent|cached) are set */
  if ((self->vk_memory_type.propertyFlags & GSK_VULKAN_MEMORY_MAPPABLE) == GSK_VULKAN_MEMORY_MAPPABLE)
    {
      GSK_VK_CHECK (vkMapMemory, self->device,
                                 alloc->vk_memory,
                                 0,
                                 size,
                                 0,
                                 (void **) &alloc->map);
    }
  else
    {
      alloc->map = NULL;
    }

  alloc->offset = 0;
  alloc->size = size;
}

/* Unmaps (if mapped) and releases the dedicated VkDeviceMemory. */
static void
gsk_vulkan_direct_allocator_free (GskVulkanAllocator  *allocator,
                                  GskVulkanAllocation *alloc)
{
  GskVulkanDirectAllocator *self = (GskVulkanDirectAllocator *) allocator;

  if (alloc->map)
    vkUnmapMemory (self->device, alloc->vk_memory);

  vkFreeMemory (self->device,
                alloc->vk_memory,
                NULL);
}
|
||||
|
||||
GskVulkanAllocator *
|
||||
gsk_vulkan_direct_allocator_new (VkDevice device,
|
||||
uint32_t vk_type_index,
|
||||
const VkMemoryType *vk_type)
|
||||
{
|
||||
GskVulkanDirectAllocator *self;
|
||||
|
||||
self = g_new0 (GskVulkanDirectAllocator, 1);
|
||||
self->allocator_class.ref_count = 1;
|
||||
self->allocator_class.free_allocator = gsk_vulkan_direct_allocator_free_allocator;
|
||||
self->allocator_class.alloc = gsk_vulkan_direct_allocator_alloc;
|
||||
self->allocator_class.free = gsk_vulkan_direct_allocator_free;
|
||||
self->device = device;
|
||||
self->vk_memory_type_index = vk_type_index;
|
||||
self->vk_memory_type = *vk_type;
|
||||
|
||||
return (GskVulkanAllocator *) self;
|
||||
}
|
||||
|
||||
/* }}} */
|
||||
/* {{{ buddy allocator ***/
|
||||
|
||||
#define GDK_ARRAY_NAME gsk_vulkan_allocation_list
|
||||
#define GDK_ARRAY_TYPE_NAME GskVulkanAllocationList
|
||||
#define GDK_ARRAY_ELEMENT_TYPE GskVulkanAllocation
|
||||
#define GDK_ARRAY_BY_VALUE 1
|
||||
#define GDK_ARRAY_PREALLOC 4
|
||||
#define GDK_ARRAY_NO_MEMSET 1
|
||||
#include "gdk/gdkarrayimpl.c"
|
||||
|
||||
#define N_SUBDIVISIONS 10
|
||||
|
||||
typedef struct _GskVulkanBuddyAllocator GskVulkanBuddyAllocator;
|
||||
|
||||
struct _GskVulkanBuddyAllocator
|
||||
{
|
||||
GskVulkanAllocator allocator_class;
|
||||
|
||||
GskVulkanAllocator *allocator;
|
||||
|
||||
gsize block_size_slot;
|
||||
|
||||
GskVulkanAllocation cache;
|
||||
GskVulkanAllocationList free_lists[N_SUBDIVISIONS];
|
||||
};
|
||||
|
||||
/* Releases the cached block, all free-list bookkeeping, the underlying
 * allocator reference and the struct itself. Outstanding allocations
 * still held by callers are NOT tracked here. */
static void
gsk_vulkan_buddy_allocator_free_allocator (GskVulkanAllocator *allocator)
{
  GskVulkanBuddyAllocator *self = (GskVulkanBuddyAllocator *) allocator;
  gsize i;

  if (self->cache.vk_memory)
    gsk_vulkan_free (self->allocator, &self->cache);

  for (i = 0; i < N_SUBDIVISIONS; i++)
    {
      gsk_vulkan_allocation_list_clear (&self->free_lists[i]);
    }

  gsk_vulkan_allocator_unref (self->allocator);

  g_free (self);
}

/* must not be 0:
 * gets exponent for next power of 2 that's >= num.
 * So num=1234 gets 11, because 2048 = 2^11 */
static gsize
find_slot (gsize num)
{
  return g_bit_storage (num - 1);
}
|
||||
|
||||
/* Buddy allocation: requests larger than a block bypass to the underlying
 * allocator; otherwise a block (fresh, cached, or from a free list) is
 * repeatedly halved until the chunk size matches the request's power of 2. */
static void
gsk_vulkan_buddy_allocator_alloc (GskVulkanAllocator  *allocator,
                                  VkDeviceSize         size,
                                  VkDeviceSize         align,
                                  GskVulkanAllocation *alloc)
{
  GskVulkanBuddyAllocator *self = (GskVulkanBuddyAllocator *) allocator;
  gsize slot;
  int i;

  /* chunk sizes are powers of 2 and naturally aligned, so satisfying
   * alignment just means allocating at least that much */
  size = MAX (size, align);

  slot = find_slot (size);
  if (slot >= self->block_size_slot)
    {
      /* too big for buddy management: pass through */
      gsk_vulkan_alloc (self->allocator, size, align, alloc);
      return;
    }

  /* slot is now an index into free_lists: 0 = half-block chunks,
   * larger index = smaller chunks */
  slot = MIN (self->block_size_slot - slot, N_SUBDIVISIONS) - 1;
  /* find the smallest free chunk that is still big enough */
  for (i = slot; i >= 0; i--)
    {
      if (gsk_vulkan_allocation_list_get_size (&self->free_lists[i]) > 0)
        break;
    }
  if (i < 0)
    {
      /* no free chunk: take the cached whole block or allocate a new one */
      if (self->cache.vk_memory)
        {
          *alloc = self->cache;
          self->cache.vk_memory = NULL;
        }
      else
        {
          /* We force alignment to our size, so that we can use offset
           * to find the buddy allocation.
           */
          gsk_vulkan_alloc (self->allocator, 1 << self->block_size_slot, 1 << self->block_size_slot, alloc);
        }
    }
  else
    {
      /* pop the last chunk from the free list we found */
      gsize n = gsk_vulkan_allocation_list_get_size (&self->free_lists[i]);
      *alloc = *gsk_vulkan_allocation_list_get (&self->free_lists[i], n - 1);
      gsk_vulkan_allocation_list_set_size (&self->free_lists[i], n - 1);
    }

  /* halve the chunk until it matches the target slot, parking the upper
   * buddy of each split on the corresponding free list */
  while (i != slot)
    {
      i++;
      alloc->size >>= 1;
      gsk_vulkan_allocation_list_append (&self->free_lists[i], alloc);
      alloc->offset += alloc->size;
      if (alloc->map)
        alloc->map += alloc->size;
    }

  g_assert (alloc->size >= size);
}
|
||||
|
||||
/* Returns a chunk to the buddy allocator, coalescing with its buddy
 * (found via offset XOR size) as long as possible. Fully merged blocks
 * go to the one-entry cache or back to the underlying allocator. */
static void
gsk_vulkan_buddy_allocator_free (GskVulkanAllocator  *allocator,
                                 GskVulkanAllocation *alloc)
{
  GskVulkanBuddyAllocator *self = (GskVulkanBuddyAllocator *) allocator;
  gsize slot, i, n;

  slot = find_slot (alloc->size);
  if (slot >= self->block_size_slot)
    {
      /* was a pass-through allocation: free it the same way */
      gsk_vulkan_free (self->allocator, alloc);
      return;
    }

  slot = MIN (self->block_size_slot - slot, N_SUBDIVISIONS) - 1;
restart:
  n = gsk_vulkan_allocation_list_get_size (&self->free_lists[slot]);
  for (i = 0; i < n; i++)
    {
      GskVulkanAllocation *maybe_buddy = gsk_vulkan_allocation_list_index (&self->free_lists[slot], i);
      /* the buddy of a chunk differs exactly in the bit equal to its size */
      if (maybe_buddy->vk_memory == alloc->vk_memory &&
          maybe_buddy->offset == (alloc->offset ^ alloc->size))
        {
          /* remove the buddy from the free list (swap with last entry) */
          if (i < n - 1)
            *maybe_buddy = *gsk_vulkan_allocation_list_get (&self->free_lists[slot], n - 1);
          gsk_vulkan_allocation_list_set_size (&self->free_lists[slot], n - 1);
          /* merge: normalize to the lower half and double the size */
          if (alloc->map && alloc->offset & alloc->size)
            alloc->map -= alloc->size;
          alloc->offset &= ~alloc->size;
          alloc->size <<= 1;
          if (slot == 0)
            {
              /* a whole block reassembled: cache one, free the rest */
              if (self->cache.vk_memory == NULL)
                self->cache = *alloc;
              else
                gsk_vulkan_free (self->allocator, alloc);
              return;
            }
          else
            {
              slot--;
              /* no idea how to make this look good with loops */
              goto restart;
            }
        }
    }
  /* no buddy free: just park the chunk */
  gsk_vulkan_allocation_list_append (&self->free_lists[slot], alloc);
}
|
||||
|
||||
/* Creates a buddy allocator that carves blocks of @block_size (rounded up
 * to a power of 2) obtained from @allocator into power-of-2 chunks.
 * Takes ownership of the caller's reference on @allocator. */
GskVulkanAllocator *
gsk_vulkan_buddy_allocator_new (GskVulkanAllocator *allocator,
                                gsize               block_size)
{
  GskVulkanBuddyAllocator *self;
  gsize i;

  self = g_new0 (GskVulkanBuddyAllocator, 1);
  self->allocator_class.ref_count = 1;
  self->allocator_class.free_allocator = gsk_vulkan_buddy_allocator_free_allocator;
  self->allocator_class.alloc = gsk_vulkan_buddy_allocator_alloc;
  self->allocator_class.free = gsk_vulkan_buddy_allocator_free;
  self->allocator = allocator;
  self->block_size_slot = find_slot (block_size);

  for (i = 0; i < N_SUBDIVISIONS; i++)
    {
      gsk_vulkan_allocation_list_init (&self->free_lists[i]);
    }

  return (GskVulkanAllocator *) self;
}
|
||||
|
||||
/* }}} */
|
||||
/* {{{ stats allocator ***/
|
||||
|
||||
typedef struct _GskVulkanStatsAllocator GskVulkanStatsAllocator;
|
||||
|
||||
struct _GskVulkanStatsAllocator
|
||||
{
|
||||
GskVulkanAllocator allocator_class;
|
||||
|
||||
GskVulkanAllocator *allocator;
|
||||
|
||||
gsize n_alloc;
|
||||
gsize n_free;
|
||||
|
||||
gsize n_bytes_requested;
|
||||
gsize n_bytes_allocated;
|
||||
gsize n_bytes_freed;
|
||||
};
|
||||
|
||||
/* Prints running allocation statistics to stderr; @reason labels the event
 * that triggered the dump. Note: the overhead percentage divides by
 * n_bytes_requested, which is nonzero once any allocation happened. */
static void
gsk_vulkan_stats_allocator_dump_stats (GskVulkanStatsAllocator *self,
                                       const char              *reason)
{
  g_printerr ("%s\n", reason);
  g_printerr ("  %zu bytes requested in %zu allocations\n", self->n_bytes_requested, self->n_alloc);
  g_printerr ("  %zu bytes allocated (%.2f%% overhead)\n", self->n_bytes_allocated,
              (self->n_bytes_allocated - self->n_bytes_requested) * 100. / self->n_bytes_requested);
  g_printerr ("  %zu bytes freed in %zu frees\n", self->n_bytes_freed, self->n_free);
  g_printerr ("  %zu bytes remaining in %zu allocations\n",
              self->n_bytes_allocated - self->n_bytes_freed, self->n_alloc - self->n_free);
}

/* Asserts that everything was returned before tearing down the wrapped
 * allocator. */
static void
gsk_vulkan_stats_allocator_free_allocator (GskVulkanAllocator *allocator)
{
  GskVulkanStatsAllocator *self = (GskVulkanStatsAllocator *) allocator;

  g_assert (self->n_alloc == self->n_free);
  g_assert (self->n_bytes_allocated == self->n_bytes_freed);

  gsk_vulkan_allocator_unref (self->allocator);

  g_free (self);
}

/* Forwards to the wrapped allocator, then records and dumps stats. */
static void
gsk_vulkan_stats_allocator_alloc (GskVulkanAllocator  *allocator,
                                  VkDeviceSize         size,
                                  VkDeviceSize         align,
                                  GskVulkanAllocation *alloc)
{
  GskVulkanStatsAllocator *self = (GskVulkanStatsAllocator *) allocator;

  gsk_vulkan_alloc (self->allocator, size, align, alloc);

  self->n_alloc++;
  self->n_bytes_requested += size;
  /* alloc->size may exceed the requested size (rounding by the wrapped
   * allocator); the difference shows up as overhead in the dump */
  self->n_bytes_allocated += alloc->size;

  gsk_vulkan_stats_allocator_dump_stats (self, "alloc()");
}

/* Records the free, forwards it, then dumps stats. */
static void
gsk_vulkan_stats_allocator_free (GskVulkanAllocator  *allocator,
                                 GskVulkanAllocation *alloc)
{
  GskVulkanStatsAllocator *self = (GskVulkanStatsAllocator *) allocator;

  self->n_free++;
  self->n_bytes_freed += alloc->size;

  gsk_vulkan_free (self->allocator, alloc);

  gsk_vulkan_stats_allocator_dump_stats (self, "free()");
}

/* Creates a pass-through allocator that logs statistics for every
 * alloc/free on @allocator. Takes ownership of the caller's reference. */
GskVulkanAllocator *
gsk_vulkan_stats_allocator_new (GskVulkanAllocator *allocator)
{
  GskVulkanStatsAllocator *self;

  self = g_new0 (GskVulkanStatsAllocator, 1);
  self->allocator_class.ref_count = 1;
  self->allocator_class.free_allocator = gsk_vulkan_stats_allocator_free_allocator;
  self->allocator_class.alloc = gsk_vulkan_stats_allocator_alloc;
  self->allocator_class.free = gsk_vulkan_stats_allocator_free;
  self->allocator = allocator;

  return (GskVulkanAllocator *) self;
}
|
||||
|
||||
/* }}} */
|
||||
|
89
gsk/gpu/gskvulkanmemoryprivate.h
Normal file
89
gsk/gpu/gskvulkanmemoryprivate.h
Normal file
@ -0,0 +1,89 @@
|
||||
#pragma once
|
||||
|
||||
#include <gdk/gdk.h>
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GSK_VULKAN_MEMORY_MAPPABLE (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | \
|
||||
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | \
|
||||
VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
|
||||
|
||||
typedef struct _GskVulkanAllocator GskVulkanAllocator;
|
||||
typedef struct _GskVulkanAllocation GskVulkanAllocation;
|
||||
typedef struct _GskVulkanMemory GskVulkanMemory;
|
||||
|
||||
struct _GskVulkanAllocation
|
||||
{
|
||||
VkDeviceMemory vk_memory;
|
||||
guchar *map;
|
||||
VkDeviceSize offset;
|
||||
VkDeviceSize size;
|
||||
};
|
||||
|
||||
struct _GskVulkanAllocator
|
||||
{
|
||||
int ref_count;
|
||||
|
||||
void (* free_allocator) (GskVulkanAllocator *allocator);
|
||||
|
||||
void (* alloc) (GskVulkanAllocator *allocator,
|
||||
VkDeviceSize size,
|
||||
VkDeviceSize alignment,
|
||||
GskVulkanAllocation *out_alloc);
|
||||
void (* free) (GskVulkanAllocator *allocator,
|
||||
GskVulkanAllocation *alloc);
|
||||
};
|
||||
|
||||
static inline void gsk_vulkan_alloc (GskVulkanAllocator *allocator,
|
||||
VkDeviceSize size,
|
||||
VkDeviceSize alignment,
|
||||
GskVulkanAllocation *out_alloc);
|
||||
static inline void gsk_vulkan_free (GskVulkanAllocator *allocator,
|
||||
GskVulkanAllocation *alloc);
|
||||
|
||||
static inline GskVulkanAllocator *
|
||||
gsk_vulkan_allocator_ref (GskVulkanAllocator *allocator);
|
||||
static inline void gsk_vulkan_allocator_unref (GskVulkanAllocator *allocator);
|
||||
|
||||
GskVulkanAllocator * gsk_vulkan_direct_allocator_new (VkDevice device,
|
||||
uint32_t vk_type_index,
|
||||
const VkMemoryType *vk_type);
|
||||
GskVulkanAllocator * gsk_vulkan_buddy_allocator_new (GskVulkanAllocator *allocator,
|
||||
gsize block_size);
|
||||
GskVulkanAllocator * gsk_vulkan_stats_allocator_new (GskVulkanAllocator *allocator);
|
||||
|
||||
/* Thin vtable dispatch helpers for GskVulkanAllocator. */

static inline void
gsk_vulkan_alloc (GskVulkanAllocator  *allocator,
                  VkDeviceSize         size,
                  VkDeviceSize         alignment,
                  GskVulkanAllocation *out_alloc)
{
  allocator->alloc (allocator, size, alignment, out_alloc);
}

static inline void
gsk_vulkan_free (GskVulkanAllocator  *allocator,
                 GskVulkanAllocation *alloc)
{
  allocator->free (allocator, alloc);
}

/* NOTE(review): the refcount is a plain int, so ref/unref are not
 * thread-safe — presumably all use is confined to one thread; confirm. */
static inline GskVulkanAllocator *
gsk_vulkan_allocator_ref (GskVulkanAllocator *self)
{
  self->ref_count++;
  return self;
}

static inline void
gsk_vulkan_allocator_unref (GskVulkanAllocator *self)
{
  self->ref_count--;
  if (self->ref_count > 0)
    return;

  self->free_allocator (self);
}
|
||||
|
||||
G_END_DECLS
|
||||
|
194
gsk/gpu/gskvulkanrenderer.c
Normal file
194
gsk/gpu/gskvulkanrenderer.c
Normal file
@ -0,0 +1,194 @@
|
||||
#include "config.h"
|
||||
|
||||
#include "gsk/vulkan/gskvulkanrenderer.h"
|
||||
|
||||
#include "gskgpurendererprivate.h"
|
||||
#include "gskvulkandeviceprivate.h"
|
||||
#include "gskvulkanframeprivate.h"
|
||||
#include "gskvulkanimageprivate.h"
|
||||
|
||||
#include "gdk/gdkdisplayprivate.h"
|
||||
|
||||
struct _GskVulkanRenderer
|
||||
{
|
||||
GskGpuRenderer parent_instance;
|
||||
|
||||
guint n_targets;
|
||||
GskGpuImage **targets;
|
||||
};
|
||||
|
||||
struct _GskVulkanRendererClass
|
||||
{
|
||||
GskGpuRendererClass parent_class;
|
||||
};
|
||||
|
||||
G_DEFINE_TYPE (GskVulkanRenderer, gsk_vulkan_renderer, GSK_TYPE_GPU_RENDERER)
|
||||
|
||||
/* Drops all swapchain target images and resets the target array. */
static void
gsk_vulkan_renderer_free_targets (GskVulkanRenderer *self)
{
  guint i;

  for (i = 0; i < self->n_targets; i++)
    {
      g_object_unref (self->targets[i]);
    }

  g_clear_pointer (&self->targets, g_free);
  self->n_targets = 0;
}
|
||||
|
||||
/* "images-updated" handler: rebuilds the GskGpuImage wrappers around the
 * Vulkan context's swapchain images (called after swapchain recreation,
 * e.g. on resize). Sizes are the surface size in device pixels. */
static void
gsk_vulkan_renderer_update_images_cb (GdkVulkanContext  *context,
                                      GskVulkanRenderer *self)
{
  GskVulkanDevice *device;
  GdkSurface *surface;
  double scale;
  gsize width, height;
  guint i;

  surface = gsk_renderer_get_surface (GSK_RENDERER (self));
  if (surface == NULL)
    return;

  device = GSK_VULKAN_DEVICE (gsk_gpu_renderer_get_device (GSK_GPU_RENDERER (self)));

  gsk_vulkan_renderer_free_targets (self);

  self->n_targets = gdk_vulkan_context_get_n_images (context);
  self->targets = g_new (GskGpuImage *, self->n_targets);

  scale = gdk_surface_get_scale (surface);
  width = (gsize) ceil (gdk_surface_get_width (surface) * scale);
  height = (gsize) ceil (gdk_surface_get_height (surface) * scale);

  for (i = 0; i < self->n_targets; i++)
    {
      self->targets[i] = gsk_vulkan_image_new_for_swapchain (device,
                                                             gdk_vulkan_context_get_image (context, i),
                                                             gdk_vulkan_context_get_image_format (context),
                                                             width, height);
    }
}
|
||||
|
||||
/* GskGpuRenderer vfunc: creates a Vulkan draw context for @surface (or a
 * surfaceless one for @display), wires up swapchain-change notifications
 * and primes the initial target images. Returns NULL on error. */
static GdkDrawContext *
gsk_vulkan_renderer_create_context (GskGpuRenderer  *renderer,
                                    GdkDisplay      *display,
                                    GdkSurface      *surface,
                                    GError         **error)
{
  GskVulkanRenderer *self = GSK_VULKAN_RENDERER (renderer);
  GdkVulkanContext *context;

  if (surface)
    context = gdk_surface_create_vulkan_context (surface, error);
  else
    context = gdk_display_create_vulkan_context (display, error);

  if (context == NULL)
    return NULL;

  g_signal_connect (context,
                    "images-updated",
                    G_CALLBACK (gsk_vulkan_renderer_update_images_cb),
                    self);
  /* run the handler once so targets exist before the first frame */
  gsk_vulkan_renderer_update_images_cb (context, self);

  return GDK_DRAW_CONTEXT (context);
}
|
||||
|
||||
/* Vulkan has no notion of a "current" context, so this vfunc is a no-op
 * (it exists for the GL backend's benefit). */
static void
gsk_vulkan_renderer_make_current (GskGpuRenderer *renderer)
{
}

/* Returns the swapchain target image the context will draw to this frame.
 * Owned by the renderer's target array; no reference is transferred. */
static GskGpuImage *
gsk_vulkan_renderer_get_backbuffer (GskGpuRenderer *renderer)
{
  GskVulkanRenderer *self = GSK_VULKAN_RENDERER (renderer);
  GdkVulkanContext *context;

  context = GDK_VULKAN_CONTEXT (gsk_gpu_renderer_get_context (renderer));

  return self->targets[gdk_vulkan_context_get_draw_index (context)];
}
|
||||
|
||||
/* Blocks until one of the given frames' fences signals.
 * NOTE(review): waitAll is VK_FALSE, so this returns as soon as ANY fence
 * signals — presumably the caller re-checks frame state afterwards;
 * confirm against the GskGpuRenderer wait contract. */
static void
gsk_vulkan_renderer_wait (GskGpuRenderer *renderer,
                          GskGpuFrame   **frames,
                          gsize           n_frames)
{
  VkFence *fences;
  VkDevice vk_device;
  gsize i;

  vk_device = gsk_vulkan_device_get_vk_device (GSK_VULKAN_DEVICE (gsk_gpu_renderer_get_device (renderer)));

  /* stack allocation: n_frames is small (the frames-in-flight count) */
  fences = g_alloca (sizeof (VkFence) * n_frames);

  for (i = 0; i < n_frames; i++)
    {
      fences[i] = gsk_vulkan_frame_get_vk_fence (GSK_VULKAN_FRAME (frames[i]));
    }

  GSK_VK_CHECK (vkWaitForFences, vk_device,
                                 n_frames,
                                 fences,
                                 VK_FALSE,
                                 INT64_MAX);
}
|
||||
|
||||
/* Drops the swapchain targets and disconnects from the context before
 * chaining up to tear down the rest of the renderer. */
static void
gsk_vulkan_renderer_unrealize (GskRenderer *renderer)
{
  GskVulkanRenderer *self = GSK_VULKAN_RENDERER (renderer);

  gsk_vulkan_renderer_free_targets (self);
  g_signal_handlers_disconnect_by_func (gsk_gpu_renderer_get_context (GSK_GPU_RENDERER (self)),
                                        gsk_vulkan_renderer_update_images_cb,
                                        self);

  GSK_RENDERER_CLASS (gsk_vulkan_renderer_parent_class)->unrealize (renderer);
}
|
||||
|
||||
static void
gsk_vulkan_renderer_class_init (GskVulkanRendererClass *klass)
{
  GskGpuRendererClass *gpu_renderer_class = GSK_GPU_RENDERER_CLASS (klass);
  GskRendererClass *renderer_class = GSK_RENDERER_CLASS (klass);

  /* frames created by the base class will be GskVulkanFrames */
  gpu_renderer_class->frame_type = GSK_TYPE_VULKAN_FRAME;

  gpu_renderer_class->get_device = gsk_vulkan_device_get_for_display;
  gpu_renderer_class->create_context = gsk_vulkan_renderer_create_context;
  gpu_renderer_class->make_current = gsk_vulkan_renderer_make_current;
  gpu_renderer_class->get_backbuffer = gsk_vulkan_renderer_get_backbuffer;
  gpu_renderer_class->wait = gsk_vulkan_renderer_wait;

  renderer_class->unrealize = gsk_vulkan_renderer_unrealize;
}

/* Nothing to do: members are zero-initialized by GObject. */
static void
gsk_vulkan_renderer_init (GskVulkanRenderer *self)
{
}

/**
 * gsk_vulkan_renderer_new:
 *
 * Creates a new Vulkan renderer.
 *
 * The Vulkan renderer is a renderer that uses the Vulkan library for
 * rendering.
 *
 * This function is only available when GTK was compiled with Vulkan
 * support.
 *
 * Returns: a new Vulkan renderer
 **/
GskRenderer *
gsk_vulkan_renderer_new (void)
{
  return g_object_new (GSK_TYPE_VULKAN_RENDERER, NULL);
}
|
@ -67,6 +67,20 @@ gsk_private_sources = files([
|
||||
'gl/gskglprofiler.c',
|
||||
'gl/stb_rect_pack.c',
|
||||
'gl/fp16.c',
|
||||
'gpu/gskgldevice.c',
|
||||
'gpu/gskglframe.c',
|
||||
'gpu/gskglimage.c',
|
||||
'gpu/gskgpublitop.c',
|
||||
'gpu/gskgpudownloadop.c',
|
||||
'gpu/gskgpudevice.c',
|
||||
'gpu/gskgpuframe.c',
|
||||
'gpu/gskgpuimage.c',
|
||||
'gpu/gskgpunodeprocessor.c',
|
||||
'gpu/gskgpuop.c',
|
||||
'gpu/gskgpuprint.c',
|
||||
'gpu/gskgpurenderer.c',
|
||||
'gpu/gskgpuuploadop.c',
|
||||
'gpu/gsknglrenderer.c',
|
||||
])
|
||||
|
||||
gsk_f16c_sources = files([
|
||||
@ -104,11 +118,30 @@ if get_variable('broadway_enabled')
|
||||
gsk_public_headers += gsk_public_broadway_headers
|
||||
endif
|
||||
|
||||
if have_vulkan
|
||||
gsk_public_vulkan_headers = files([
|
||||
'vulkan/gskvulkanrenderer.h'
|
||||
])
|
||||
install_headers(gsk_public_vulkan_headers, subdir: 'gtk-4.0/gsk/vulkan')
|
||||
gsk_public_headers += gsk_public_vulkan_headers
|
||||
endif
|
||||
|
||||
gsk_private_vulkan_shaders = []
|
||||
gsk_private_vulkan_compiled_shaders = []
|
||||
gsk_private_vulkan_compiled_shaders_deps = []
|
||||
gsk_private_vulkan_shader_headers = []
|
||||
|
||||
if have_vulkan
|
||||
gsk_private_sources += files([
|
||||
'gpu/gskvulkanbuffer.c',
|
||||
'gpu/gskvulkandevice.c',
|
||||
'gpu/gskvulkanframe.c',
|
||||
'gpu/gskvulkanimage.c',
|
||||
'gpu/gskvulkanmemory.c',
|
||||
'gpu/gskvulkanrenderer.c',
|
||||
])
|
||||
endif # have_vulkan
|
||||
|
||||
if get_variable('broadway_enabled')
|
||||
gsk_public_sources += files([
|
||||
'broadway/gskbroadwayrenderer.c',
|
||||
|
51
gsk/vulkan/gskvulkanrenderer.h
Normal file
51
gsk/vulkan/gskvulkanrenderer.h
Normal file
@ -0,0 +1,51 @@
|
||||
/*
 * Copyright © 2016 Benjamin Otte
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once

#include <gdk/gdk.h>
#include <gsk/gsk.h>

/* The whole API is only available when GTK was built with Vulkan support. */
#ifdef GDK_RENDERING_VULKAN

#include <vulkan/vulkan.h>

G_BEGIN_DECLS

#define GSK_TYPE_VULKAN_RENDERER (gsk_vulkan_renderer_get_type ())

#define GSK_VULKAN_RENDERER(obj)                   (G_TYPE_CHECK_INSTANCE_CAST ((obj), GSK_TYPE_VULKAN_RENDERER, GskVulkanRenderer))
#define GSK_IS_VULKAN_RENDERER(obj)                (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GSK_TYPE_VULKAN_RENDERER))
#define GSK_VULKAN_RENDERER_CLASS(klass)           (G_TYPE_CHECK_CLASS_CAST ((klass), GSK_TYPE_VULKAN_RENDERER, GskVulkanRendererClass))
#define GSK_IS_VULKAN_RENDERER_CLASS(klass)        (G_TYPE_CHECK_CLASS_TYPE ((klass), GSK_TYPE_VULKAN_RENDERER))
#define GSK_VULKAN_RENDERER_GET_CLASS(obj)         (G_TYPE_INSTANCE_GET_CLASS ((obj), GSK_TYPE_VULKAN_RENDERER, GskVulkanRendererClass))

/**
 * GskVulkanRenderer:
 *
 * A GSK renderer that is using Vulkan.
 */
typedef struct _GskVulkanRenderer GskVulkanRenderer;
typedef struct _GskVulkanRendererClass GskVulkanRendererClass;

GDK_AVAILABLE_IN_ALL
GType gsk_vulkan_renderer_get_type (void) G_GNUC_CONST;

GDK_AVAILABLE_IN_ALL
GskRenderer * gsk_vulkan_renderer_new (void);

/* Balance G_BEGIN_DECLS above — without this the extern "C" block is left
 * open when the header is included from C++. */
G_END_DECLS

#endif
|
@ -46,6 +46,10 @@
|
||||
#include <gsk/broadway/gskbroadwayrenderer.h>
|
||||
#endif
|
||||
|
||||
#ifdef GDK_RENDERING_VULKAN
|
||||
#include <gsk/vulkan/gskvulkanrenderer.h>
|
||||
#endif
|
||||
|
||||
#ifdef GDK_WINDOWING_X11
|
||||
#include <gdk/x11/gdkx.h>
|
||||
#endif
|
||||
|
Loading…
Reference in New Issue
Block a user