#include "config.h"

#include "gskvulkanframeprivate.h"

#include "gskgpuopprivate.h"
#include "gskvulkanbufferprivate.h"
#include "gskvulkandescriptorsprivate.h"
#include "gskvulkandeviceprivate.h"
#include "gskvulkanimageprivate.h"
#include "gskvulkanrealdescriptorsprivate.h"
#include "gskvulkansubdescriptorsprivate.h"

#include "gdk/gdkdisplayprivate.h"
#include "gdk/gdkdmabuftextureprivate.h"

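/* Instantiate the type-safe array helpers used below.  Each block pulls in
 * "gdk/gdkarrayimpl.c" with a different set of GDK_ARRAY_* defines and
 * generates the corresponding functions.  Roughly (illustrative sketch,
 * not the full generated API):
 *
 *   GskDescriptors arr;
 *   gsk_descriptors_init (&arr);
 *   gsk_descriptors_append (&arr, desc);
 *   desc = gsk_descriptors_get (&arr, 0);
 *   gsk_descriptors_clear (&arr);
 *
 * For gsk_descriptors, GDK_ARRAY_FREE_FUNC = g_object_unref means removed
 * elements are unreffed automatically.
 */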
#define GDK_ARRAY_NAME gsk_descriptors
#define GDK_ARRAY_TYPE_NAME GskDescriptors
#define GDK_ARRAY_ELEMENT_TYPE GskVulkanRealDescriptors *
#define GDK_ARRAY_FREE_FUNC g_object_unref
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"

#define GDK_ARRAY_NAME gsk_semaphores
#define GDK_ARRAY_TYPE_NAME GskSemaphores
#define GDK_ARRAY_ELEMENT_TYPE VkSemaphore
#define GDK_ARRAY_PREALLOC 16
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"

#define GDK_ARRAY_NAME gsk_pipeline_stages
#define GDK_ARRAY_TYPE_NAME GskPipelineStages
#define GDK_ARRAY_ELEMENT_TYPE VkPipelineStageFlags
#define GDK_ARRAY_PREALLOC 16
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"

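/* Semaphores collected while a frame is recorded: wait semaphores (plus the
 * pipeline stages to wait in) and signal semaphores.  They are handed to
 * vkQueueSubmit() in gsk_vulkan_frame_submit() below.
 */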
struct _GskVulkanSemaphores
{
  GskSemaphores wait_semaphores;
  GskPipelineStages wait_stages;
  GskSemaphores signal_semaphores;
};

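/* Per-frame state: the fence and command buffer used for this frame's
 * submission, the descriptor pool and the descriptor objects allocated for
 * it, and the current pool sizing used to decide when the pool must grow.
 */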
struct _GskVulkanFrame
{
  GskGpuFrame parent_instance;

  VkFence vk_fence;
  VkCommandBuffer vk_command_buffer;
  VkDescriptorPool vk_descriptor_pool;

  GskDescriptors descriptors;

  gsize pool_n_sets;
  gsize pool_n_images;
  gsize pool_n_buffers;
};

struct _GskVulkanFrameClass
{
  GskGpuFrameClass parent_class;
};

G_DEFINE_TYPE (GskVulkanFrame, gsk_vulkan_frame, GSK_TYPE_GPU_FRAME)

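/* A frame is busy as long as its fence has not been signaled, i.e. the GPU
 * has not finished the previously submitted command buffer.
 */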
static gboolean
gsk_vulkan_frame_is_busy (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  VkDevice device;

  device = gsk_vulkan_device_get_vk_device (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));

  return vkGetFenceStatus (device, self->vk_fence) == VK_NOT_READY;
}

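/* One-time setup: allocate a primary command buffer from the device's
 * command pool and create the fence pre-signaled, so a freshly set up
 * frame does not report itself as busy.
 */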
static void
gsk_vulkan_frame_setup (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  GskVulkanDevice *device;
  VkDevice vk_device;
  VkCommandPool vk_command_pool;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame));
  vk_device = gsk_vulkan_device_get_vk_device (device);
  vk_command_pool = gsk_vulkan_device_get_vk_command_pool (device);

  GSK_VK_CHECK (vkAllocateCommandBuffers, vk_device,
                &(VkCommandBufferAllocateInfo) {
                    .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
                    .commandPool = vk_command_pool,
                    .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
                    .commandBufferCount = 1,
                },
                &self->vk_command_buffer);

  GSK_VK_CHECK (vkCreateFence, vk_device,
                &(VkFenceCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
                    .flags = VK_FENCE_CREATE_SIGNALED_BIT
                },
                NULL,
                &self->vk_fence);
}

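/* Presumably run when the frame is recycled: wait for the previous
 * submission to finish, then reset the fence, command buffer and
 * descriptor pool and drop the per-frame descriptor objects.
 */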
static void
gsk_vulkan_frame_cleanup (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  GskVulkanDevice *device;
  VkDevice vk_device;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame));
  vk_device = gsk_vulkan_device_get_vk_device (device);

  GSK_VK_CHECK (vkWaitForFences, vk_device,
                1,
                &self->vk_fence,
                VK_TRUE,
                INT64_MAX);

  GSK_VK_CHECK (vkResetFences, vk_device,
                1,
                &self->vk_fence);

  GSK_VK_CHECK (vkResetCommandBuffer, self->vk_command_buffer,
                0);

  if (self->vk_descriptor_pool != VK_NULL_HANDLE)
    {
      GSK_VK_CHECK (vkResetDescriptorPool, vk_device,
                    self->vk_descriptor_pool,
                    0);
    }

  gsk_descriptors_set_size (&self->descriptors, 0);

  GSK_GPU_FRAME_CLASS (gsk_vulkan_frame_parent_class)->cleanup (frame);
}

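/* Texture upload vfunc: try the dmabuf import path first (when built with
 * dmabuf support), otherwise fall back to the generic GskGpuFrame upload.
 */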
static GskGpuImage *
gsk_vulkan_frame_upload_texture (GskGpuFrame *frame,
                                 gboolean     with_mipmap,
                                 GdkTexture  *texture)
{
#ifdef HAVE_DMABUF
  if (GDK_IS_DMABUF_TEXTURE (texture))
    {
      GskGpuImage *image = gsk_vulkan_image_new_for_dmabuf (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
                                                            texture);

      if (image)
        return image;
    }
#endif

  return GSK_GPU_FRAME_CLASS (gsk_vulkan_frame_parent_class)->upload_texture (frame, with_mipmap, texture);
}

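/* Grow the descriptor pool if needed, then write the descriptor sets.
 * n_sets is 2 * the number of descriptor objects (each GskVulkanRealDescriptors
 * appears to use two sets), and pool sizes are rounded up with
 * g_bit_nth_msf(), e.g.:
 *
 *   n_images = 9  ->  pool_n_images = 2 << g_bit_nth_msf (8, -1) = 2 << 3 = 16
 *   n_sets   = 3  ->  pool_n_sets   = 4 << g_bit_nth_msf (2, -1) = 4 << 1 = 8
 */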
static void
gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self)
{
  GskVulkanDevice *device;
  VkDevice vk_device;
  gsize i, n_images, n_buffers, n_sets;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (GSK_GPU_FRAME (self)));
  vk_device = gsk_vulkan_device_get_vk_device (device);

  n_images = 0;
  n_buffers = 0;
  n_sets = 2 * gsk_descriptors_get_size (&self->descriptors);
  for (i = 0; i < gsk_descriptors_get_size (&self->descriptors); i++)
    {
      gsize n_desc_images, n_desc_buffers;
      GskVulkanRealDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);

      gsk_vulkan_real_descriptors_prepare (desc, &n_desc_images, &n_desc_buffers);
      n_images += n_desc_images;
      n_buffers += n_desc_buffers;
    }

  if (n_sets > self->pool_n_sets ||
      n_images > self->pool_n_images ||
      n_buffers > self->pool_n_buffers)
    {
      if (self->vk_descriptor_pool != VK_NULL_HANDLE)
        {
          vkDestroyDescriptorPool (vk_device,
                                   self->vk_descriptor_pool,
                                   NULL);
          self->vk_descriptor_pool = VK_NULL_HANDLE;
        }
      if (n_sets > self->pool_n_sets)
        self->pool_n_sets = 4 << g_bit_nth_msf (n_sets - 1, -1);
      if (n_images > self->pool_n_images)
        self->pool_n_images = 2 << g_bit_nth_msf (n_images - 1, -1);
      if (n_buffers > self->pool_n_buffers)
        self->pool_n_buffers = 4 << g_bit_nth_msf (n_buffers - 1, -1);
    }

  if (self->vk_descriptor_pool == VK_NULL_HANDLE)
    {
      GSK_VK_CHECK (vkCreateDescriptorPool, vk_device,
                    &(VkDescriptorPoolCreateInfo) {
                        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
                        .flags = 0,
                        .maxSets = self->pool_n_sets,
                        .poolSizeCount = 2,
                        .pPoolSizes = (VkDescriptorPoolSize[2]) {
                            {
                                .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                                .descriptorCount = self->pool_n_images,
                            },
                            {
                                .type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                                .descriptorCount = self->pool_n_buffers,
                            }
                        }
                    },
                    NULL,
                    &self->vk_descriptor_pool);
    }

  for (i = 0; i < gsk_descriptors_get_size (&self->descriptors); i++)
    {
      GskVulkanRealDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);

      gsk_vulkan_real_descriptors_update_sets (desc, self->vk_descriptor_pool);
    }
}

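/* Hand out a GskGpuDescriptors object for ops to record image and buffer
 * references into.  When the device supports descriptor indexing
 * (GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING), sub-descriptors sharing the
 * last not-yet-full real descriptors object are returned; otherwise each
 * caller gets its own real descriptors object.
 */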
static GskGpuDescriptors *
gsk_vulkan_frame_create_descriptors (GskGpuFrame *frame)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);

  if (gsk_vulkan_device_has_feature (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)), GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING))
    {
      GskVulkanRealDescriptors *parent;

      if (gsk_descriptors_get_size (&self->descriptors) > 0)
        {
          parent = gsk_descriptors_get (&self->descriptors, gsk_descriptors_get_size (&self->descriptors) - 1);
          if (gsk_vulkan_real_descriptors_is_full (parent))
            parent = NULL;
        }
      else
        parent = NULL;

      if (parent == NULL)
        {
          parent = gsk_vulkan_real_descriptors_new (self);
          gsk_descriptors_append (&self->descriptors, parent);
        }

      return GSK_GPU_DESCRIPTORS (gsk_vulkan_sub_descriptors_new (GSK_VULKAN_DESCRIPTORS (parent)));
    }
  else
    {
      GskVulkanRealDescriptors *desc;

      desc = gsk_vulkan_real_descriptors_new (self);
      gsk_descriptors_append (&self->descriptors, desc);

      return GSK_GPU_DESCRIPTORS (g_object_ref (desc));
    }
}

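/* Buffer factory vfuncs: forward to the Vulkan buffer implementation for
 * the frame's device.
 */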
static GskGpuBuffer *
gsk_vulkan_frame_create_vertex_buffer (GskGpuFrame *frame,
                                       gsize        size)
{
  return gsk_vulkan_buffer_new_vertex (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)), size);
}

static GskGpuBuffer *
gsk_vulkan_frame_create_storage_buffer (GskGpuFrame *frame,
                                        gsize        size)
{
  return gsk_vulkan_buffer_new_storage (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)), size);
}

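/* Record and submit the frame: make sure at least one descriptors object
 * exists, prepare the descriptor sets, record all ops into the command
 * buffer and submit it together with the wait/signal semaphores the ops
 * collected.  The frame's fence is passed to vkQueueSubmit() so is_busy()
 * and cleanup() can track completion.
 */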
static void
gsk_vulkan_frame_submit (GskGpuFrame  *frame,
                         GskGpuBuffer *vertex_buffer,
                         GskGpuOp     *op)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
  GskVulkanSemaphores semaphores;
  GskVulkanCommandState state;

  if (gsk_descriptors_get_size (&self->descriptors) == 0)
    gsk_descriptors_append (&self->descriptors, gsk_vulkan_real_descriptors_new (self));

  gsk_vulkan_frame_prepare_descriptors (self);

  GSK_VK_CHECK (vkBeginCommandBuffer, self->vk_command_buffer,
                &(VkCommandBufferBeginInfo) {
                    .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
                    .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
                });

  if (vertex_buffer)
    vkCmdBindVertexBuffers (self->vk_command_buffer,
                            0,
                            1,
                            (VkBuffer[1]) {
                                gsk_vulkan_buffer_get_vk_buffer (GSK_VULKAN_BUFFER (vertex_buffer))
                            },
                            (VkDeviceSize[1]) { 0 });

  gsk_semaphores_init (&semaphores.wait_semaphores);
  gsk_pipeline_stages_init (&semaphores.wait_stages);
  gsk_semaphores_init (&semaphores.signal_semaphores);

  state.vk_command_buffer = self->vk_command_buffer;
  state.vk_render_pass = VK_NULL_HANDLE;
  state.vk_format = VK_FORMAT_UNDEFINED;
  state.blend = GSK_GPU_BLEND_OVER; /* should we have a BLEND_NONE? */
  state.desc = GSK_VULKAN_DESCRIPTORS (gsk_descriptors_get (&self->descriptors, 0));
  state.semaphores = &semaphores;

  gsk_vulkan_descriptors_bind (GSK_VULKAN_DESCRIPTORS (gsk_descriptors_get (&self->descriptors, 0)),
                               NULL,
                               state.vk_command_buffer);

  while (op)
    {
      op = gsk_gpu_op_vk_command (op, frame, &state);
    }

  GSK_VK_CHECK (vkEndCommandBuffer, self->vk_command_buffer);

  GSK_VK_CHECK (vkQueueSubmit, gsk_vulkan_device_get_vk_queue (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame))),
                1,
                &(VkSubmitInfo) {
                    .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                    .commandBufferCount = 1,
                    .pCommandBuffers = &self->vk_command_buffer,
                    .pWaitSemaphores = gsk_semaphores_get_data (&semaphores.wait_semaphores),
                    .pWaitDstStageMask = gsk_pipeline_stages_get_data (&semaphores.wait_stages),
                    .waitSemaphoreCount = gsk_semaphores_get_size (&semaphores.wait_semaphores),
                    .pSignalSemaphores = gsk_semaphores_get_data (&semaphores.signal_semaphores),
                    .signalSemaphoreCount = gsk_semaphores_get_size (&semaphores.signal_semaphores),
                },
                self->vk_fence);

  gsk_semaphores_clear (&semaphores.wait_semaphores);
  gsk_pipeline_stages_clear (&semaphores.wait_stages);
  gsk_semaphores_clear (&semaphores.signal_semaphores);
}

static void
gsk_vulkan_frame_finalize (GObject *object)
{
  GskVulkanFrame *self = GSK_VULKAN_FRAME (object);
  GskVulkanDevice *device;
  VkDevice vk_device;
  VkCommandPool vk_command_pool;

  device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (GSK_GPU_FRAME (self)));
  vk_device = gsk_vulkan_device_get_vk_device (device);
  vk_command_pool = gsk_vulkan_device_get_vk_command_pool (device);

  if (self->vk_descriptor_pool != VK_NULL_HANDLE)
    {
      vkDestroyDescriptorPool (vk_device,
                               self->vk_descriptor_pool,
                               NULL);
    }
  gsk_descriptors_clear (&self->descriptors);

  vkFreeCommandBuffers (vk_device,
                        vk_command_pool,
                        1, &self->vk_command_buffer);
  vkDestroyFence (vk_device,
                  self->vk_fence,
                  NULL);

  G_OBJECT_CLASS (gsk_vulkan_frame_parent_class)->finalize (object);
}

static void
gsk_vulkan_frame_class_init (GskVulkanFrameClass *klass)
{
  GskGpuFrameClass *gpu_frame_class = GSK_GPU_FRAME_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  gpu_frame_class->is_busy = gsk_vulkan_frame_is_busy;
  gpu_frame_class->setup = gsk_vulkan_frame_setup;
  gpu_frame_class->cleanup = gsk_vulkan_frame_cleanup;
  gpu_frame_class->upload_texture = gsk_vulkan_frame_upload_texture;
  gpu_frame_class->create_descriptors = gsk_vulkan_frame_create_descriptors;
  gpu_frame_class->create_vertex_buffer = gsk_vulkan_frame_create_vertex_buffer;
  gpu_frame_class->create_storage_buffer = gsk_vulkan_frame_create_storage_buffer;
  gpu_frame_class->submit = gsk_vulkan_frame_submit;

  object_class->finalize = gsk_vulkan_frame_finalize;
}

static void
gsk_vulkan_frame_init (GskVulkanFrame *self)
{
  gsk_descriptors_init (&self->descriptors);

  self->pool_n_sets = 4;
  self->pool_n_images = 8;
  self->pool_n_buffers = 8;
}

VkFence
gsk_vulkan_frame_get_vk_fence (GskVulkanFrame *self)
{
  return self->vk_fence;
}

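/* Helpers used while a frame is being recorded to attach wait/signal
 * semaphores to the upcoming vkQueueSubmit() call.
 */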
void
gsk_vulkan_semaphores_add_wait (GskVulkanSemaphores  *self,
                                VkSemaphore           semaphore,
                                VkPipelineStageFlags  stage)
{
  gsk_semaphores_append (&self->wait_semaphores, semaphore);
  gsk_pipeline_stages_append (&self->wait_stages, stage);
}

void
gsk_vulkan_semaphores_add_signal (GskVulkanSemaphores *self,
                                  VkSemaphore          semaphore)
{
  gsk_semaphores_append (&self->signal_semaphores, semaphore);
}