gpu: Add back single descriptors set usage with descriptor indexing

This commit is contained in:
Benjamin Otte 2023-11-05 10:00:11 +01:00
parent ae2020aca2
commit d50e235753
11 changed files with 562 additions and 344 deletions

View File

@ -44,7 +44,8 @@ gsk_gpu_globals_op_vk_command (GskGpuOp *op,
GskGpuGlobalsOp *self = (GskGpuGlobalsOp *) op;
vkCmdPushConstants (state->vk_command_buffer,
gsk_vulkan_descriptors_get_vk_pipeline_layout (state->desc),
gsk_vulkan_device_get_vk_pipeline_layout (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
gsk_vulkan_descriptors_get_pipeline_layout (state->desc)),
VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT,
0,
sizeof (self->instance),

View File

@ -84,7 +84,7 @@ gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp *self,
if (desc == NULL)
{
gsk_vulkan_descriptors_bind (GSK_VULKAN_DESCRIPTORS (shader->desc), state->vk_command_buffer);
gsk_vulkan_descriptors_bind (GSK_VULKAN_DESCRIPTORS (shader->desc), state->desc, state->vk_command_buffer);
state->desc = GSK_VULKAN_DESCRIPTORS (shader->desc);
}
desc = shader->desc;

View File

@ -49,7 +49,7 @@ gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
desc = GSK_VULKAN_DESCRIPTORS (self->desc);
if (desc && state->desc != desc)
{
gsk_vulkan_descriptors_bind (desc, state->vk_command_buffer);
gsk_vulkan_descriptors_bind (desc, state->desc, state->vk_command_buffer);
state->desc = desc;
}
@ -68,7 +68,7 @@ gsk_gpu_shader_op_vk_command_n (GskGpuOp *op,
vkCmdBindPipeline (state->vk_command_buffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
gsk_vulkan_device_get_vk_pipeline (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),
gsk_vulkan_descriptors_get_pipeline_layout (desc),
gsk_vulkan_descriptors_get_pipeline_layout (state->desc),
shader_op_class,
self->clip,
state->vk_format,

View File

@ -6,167 +6,27 @@
#include "gskvulkanframeprivate.h"
#include "gskvulkanimageprivate.h"
#define GDK_ARRAY_NAME gsk_descriptor_image_infos
#define GDK_ARRAY_TYPE_NAME GskDescriptorImageInfos
#define GDK_ARRAY_ELEMENT_TYPE VkDescriptorImageInfo
#define GDK_ARRAY_BY_VALUE 1
#define GDK_ARRAY_PREALLOC 128
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
#define GDK_ARRAY_NAME gsk_descriptor_buffer_infos
#define GDK_ARRAY_TYPE_NAME GskDescriptorBufferInfos
#define GDK_ARRAY_ELEMENT_TYPE VkDescriptorBufferInfo
#define GDK_ARRAY_BY_VALUE 1
#define GDK_ARRAY_PREALLOC 32
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
#define GDK_ARRAY_NAME gsk_samplers
#define GDK_ARRAY_TYPE_NAME GskSamplers
#define GDK_ARRAY_ELEMENT_TYPE VkSampler
#define GDK_ARRAY_PREALLOC 32
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
struct _GskVulkanDescriptors
{
GskGpuDescriptors parent_instance;
GskVulkanDevice *device;
GskVulkanPipelineLayout *pipeline_layout;
GskSamplers immutable_samplers;
GskDescriptorImageInfos descriptor_immutable_images;
GskDescriptorImageInfos descriptor_images;
GskDescriptorBufferInfos descriptor_buffers;
VkDescriptorSet descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS];
};
G_DEFINE_TYPE (GskVulkanDescriptors, gsk_vulkan_descriptors, GSK_TYPE_GPU_DESCRIPTORS)
static gboolean
gsk_vulkan_descriptors_add_image (GskGpuDescriptors *desc,
GskGpuImage *image,
GskGpuSampler sampler,
guint32 *out_descriptor)
{
GskVulkanDescriptors *self = GSK_VULKAN_DESCRIPTORS (desc);
GskVulkanImage *vulkan_image = GSK_VULKAN_IMAGE (image);
VkSampler vk_sampler;
guint32 result;
vk_sampler = gsk_vulkan_image_get_vk_sampler (vulkan_image);
if (vk_sampler)
{
if (gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) >=
gsk_vulkan_device_get_max_immutable_samplers (self->device))
return FALSE;
result = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) << 1 | 1;
gsk_samplers_append (&self->immutable_samplers, vk_sampler);
gsk_descriptor_image_infos_append (&self->descriptor_immutable_images,
&(VkDescriptorImageInfo) {
.imageView = gsk_vulkan_image_get_vk_image_view (vulkan_image),
.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
});
}
else
{
if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) >=
gsk_vulkan_device_get_max_samplers (self->device))
return FALSE;
result = gsk_descriptor_image_infos_get_size (&self->descriptor_images) << 1;
gsk_descriptor_image_infos_append (&self->descriptor_images,
&(VkDescriptorImageInfo) {
.sampler = gsk_vulkan_device_get_vk_sampler (self->device, sampler),
.imageView = gsk_vulkan_image_get_vk_image_view (vulkan_image),
.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
});
}
*out_descriptor = result;
return TRUE;
}
static void
gsk_vulkan_descriptors_finalize (GObject *object)
{
GskVulkanDescriptors *self = GSK_VULKAN_DESCRIPTORS (object);
gsk_samplers_clear (&self->immutable_samplers);
gsk_descriptor_image_infos_clear (&self->descriptor_immutable_images);
gsk_descriptor_image_infos_clear (&self->descriptor_images);
gsk_descriptor_buffer_infos_clear (&self->descriptor_buffers);
gsk_vulkan_device_release_pipeline_layout (self->device, self->pipeline_layout);
g_object_unref (self->device);
G_OBJECT_CLASS (gsk_vulkan_descriptors_parent_class)->finalize (object);
}
static void
gsk_vulkan_descriptors_class_init (GskVulkanDescriptorsClass *klass)
{
GskGpuDescriptorsClass *descriptors_class = GSK_GPU_DESCRIPTORS_CLASS (klass);
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->finalize = gsk_vulkan_descriptors_finalize;
descriptors_class->add_image = gsk_vulkan_descriptors_add_image;
}
static void
gsk_vulkan_descriptors_init (GskVulkanDescriptors *self)
{
gsk_samplers_init (&self->immutable_samplers);
gsk_descriptor_image_infos_init (&self->descriptor_immutable_images);
gsk_descriptor_image_infos_init (&self->descriptor_images);
gsk_descriptor_buffer_infos_init (&self->descriptor_buffers);
}
GskVulkanDescriptors *
gsk_vulkan_descriptors_new (GskVulkanDevice *device)
{
GskVulkanDescriptors *self;
self = g_object_new (GSK_TYPE_VULKAN_DESCRIPTORS, NULL);
self->device = g_object_ref (device);
return self;
}
gboolean
gsk_vulkan_descriptors_is_full (GskVulkanDescriptors *self)
{
return gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) >= gsk_vulkan_device_get_max_immutable_samplers (self->device) ||
gsk_descriptor_image_infos_get_size (&self->descriptor_images) >= gsk_vulkan_device_get_max_samplers (self->device) ||
gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) >= gsk_vulkan_device_get_max_buffers (self->device);
}
GskVulkanPipelineLayout *
gsk_vulkan_descriptors_get_pipeline_layout (GskVulkanDescriptors *self)
{
return self->pipeline_layout;
}
VkPipelineLayout
gsk_vulkan_descriptors_get_vk_pipeline_layout (GskVulkanDescriptors *self)
{
return gsk_vulkan_device_get_vk_pipeline_layout (self->device, self->pipeline_layout);
return GSK_VULKAN_DESCRIPTORS_GET_CLASS (self)->get_pipeline_layout (self);
}
void
gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
VkCommandBuffer command_buffer)
VkCommandBuffer vk_command_buffer)
{
GskGpuDescriptors *desc = GSK_GPU_DESCRIPTORS (self);
gsize i;
@ -174,179 +34,17 @@ gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
for (i = 0; i < gsk_gpu_descriptors_get_size (desc); i++)
{
gsk_vulkan_image_transition (GSK_VULKAN_IMAGE (gsk_gpu_descriptors_get_image (desc, i)),
command_buffer,
vk_command_buffer,
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_ACCESS_SHADER_READ_BIT);
}
}
static void
gsk_vulkan_descriptors_fill_sets (GskVulkanDescriptors *self)
{
gsize n_immutable_samplers, n_samplers, n_buffers;
if (gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING))
return;
/* If descriptor indexing isn't supported, all descriptors in the shaders
* must be properly setup. And that means we need to have
* descriptors for all of them.
*/
gsk_vulkan_device_get_pipeline_sizes (self->device,
self->pipeline_layout,
&n_immutable_samplers,
&n_samplers,
&n_buffers);
if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) == 0)
{
guint32 ignored;
/* We have no image, find any random image and attach it */
if (!gsk_gpu_descriptors_add_image (GSK_GPU_DESCRIPTORS (self),
gsk_gpu_device_get_atlas_image (GSK_GPU_DEVICE (self->device)),
GSK_GPU_SAMPLER_DEFAULT,
&ignored))
{
g_assert_not_reached ();
}
}
while (n_immutable_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images))
{
gsk_descriptor_image_infos_append (&self->descriptor_immutable_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
}
while (n_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_images))
{
gsk_descriptor_image_infos_append (&self->descriptor_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
}
/* That should be the storage buffer */
g_assert (gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) > 0);
while (n_buffers > gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers))
{
gsk_descriptor_buffer_infos_append (&self->descriptor_buffers, gsk_descriptor_buffer_infos_get (&self->descriptor_buffers, 0));
}
}
void
gsk_vulkan_descriptors_prepare (GskVulkanDescriptors *self,
gsize *n_images,
gsize *n_buffers)
{
self->pipeline_layout = gsk_vulkan_device_acquire_pipeline_layout (self->device,
gsk_samplers_get_data (&self->immutable_samplers),
gsk_samplers_get_size (&self->immutable_samplers),
gsk_descriptor_image_infos_get_size (&self->descriptor_images),
gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers));
gsk_vulkan_descriptors_fill_sets (self);
*n_images = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) +
gsk_descriptor_image_infos_get_size (&self->descriptor_images);
*n_buffers = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);
}
void
gsk_vulkan_descriptors_update_sets (GskVulkanDescriptors *self,
VkDescriptorPool vk_descriptor_pool)
{
VkWriteDescriptorSet write_descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS + 1];
gsize n_descriptor_sets;
VkDevice vk_device;
gboolean descriptor_indexing;
descriptor_indexing = gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING);
vk_device = gsk_vulkan_device_get_vk_device (self->device);
GSK_VK_CHECK (vkAllocateDescriptorSets, vk_device,
&(VkDescriptorSetAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
.descriptorPool = vk_descriptor_pool,
.descriptorSetCount = GSK_VULKAN_N_DESCRIPTOR_SETS,
.pSetLayouts = (VkDescriptorSetLayout[GSK_VULKAN_N_DESCRIPTOR_SETS]) {
gsk_vulkan_device_get_vk_image_set_layout (self->device, self->pipeline_layout),
gsk_vulkan_device_get_vk_buffer_set_layout (self->device, self->pipeline_layout),
},
.pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetVariableDescriptorCountAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
.descriptorSetCount = GSK_VULKAN_N_DESCRIPTOR_SETS,
.pDescriptorCounts = (uint32_t[GSK_VULKAN_N_DESCRIPTOR_SETS]) {
gsk_descriptor_image_infos_get_size (&self->descriptor_images),
gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers)
}
}
},
self->descriptor_sets);
n_descriptor_sets = 0;
if (gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) > 0)
{
write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
.dstSet = self->descriptor_sets[GSK_VULKAN_IMAGE_SET_LAYOUT],
.dstBinding = 0,
.dstArrayElement = 0,
.descriptorCount = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images),
.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
.pImageInfo = gsk_descriptor_image_infos_get_data (&self->descriptor_immutable_images)
};
}
if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) > 0)
{
write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
.dstSet = self->descriptor_sets[GSK_VULKAN_IMAGE_SET_LAYOUT],
.dstBinding = 1,
.dstArrayElement = 0,
.descriptorCount = gsk_descriptor_image_infos_get_size (&self->descriptor_images),
.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
.pImageInfo = gsk_descriptor_image_infos_get_data (&self->descriptor_images)
};
}
if (gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) > 0)
{
write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
.dstSet = self->descriptor_sets[GSK_VULKAN_BUFFER_SET_LAYOUT],
.dstBinding = 0,
.dstArrayElement = 0,
.descriptorCount = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers),
.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
.pBufferInfo = gsk_descriptor_buffer_infos_get_data (&self->descriptor_buffers)
};
}
vkUpdateDescriptorSets (vk_device,
n_descriptor_sets,
write_descriptor_sets,
0, NULL);
}
guint32
gsk_vulkan_descriptors_get_buffer_descriptor (GskVulkanDescriptors *self,
GskGpuBuffer *buffer)
{
guint32 result;
result = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);
gsk_descriptor_buffer_infos_append (&self->descriptor_buffers,
&(VkDescriptorBufferInfo) {
.buffer = gsk_vulkan_buffer_get_vk_buffer (GSK_VULKAN_BUFFER (buffer)),
.offset = 0,
.range = VK_WHOLE_SIZE
});
return result;
}
void
gsk_vulkan_descriptors_bind (GskVulkanDescriptors *self,
GskVulkanDescriptors *previous,
VkCommandBuffer vk_command_buffer)
{
vkCmdBindDescriptorSets (vk_command_buffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
gsk_vulkan_device_get_vk_pipeline_layout (self->device, self->pipeline_layout),
0,
G_N_ELEMENTS (self->descriptor_sets),
self->descriptor_sets,
0,
NULL);
return GSK_VULKAN_DESCRIPTORS_GET_CLASS (self)->bind (self, previous, vk_command_buffer);
}

View File

@ -2,34 +2,45 @@
#include "gskgpudescriptorsprivate.h"
#include "gskvulkanframeprivate.h"
#include "gskvulkandeviceprivate.h"
G_BEGIN_DECLS
#define GSK_TYPE_VULKAN_DESCRIPTORS (gsk_vulkan_descriptors_get_type ())
#define GSK_TYPE_VULKAN_DESCRIPTORS (gsk_vulkan_descriptors_get_type ())
#define GSK_VULKAN_DESCRIPTORS(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GSK_TYPE_VULKAN_DESCRIPTORS, GskVulkanDescriptors))
#define GSK_VULKAN_DESCRIPTORS_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), GSK_TYPE_VULKAN_DESCRIPTORS, GskVulkanDescriptorsClass))
#define GSK_IS_VULKAN_DESCRIPTORS(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GSK_TYPE_VULKAN_DESCRIPTORS))
#define GSK_IS_VULKAN_DESCRIPTORS_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), GSK_TYPE_VULKAN_DESCRIPTORS))
#define GSK_VULKAN_DESCRIPTORS_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GSK_TYPE_VULKAN_DESCRIPTORS, GskVulkanDescriptorsClass))
G_DECLARE_FINAL_TYPE (GskVulkanDescriptors, gsk_vulkan_descriptors, GSK, VULKAN_DESCRIPTORS, GskGpuDescriptors)
typedef struct _GskVulkanDescriptorsClass GskVulkanDescriptorsClass;
GskVulkanDescriptors * gsk_vulkan_descriptors_new (GskVulkanDevice *device);
struct _GskVulkanDescriptors
{
GskGpuDescriptors parent_instance;
};
gboolean gsk_vulkan_descriptors_is_full (GskVulkanDescriptors *self);
GskVulkanPipelineLayout * gsk_vulkan_descriptors_get_pipeline_layout
(GskVulkanDescriptors *self);
VkPipelineLayout gsk_vulkan_descriptors_get_vk_pipeline_layout
(GskVulkanDescriptors *self);
guint32 gsk_vulkan_descriptors_get_buffer_descriptor
(GskVulkanDescriptors *self,
GskGpuBuffer *buffer);
struct _GskVulkanDescriptorsClass
{
GskGpuDescriptorsClass parent_class;
void gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
VkCommandBuffer command_buffer);
void gsk_vulkan_descriptors_prepare (GskVulkanDescriptors *self,
gsize *n_images,
gsize *n_buffers);
void gsk_vulkan_descriptors_update_sets (GskVulkanDescriptors *self,
VkDescriptorPool vk_descriptor_pool);
void gsk_vulkan_descriptors_bind (GskVulkanDescriptors *self,
VkCommandBuffer vk_command_buffer);
GskVulkanPipelineLayout * (* get_pipeline_layout) (GskVulkanDescriptors *self);
void (* bind) (GskVulkanDescriptors *self,
GskVulkanDescriptors *previous,
VkCommandBuffer vk_command_buffer);
};
GType gsk_vulkan_descriptors_get_type (void) G_GNUC_CONST;
GskVulkanPipelineLayout * gsk_vulkan_descriptors_get_pipeline_layout (GskVulkanDescriptors *self);
void gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
VkCommandBuffer vk_command_buffer);
void gsk_vulkan_descriptors_bind (GskVulkanDescriptors *self,
GskVulkanDescriptors *previous,
VkCommandBuffer vk_command_buffer);
G_DEFINE_AUTOPTR_CLEANUP_FUNC(GskVulkanDescriptors, g_object_unref)
G_END_DECLS

View File

@ -7,13 +7,15 @@
#include "gskvulkandescriptorsprivate.h"
#include "gskvulkandeviceprivate.h"
#include "gskvulkanimageprivate.h"
#include "gskvulkanrealdescriptorsprivate.h"
#include "gskvulkansubdescriptorsprivate.h"
#include "gdk/gdkdisplayprivate.h"
#include "gdk/gdkdmabuftextureprivate.h"
#define GDK_ARRAY_NAME gsk_descriptors
#define GDK_ARRAY_TYPE_NAME GskDescriptors
#define GDK_ARRAY_ELEMENT_TYPE GskVulkanDescriptors *
#define GDK_ARRAY_ELEMENT_TYPE GskVulkanRealDescriptors *
#define GDK_ARRAY_FREE_FUNC g_object_unref
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
@ -151,14 +153,14 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
for (i = 0; i < gsk_descriptors_get_size (&self->descriptors); i++)
{
gsize n_desc_images, n_desc_buffers;
GskVulkanDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
GskVulkanRealDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
if (storage_buffer)
{
G_GNUC_UNUSED guint32 descriptor;
descriptor = gsk_vulkan_descriptors_get_buffer_descriptor (desc, storage_buffer);
descriptor = gsk_vulkan_real_descriptors_get_buffer_descriptor (desc, storage_buffer);
g_assert (descriptor == 0);
}
gsk_vulkan_descriptors_prepare (desc, &n_desc_images, &n_desc_buffers);
gsk_vulkan_real_descriptors_prepare (desc, &n_desc_images, &n_desc_buffers);
n_images += n_desc_images;
n_buffers += n_desc_buffers;
}
@ -207,9 +209,9 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
for (i = 0; i < gsk_descriptors_get_size (&self->descriptors); i++)
{
GskVulkanDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
GskVulkanRealDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
gsk_vulkan_descriptors_update_sets (desc, self->vk_descriptor_pool);
gsk_vulkan_real_descriptors_update_sets (desc, self->vk_descriptor_pool);
}
}
@ -217,12 +219,37 @@ static GskGpuDescriptors *
gsk_vulkan_frame_create_descriptors (GskGpuFrame *frame)
{
GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
GskVulkanDescriptors *desc;
desc = gsk_vulkan_descriptors_new (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));
gsk_descriptors_append (&self->descriptors, desc);
if (gsk_vulkan_device_has_feature (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)), GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING))
{
GskVulkanRealDescriptors *parent;
return GSK_GPU_DESCRIPTORS (g_object_ref (desc));
if (gsk_descriptors_get_size (&self->descriptors) > 0)
{
parent = gsk_descriptors_get (&self->descriptors, gsk_descriptors_get_size (&self->descriptors) - 1);
if (gsk_vulkan_real_descriptors_is_full (parent))
parent = NULL;
}
else
parent = NULL;
if (parent == NULL)
{
parent = gsk_vulkan_real_descriptors_new (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));
gsk_descriptors_append (&self->descriptors, parent);
}
return GSK_GPU_DESCRIPTORS (gsk_vulkan_sub_descriptors_new (GSK_VULKAN_DESCRIPTORS (parent)));
}
else
{
GskVulkanRealDescriptors *desc;
desc = gsk_vulkan_real_descriptors_new (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)));
gsk_descriptors_append (&self->descriptors, desc);
return GSK_GPU_DESCRIPTORS (g_object_ref (desc));
}
}
static GskGpuBuffer *
@ -248,6 +275,9 @@ gsk_vulkan_frame_submit (GskGpuFrame *frame,
GskVulkanFrame *self = GSK_VULKAN_FRAME (frame);
GskVulkanCommandState state;
if (gsk_descriptors_get_size (&self->descriptors) == 0)
gsk_descriptors_append (&self->descriptors, gsk_vulkan_real_descriptors_new (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame))));
gsk_vulkan_frame_prepare_descriptors (self, storage_buffer);
GSK_VK_CHECK (vkBeginCommandBuffer, self->vk_command_buffer,
@ -268,8 +298,10 @@ gsk_vulkan_frame_submit (GskGpuFrame *frame,
state.vk_command_buffer = self->vk_command_buffer;
state.vk_render_pass = VK_NULL_HANDLE;
state.vk_format = VK_FORMAT_UNDEFINED;
state.desc = gsk_descriptors_get (&self->descriptors, 0);
gsk_vulkan_descriptors_bind (gsk_descriptors_get (&self->descriptors, 0), state.vk_command_buffer);
state.desc = GSK_VULKAN_DESCRIPTORS (gsk_descriptors_get (&self->descriptors, 0));
gsk_vulkan_descriptors_bind (GSK_VULKAN_DESCRIPTORS (gsk_descriptors_get (&self->descriptors, 0)),
NULL,
state.vk_command_buffer);
while (op)
{

View File

@ -0,0 +1,342 @@
#include "config.h"
#include "gskvulkanrealdescriptorsprivate.h"
#include "gskvulkanbufferprivate.h"
#include "gskvulkanframeprivate.h"
#include "gskvulkanimageprivate.h"
#define GDK_ARRAY_NAME gsk_descriptor_image_infos
#define GDK_ARRAY_TYPE_NAME GskDescriptorImageInfos
#define GDK_ARRAY_ELEMENT_TYPE VkDescriptorImageInfo
#define GDK_ARRAY_BY_VALUE 1
#define GDK_ARRAY_PREALLOC 128
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
#define GDK_ARRAY_NAME gsk_descriptor_buffer_infos
#define GDK_ARRAY_TYPE_NAME GskDescriptorBufferInfos
#define GDK_ARRAY_ELEMENT_TYPE VkDescriptorBufferInfo
#define GDK_ARRAY_BY_VALUE 1
#define GDK_ARRAY_PREALLOC 32
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
#define GDK_ARRAY_NAME gsk_samplers
#define GDK_ARRAY_TYPE_NAME GskSamplers
#define GDK_ARRAY_ELEMENT_TYPE VkSampler
#define GDK_ARRAY_PREALLOC 32
#define GDK_ARRAY_NO_MEMSET 1
#include "gdk/gdkarrayimpl.c"
/* A "real" descriptors object: it owns the Vulkan descriptor sets and the
 * arrays of descriptor infos that get written into them. */
struct _GskVulkanRealDescriptors
{
  GskVulkanDescriptors parent_instance;

  GskVulkanDevice *device;                  /* strong reference, taken in _new() */
  GskVulkanPipelineLayout *pipeline_layout; /* acquired in _prepare(), released in finalize() */

  GskSamplers immutable_samplers;                      /* VkSamplers collected alongside descriptor_immutable_images */
  GskDescriptorImageInfos descriptor_immutable_images; /* images that carry their own (immutable) sampler */
  GskDescriptorImageInfos descriptor_images;           /* images paired with one of the device's samplers */
  GskDescriptorBufferInfos descriptor_buffers;         /* storage buffer descriptors */
  VkDescriptorSet descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS]; /* allocated in _update_sets() */
};
G_DEFINE_TYPE (GskVulkanRealDescriptors, gsk_vulkan_real_descriptors, GSK_TYPE_VULKAN_DESCRIPTORS)
/* GskVulkanDescriptorsClass.get_pipeline_layout implementation:
 * a real descriptors object owns its pipeline layout directly. */
static GskVulkanPipelineLayout *
gsk_vulkan_real_descriptors_get_pipeline_layout (GskVulkanDescriptors *desc)
{
  return GSK_VULKAN_REAL_DESCRIPTORS (desc)->pipeline_layout;
}
/* GskVulkanDescriptorsClass.bind implementation.
 *
 * Binds this object's descriptor sets for the graphics bind point.
 * Skips the vkCmdBindDescriptorSets() call entirely when @previous is
 * this very object, because the sets are then already bound. */
static void
gsk_vulkan_real_descriptors_bind (GskVulkanDescriptors *desc,
                                  GskVulkanDescriptors *previous,
                                  VkCommandBuffer       vk_command_buffer)
{
  GskVulkanRealDescriptors *self = GSK_VULKAN_REAL_DESCRIPTORS (desc);

  if (desc == previous)
    return;

  vkCmdBindDescriptorSets (vk_command_buffer,
                           VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gsk_vulkan_device_get_vk_pipeline_layout (self->device, self->pipeline_layout),
                           0,
                           G_N_ELEMENTS (self->descriptor_sets),
                           self->descriptor_sets,
                           0,
                           NULL);
}
/* GskGpuDescriptorsClass.add_image implementation.
 *
 * Appends @image to the immutable-sampler array when the image carries
 * its own VkSampler, otherwise to the regular image array.  The value
 * stored in @out_descriptor encodes the array index shifted left by one,
 * with bit 0 set iff the immutable array was used — so shaders can tell
 * the two descriptor arrays apart from the id alone.
 *
 * Returns FALSE without appending anything when the chosen array has
 * already reached the device's limit for it. */
static gboolean
gsk_vulkan_real_descriptors_add_image (GskGpuDescriptors *desc,
                                       GskGpuImage       *image,
                                       GskGpuSampler      sampler,
                                       guint32           *out_descriptor)
{
  GskVulkanRealDescriptors *self = GSK_VULKAN_REAL_DESCRIPTORS (desc);
  GskVulkanImage *vulkan_image = GSK_VULKAN_IMAGE (image);
  VkSampler vk_sampler;
  guint32 result;

  vk_sampler = gsk_vulkan_image_get_vk_sampler (vulkan_image);

  if (vk_sampler)
    {
      if (gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) >=
          gsk_vulkan_device_get_max_immutable_samplers (self->device))
        return FALSE;
      /* bit 0 set: immutable-sampler array */
      result = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) << 1 | 1;
      gsk_samplers_append (&self->immutable_samplers, vk_sampler);
      /* .sampler stays unset: the sampler is baked into the set layout */
      gsk_descriptor_image_infos_append (&self->descriptor_immutable_images,
                                         &(VkDescriptorImageInfo) {
                                             .imageView = gsk_vulkan_image_get_vk_image_view (vulkan_image),
                                             .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
                                         });
    }
  else
    {
      if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) >=
          gsk_vulkan_device_get_max_samplers (self->device))
        return FALSE;
      /* bit 0 clear: regular image array */
      result = gsk_descriptor_image_infos_get_size (&self->descriptor_images) << 1;
      gsk_descriptor_image_infos_append (&self->descriptor_images,
                                         &(VkDescriptorImageInfo) {
                                             .sampler = gsk_vulkan_device_get_vk_sampler (self->device, sampler),
                                             .imageView = gsk_vulkan_image_get_vk_image_view (vulkan_image),
                                             .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
                                         });
    }

  *out_descriptor = result;

  return TRUE;
}
/* GObject finalize: clears all descriptor arrays, gives the pipeline
 * layout back to the device and drops the device reference. */
static void
gsk_vulkan_real_descriptors_finalize (GObject *object)
{
  GskVulkanRealDescriptors *self = GSK_VULKAN_REAL_DESCRIPTORS (object);

  gsk_samplers_clear (&self->immutable_samplers);
  gsk_descriptor_image_infos_clear (&self->descriptor_immutable_images);
  gsk_descriptor_image_infos_clear (&self->descriptor_images);
  gsk_descriptor_buffer_infos_clear (&self->descriptor_buffers);

  /* NOTE(review): pipeline_layout is only set in _prepare(); presumably
   * release_pipeline_layout() tolerates NULL — confirm in the device code. */
  gsk_vulkan_device_release_pipeline_layout (self->device, self->pipeline_layout);
  g_object_unref (self->device);

  G_OBJECT_CLASS (gsk_vulkan_real_descriptors_parent_class)->finalize (object);
}
/* Wires up the GObject, GskGpuDescriptors and GskVulkanDescriptors vfuncs. */
static void
gsk_vulkan_real_descriptors_class_init (GskVulkanRealDescriptorsClass *klass)
{
  GskVulkanDescriptorsClass *vulkan_descriptors_class = GSK_VULKAN_DESCRIPTORS_CLASS (klass);
  GskGpuDescriptorsClass *descriptors_class = GSK_GPU_DESCRIPTORS_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->finalize = gsk_vulkan_real_descriptors_finalize;

  descriptors_class->add_image = gsk_vulkan_real_descriptors_add_image;

  vulkan_descriptors_class->get_pipeline_layout = gsk_vulkan_real_descriptors_get_pipeline_layout;
  vulkan_descriptors_class->bind = gsk_vulkan_real_descriptors_bind;
}
/* Instance init: start with empty descriptor arrays; device and
 * pipeline_layout are filled in later (_new() / _prepare()). */
static void
gsk_vulkan_real_descriptors_init (GskVulkanRealDescriptors *self)
{
  gsk_samplers_init (&self->immutable_samplers);
  gsk_descriptor_image_infos_init (&self->descriptor_immutable_images);
  gsk_descriptor_image_infos_init (&self->descriptor_images);
  gsk_descriptor_buffer_infos_init (&self->descriptor_buffers);
}
/* Creates a new real-descriptors object for @device.
 * The returned object holds a strong reference on @device. */
GskVulkanRealDescriptors *
gsk_vulkan_real_descriptors_new (GskVulkanDevice *device)
{
  GskVulkanRealDescriptors *result = g_object_new (GSK_TYPE_VULKAN_REAL_DESCRIPTORS, NULL);

  result->device = g_object_ref (device);

  return result;
}
/* Returns TRUE when any of the three descriptor arrays has reached the
 * device's limit for it, i.e. no further descriptor of that kind could
 * be added to this object. */
gboolean
gsk_vulkan_real_descriptors_is_full (GskVulkanRealDescriptors *self)
{
  if (gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) >=
      gsk_vulkan_device_get_max_immutable_samplers (self->device))
    return TRUE;

  if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) >=
      gsk_vulkan_device_get_max_samplers (self->device))
    return TRUE;

  return gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) >=
         gsk_vulkan_device_get_max_buffers (self->device);
}
/* Pads the descriptor arrays so every descriptor slot declared by the
 * pipeline layout is backed by a valid entry.
 *
 * With the descriptor-indexing feature this is unnecessary (partially
 * bound arrays are allowed) and the function returns immediately. */
static void
gsk_vulkan_real_descriptors_fill_sets (GskVulkanRealDescriptors *self)
{
  gsize n_immutable_samplers, n_samplers, n_buffers;

  if (gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING))
    return;

  /* If descriptor indexing isn't supported, all descriptors in the shaders
   * must be properly setup. And that means we need to have
   * descriptors for all of them.
   */
  gsk_vulkan_device_get_pipeline_sizes (self->device,
                                        self->pipeline_layout,
                                        &n_immutable_samplers,
                                        &n_samplers,
                                        &n_buffers);

  if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) == 0)
    {
      guint32 ignored;

      /* We have no image, find any random image and attach it */
      if (!gsk_gpu_descriptors_add_image (GSK_GPU_DESCRIPTORS (self),
                                          gsk_gpu_device_get_atlas_image (GSK_GPU_DEVICE (self->device)),
                                          GSK_GPU_SAMPLER_DEFAULT,
                                          &ignored))
        {
          g_assert_not_reached ();
        }
    }

  /* Duplicate the first regular image into every unfilled slot of both
   * image arrays — the guard above ensures that entry exists. */
  while (n_immutable_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images))
    {
      gsk_descriptor_image_infos_append (&self->descriptor_immutable_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
    }
  while (n_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_images))
    {
      gsk_descriptor_image_infos_append (&self->descriptor_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
    }

  /* That should be the storage buffer */
  g_assert (gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) > 0);
  while (n_buffers > gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers))
    {
      gsk_descriptor_buffer_infos_append (&self->descriptor_buffers, gsk_descriptor_buffer_infos_get (&self->descriptor_buffers, 0));
    }
}
/* Acquires the pipeline layout matching the collected descriptors, pads
 * the arrays via fill_sets(), and reports the totals the frame needs to
 * size its descriptor pool.
 *
 * @n_images: set to the total image descriptor count; the immutable part
 *   is counted as at least 1 (see the MAX below).
 * @n_buffers: set to the storage buffer descriptor count. */
void
gsk_vulkan_real_descriptors_prepare (GskVulkanRealDescriptors *self,
                                     gsize                    *n_images,
                                     gsize                    *n_buffers)
{
  self->pipeline_layout = gsk_vulkan_device_acquire_pipeline_layout (self->device,
                                                                     gsk_samplers_get_data (&self->immutable_samplers),
                                                                     gsk_samplers_get_size (&self->immutable_samplers),
                                                                     gsk_descriptor_image_infos_get_size (&self->descriptor_images),
                                                                     gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers));

  gsk_vulkan_real_descriptors_fill_sets (self);

  *n_images = MAX (1, gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images)) +
              gsk_descriptor_image_infos_get_size (&self->descriptor_images);
  *n_buffers = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);
}
/* Allocates this object's descriptor sets from @vk_descriptor_pool and
 * writes all collected image and buffer infos into them.
 *
 * Must run after gsk_vulkan_real_descriptors_prepare(), which creates
 * the pipeline layout the set layouts are taken from.  When the
 * descriptor-indexing feature is available, the sets are allocated with
 * variable descriptor counts matching the actual array sizes. */
void
gsk_vulkan_real_descriptors_update_sets (GskVulkanRealDescriptors *self,
                                         VkDescriptorPool          vk_descriptor_pool)
{
  /* One write per binding: immutable images + regular images share the
   * image set (2 writes), plus one for the buffer set. */
  VkWriteDescriptorSet write_descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS + 1];
  gsize n_descriptor_sets;
  VkDevice vk_device;
  gboolean descriptor_indexing;

  descriptor_indexing = gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING);
  vk_device = gsk_vulkan_device_get_vk_device (self->device);

  GSK_VK_CHECK (vkAllocateDescriptorSets, vk_device,
                &(VkDescriptorSetAllocateInfo) {
                    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
                    .descriptorPool = vk_descriptor_pool,
                    .descriptorSetCount = GSK_VULKAN_N_DESCRIPTOR_SETS,
                    .pSetLayouts = (VkDescriptorSetLayout[GSK_VULKAN_N_DESCRIPTOR_SETS]) {
                        gsk_vulkan_device_get_vk_image_set_layout (self->device, self->pipeline_layout),
                        gsk_vulkan_device_get_vk_buffer_set_layout (self->device, self->pipeline_layout),
                    },
                    /* Variable-count allocation only exists with descriptor indexing. */
                    .pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetVariableDescriptorCountAllocateInfo) {
                        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
                        .descriptorSetCount = GSK_VULKAN_N_DESCRIPTOR_SETS,
                        .pDescriptorCounts = (uint32_t[GSK_VULKAN_N_DESCRIPTOR_SETS]) {
                            gsk_descriptor_image_infos_get_size (&self->descriptor_images),
                            gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers)
                        }
                    }
                },
                self->descriptor_sets);

  n_descriptor_sets = 0;
  /* Image set, binding 0: images with immutable samplers. */
  if (gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) > 0)
    {
      write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .dstSet = self->descriptor_sets[GSK_VULKAN_IMAGE_SET_LAYOUT],
          .dstBinding = 0,
          .dstArrayElement = 0,
          .descriptorCount = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images),
          .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          .pImageInfo = gsk_descriptor_image_infos_get_data (&self->descriptor_immutable_images)
      };
    }
  /* Image set, binding 1: images paired with device samplers. */
  if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) > 0)
    {
      write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .dstSet = self->descriptor_sets[GSK_VULKAN_IMAGE_SET_LAYOUT],
          .dstBinding = 1,
          .dstArrayElement = 0,
          .descriptorCount = gsk_descriptor_image_infos_get_size (&self->descriptor_images),
          .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          .pImageInfo = gsk_descriptor_image_infos_get_data (&self->descriptor_images)
      };
    }
  /* Buffer set, binding 0: storage buffers. */
  if (gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) > 0)
    {
      write_descriptor_sets[n_descriptor_sets++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .dstSet = self->descriptor_sets[GSK_VULKAN_BUFFER_SET_LAYOUT],
          .dstBinding = 0,
          .dstArrayElement = 0,
          .descriptorCount = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers),
          .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
          .pBufferInfo = gsk_descriptor_buffer_infos_get_data (&self->descriptor_buffers)
      };
    }

  vkUpdateDescriptorSets (vk_device,
                          n_descriptor_sets,
                          write_descriptor_sets,
                          0, NULL);
}
/* Appends @buffer (whole range) to the storage buffer descriptors and
 * returns the index at which it was added. */
guint32
gsk_vulkan_real_descriptors_get_buffer_descriptor (GskVulkanRealDescriptors *self,
                                                   GskGpuBuffer             *buffer)
{
  guint32 result;

  result = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);

  gsk_descriptor_buffer_infos_append (&self->descriptor_buffers,
                                      &(VkDescriptorBufferInfo) {
                                          .buffer = gsk_vulkan_buffer_get_vk_buffer (GSK_VULKAN_BUFFER (buffer)),
                                          .offset = 0,
                                          .range = VK_WHOLE_SIZE
                                      });

  return result;
}

View File

@ -0,0 +1,25 @@
#pragma once
#include "gskvulkandescriptorsprivate.h"
G_BEGIN_DECLS
/* GskVulkanRealDescriptors: a GskVulkanDescriptors subclass that owns actual
 * Vulkan descriptor sets (as opposed to sub-descriptors that delegate to a
 * parent — see gskvulkansubdescriptorsprivate.h). */
#define GSK_TYPE_VULKAN_REAL_DESCRIPTORS (gsk_vulkan_real_descriptors_get_type ())
G_DECLARE_FINAL_TYPE (GskVulkanRealDescriptors, gsk_vulkan_real_descriptors, GSK, VULKAN_REAL_DESCRIPTORS, GskVulkanDescriptors)
/* Creates a new descriptors object for @device. */
GskVulkanRealDescriptors * gsk_vulkan_real_descriptors_new (GskVulkanDevice *device);
/* Returns TRUE when no more descriptors can be added and a new
 * descriptors object must be started. */
gboolean gsk_vulkan_real_descriptors_is_full (GskVulkanRealDescriptors *self);
/* Appends @buffer to the storage-buffer descriptor array and returns the
 * index shaders use to address it. */
guint32 gsk_vulkan_real_descriptors_get_buffer_descriptor
(GskVulkanRealDescriptors *self,
GskGpuBuffer *buffer);
/* Reports the number of image and buffer descriptors that will be needed,
 * so the caller can size the descriptor pool accordingly. */
void gsk_vulkan_real_descriptors_prepare (GskVulkanRealDescriptors *self,
gsize *n_images,
gsize *n_buffers);
/* Allocates the descriptor sets from @vk_descriptor_pool and writes all
 * collected image/buffer infos into them via vkUpdateDescriptorSets(). */
void gsk_vulkan_real_descriptors_update_sets (GskVulkanRealDescriptors *self,
VkDescriptorPool vk_descriptor_pool);
G_END_DECLS

View File

@ -0,0 +1,93 @@
#include "config.h"
#include "gskvulkansubdescriptorsprivate.h"
/* A GskVulkanDescriptors implementation that holds no descriptor sets of
 * its own: every operation is forwarded to a parent descriptors object.
 * This lets multiple render passes share one set of descriptors. */
struct _GskVulkanSubDescriptors
{
GskVulkanDescriptors parent_instance;
/* The descriptors object all calls are delegated to; owned (ref held). */
GskVulkanDescriptors *parent;
};
G_DEFINE_TYPE (GskVulkanSubDescriptors, gsk_vulkan_sub_descriptors, GSK_TYPE_VULKAN_DESCRIPTORS)
/* Sub-descriptors have no layout of their own: report the parent's. */
static GskVulkanPipelineLayout *
gsk_vulkan_sub_descriptors_get_pipeline_layout (GskVulkanDescriptors *desc)
{
  return gsk_vulkan_descriptors_get_pipeline_layout (GSK_VULKAN_SUB_DESCRIPTORS (desc)->parent);
}
/* Binds the parent's descriptor sets on @vk_command_buffer.
 *
 * @previous is the descriptors object currently bound; when it resolves to
 * the same parent (directly, or through another sub-descriptors object),
 * the bind is skipped because the right sets are already active.
 */
static void
gsk_vulkan_sub_descriptors_bind (GskVulkanDescriptors *desc,
                                 GskVulkanDescriptors *previous,
                                 VkCommandBuffer       vk_command_buffer)
{
  GskVulkanDescriptors *target = GSK_VULKAN_SUB_DESCRIPTORS (desc)->parent;

  /* Compare against what the previous sub-descriptors delegated to. */
  if (GSK_IS_VULKAN_SUB_DESCRIPTORS (previous))
    previous = GSK_VULKAN_SUB_DESCRIPTORS (previous)->parent;

  if (target != previous)
    gsk_vulkan_descriptors_bind (target, previous, vk_command_buffer);
}
/* Forwards image registration to the parent, so all sub-descriptors that
 * share a parent also share one image/sampler table. */
static gboolean
gsk_vulkan_sub_descriptors_add_image (GskGpuDescriptors *desc,
                                      GskGpuImage       *image,
                                      GskGpuSampler      sampler,
                                      guint32           *out_descriptor)
{
  GskVulkanDescriptors *parent = GSK_VULKAN_SUB_DESCRIPTORS (desc)->parent;

  return gsk_gpu_descriptors_add_image (GSK_GPU_DESCRIPTORS (parent),
                                        image,
                                        sampler,
                                        out_descriptor);
}
static void
gsk_vulkan_sub_descriptors_finalize (GObject *object)
{
  GskVulkanSubDescriptors *self = GSK_VULKAN_SUB_DESCRIPTORS (object);

  /* Drop the reference taken in gsk_vulkan_sub_descriptors_new(). */
  g_clear_object (&self->parent);

  G_OBJECT_CLASS (gsk_vulkan_sub_descriptors_parent_class)->finalize (object);
}
static void
gsk_vulkan_sub_descriptors_class_init (GskVulkanSubDescriptorsClass *klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GskGpuDescriptorsClass *gpu_class = GSK_GPU_DESCRIPTORS_CLASS (klass);
  GskVulkanDescriptorsClass *vulkan_class = GSK_VULKAN_DESCRIPTORS_CLASS (klass);

  gobject_class->finalize = gsk_vulkan_sub_descriptors_finalize;

  /* Everything below delegates to self->parent. */
  gpu_class->add_image = gsk_vulkan_sub_descriptors_add_image;

  vulkan_class->get_pipeline_layout = gsk_vulkan_sub_descriptors_get_pipeline_layout;
  vulkan_class->bind = gsk_vulkan_sub_descriptors_bind;
}
/* Nothing to initialize: self->parent is set in gsk_vulkan_sub_descriptors_new(). */
static void
gsk_vulkan_sub_descriptors_init (GskVulkanSubDescriptors *self)
{
}
/* Creates a sub-descriptors object delegating to @parent.
 * Takes a reference on @parent; released in finalize. */
GskVulkanSubDescriptors *
gsk_vulkan_sub_descriptors_new (GskVulkanDescriptors *parent)
{
  GskVulkanSubDescriptors *self = g_object_new (GSK_TYPE_VULKAN_SUB_DESCRIPTORS, NULL);

  self->parent = g_object_ref (parent);

  return self;
}

View File

@ -0,0 +1,14 @@
#pragma once
#include "gskvulkandescriptorsprivate.h"
G_BEGIN_DECLS
/* GskVulkanSubDescriptors: a GskVulkanDescriptors that owns no sets of its
 * own and forwards all operations to a parent descriptors object. */
#define GSK_TYPE_VULKAN_SUB_DESCRIPTORS (gsk_vulkan_sub_descriptors_get_type ())
G_DECLARE_FINAL_TYPE (GskVulkanSubDescriptors, gsk_vulkan_sub_descriptors, GSK, VULKAN_SUB_DESCRIPTORS, GskVulkanDescriptors)
/* Creates a sub-descriptors object delegating to @parent (ref taken). */
GskVulkanSubDescriptors * gsk_vulkan_sub_descriptors_new (GskVulkanDescriptors *parent);
G_END_DECLS

View File

@ -160,7 +160,9 @@ if have_vulkan
'gpu/gskvulkanframe.c',
'gpu/gskvulkanimage.c',
'gpu/gskvulkanmemory.c',
'gpu/gskvulkanrealdescriptors.c',
'gpu/gskvulkanrenderer.c',
'gpu/gskvulkansubdescriptors.c',
])
endif # have_vulkan