gpu: Make descriptor-indexing optional

Do extra work if it's not available.
Benjamin Otte 2023-11-04 19:38:21 +01:00
parent 450524f6cf
commit ae2020aca2
8 changed files with 157 additions and 49 deletions
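The changes share one fallback strategy: every use of VK_EXT_descriptor_indexing is now guarded by a runtime check of GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING, and on devices without the feature the descriptor arrays are padded to the fixed sizes baked into the pipeline layout. A minimal sketch of the guard pattern that recurs below, with an illustrative function name and a single binding (the real layouts carry more):

static void
create_sampler_set_layout (GskVulkanDevice       *device,
                           guint32                n_samplers,
                           VkDescriptorSetLayout *out_layout)
{
  gboolean descriptor_indexing;

  descriptor_indexing = gsk_vulkan_device_has_feature (device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING);

  GSK_VK_CHECK (vkCreateDescriptorSetLayout, gsk_vulkan_device_get_vk_device (device),
                &(VkDescriptorSetLayoutCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
                    .bindingCount = 1,
                    .pBindings = (VkDescriptorSetLayoutBinding[1]) {
                        {
                            .binding = 0,
                            .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
                            .descriptorCount = n_samplers,
                            .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
                        }
                    },
                    /* only chain the extension struct when the feature was actually enabled */
                    .pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetLayoutBindingFlagsCreateInfo) {
                        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
                        .bindingCount = 1,
                        .pBindingFlags = (VkDescriptorBindingFlags[1]) {
                            VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT |
                            VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT
                        }
                    }
                },
                NULL,
                out_layout);
}

Dropping the pNext chain entirely when the feature is missing matters because passing extension structs for an extension that was never enabled is invalid usage on core Vulkan 1.0 devices.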

View File

@ -369,7 +369,40 @@ gsk_gpu_cached_atlas_allocate (GskGpuCachedAtlas *atlas,
return TRUE;
}
static void
gsk_gpu_device_ensure_atlas (GskGpuDevice *self,
gboolean recreate,
gint64 timestamp)
{
GskGpuDevicePrivate *priv = gsk_gpu_device_get_instance_private (self);
if (priv->current_atlas && !recreate)
return;
priv->current_atlas = g_new (GskGpuCachedAtlas, 1);
*priv->current_atlas = (GskGpuCachedAtlas) {
.entry = {
.type = GSK_GPU_CACHE_ATLAS,
.last_use_timestamp = timestamp,
},
.image = GSK_GPU_DEVICE_GET_CLASS (self)->create_atlas_image (self, ATLAS_SIZE, ATLAS_SIZE),
.n_slices = 0,
};
gsk_gpu_cache_entries_append (&priv->cache, (GskGpuCacheEntry *) priv->current_atlas);
}
GskGpuImage *
gsk_gpu_device_get_atlas_image (GskGpuDevice *self)
{
GskGpuDevicePrivate *priv = gsk_gpu_device_get_instance_private (self);
gsk_gpu_device_ensure_atlas (self, FALSE, g_get_monotonic_time ());
return priv->current_atlas->image;
}
static GskGpuImage *
gsk_gpu_device_add_atlas_image (GskGpuDevice *self,
gint64 timestamp,
@ -383,24 +416,15 @@ gsk_gpu_device_add_atlas_image (GskGpuDevice *self,
if (width > MAX_ATLAS_ITEM_SIZE || height > MAX_ATLAS_ITEM_SIZE)
return NULL;
if (priv->current_atlas &&
gsk_gpu_cached_atlas_allocate (priv->current_atlas, width, height, out_x, out_y))
gsk_gpu_device_ensure_atlas (self, FALSE, timestamp);
if (gsk_gpu_cached_atlas_allocate (priv->current_atlas, width, height, out_x, out_y))
{
priv->current_atlas->entry.last_use_timestamp = timestamp;
return priv->current_atlas->image;
}
priv->current_atlas = g_new (GskGpuCachedAtlas, 1);
*priv->current_atlas = (GskGpuCachedAtlas) {
.entry = {
.type = GSK_GPU_CACHE_ATLAS,
.last_use_timestamp = timestamp,
},
.image = GSK_GPU_DEVICE_GET_CLASS (self)->create_atlas_image (self, ATLAS_SIZE, ATLAS_SIZE),
.n_slices = 0,
};
gsk_gpu_cache_entries_append (&priv->cache, (GskGpuCacheEntry *) priv->current_atlas);
gsk_gpu_device_ensure_atlas (self, TRUE, timestamp);
if (gsk_gpu_cached_atlas_allocate (priv->current_atlas, width, height, out_x, out_y))
{

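The atlas handling above is factored into gsk_gpu_device_ensure_atlas() plus a new gsk_gpu_device_get_atlas_image() getter, which lazily creates the atlas on first use. That gives the non-descriptor-indexing path further down a guaranteed-valid image to use when padding otherwise-empty descriptor sets, roughly like this (desc and device stand for whatever descriptors and device objects the caller holds):

guint32 ignored;

/* No image was recorded for this frame: attach the (lazily created) atlas
 * image so the fixed-size descriptor array has at least one valid entry. */
if (!gsk_gpu_descriptors_add_image (GSK_GPU_DESCRIPTORS (desc),
                                    gsk_gpu_device_get_atlas_image (GSK_GPU_DEVICE (device)),
                                    GSK_GPU_SAMPLER_DEFAULT,
                                    &ignored))
  g_assert_not_reached ();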
View File

@ -49,6 +49,7 @@ void gsk_gpu_device_gc (GskGpuD
GdkDisplay * gsk_gpu_device_get_display (GskGpuDevice *self);
gsize gsk_gpu_device_get_max_image_size (GskGpuDevice *self);
GskGpuImage * gsk_gpu_device_get_atlas_image (GskGpuDevice *self);
GskGpuImage * gsk_gpu_device_create_offscreen_image (GskGpuDevice *self,
gboolean with_mipmap,

View File

@ -63,8 +63,8 @@ gsk_gpu_render_pass_type_to_vk_image_layout (GskRenderPassType type)
}
static void
gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp *self,
VkCommandBuffer command_buffer)
gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp *self,
GskVulkanCommandState *state)
{
GskGpuShaderOp *shader;
GskGpuOp *op;
@ -82,9 +82,17 @@ gsk_gpu_render_pass_op_do_barriers (GskGpuRenderPassOp *self,
if (shader->desc == NULL || shader->desc == desc)
continue;
if (desc == NULL)
{
gsk_vulkan_descriptors_bind (GSK_VULKAN_DESCRIPTORS (shader->desc), state->vk_command_buffer);
state->desc = GSK_VULKAN_DESCRIPTORS (shader->desc);
}
desc = shader->desc;
gsk_vulkan_descriptors_transition (GSK_VULKAN_DESCRIPTORS (desc), command_buffer);
gsk_vulkan_descriptors_transition (GSK_VULKAN_DESCRIPTORS (desc), state->vk_command_buffer);
}
if (desc == NULL)
gsk_vulkan_descriptors_transition (state->desc, state->vk_command_buffer);
}
static GskGpuOp *
@ -97,7 +105,7 @@ gsk_gpu_render_pass_op_vk_command (GskGpuOp *op,
/* nesting frame passes not allowed */
g_assert (state->vk_render_pass == VK_NULL_HANDLE);
gsk_gpu_render_pass_op_do_barriers (self, state->vk_command_buffer);
gsk_gpu_render_pass_op_do_barriers (self, state);
state->vk_format = gsk_vulkan_image_get_vk_format (GSK_VULKAN_IMAGE (self->target));
state->vk_render_pass = gsk_vulkan_device_get_vk_render_pass (GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (frame)),

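gsk_gpu_render_pass_op_do_barriers() now receives the whole GskVulkanCommandState instead of a bare VkCommandBuffer, because on devices without descriptor indexing it has to bind the first shader's descriptors up front and remember them in state->desc. The fields used above suggest a state struct along these lines; this is inferred from the call sites, not copied from the private header:

typedef struct _GskVulkanCommandState GskVulkanCommandState;

struct _GskVulkanCommandState
{
  VkCommandBuffer        vk_command_buffer;  /* command buffer being recorded */
  VkRenderPass           vk_render_pass;     /* VK_NULL_HANDLE outside a render pass */
  VkFormat               vk_format;          /* format of the current render target */
  GskVulkanDescriptors  *desc;               /* descriptors currently bound, if any */
};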
View File

@ -152,19 +152,6 @@ gsk_vulkan_descriptors_is_full (GskVulkanDescriptors *self)
gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) >= gsk_vulkan_device_get_max_buffers (self->device);
}
gsize
gsk_vulkan_descriptors_get_n_images (GskVulkanDescriptors *self)
{
return gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) +
gsk_descriptor_image_infos_get_size (&self->descriptor_images);
}
gsize
gsk_vulkan_descriptors_get_n_buffers (GskVulkanDescriptors *self)
{
return gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);
}
GskVulkanPipelineLayout *
gsk_vulkan_descriptors_get_pipeline_layout (GskVulkanDescriptors *self)
{
@ -194,21 +181,80 @@ gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
}
}
static void
gsk_vulkan_descriptors_fill_sets (GskVulkanDescriptors *self)
{
gsize n_immutable_samplers, n_samplers, n_buffers;
if (gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING))
return;
/* If descriptor indexing isn't supported, all descriptors in the shaders
* must be properly set up. That means we need to have
* descriptors for all of them.
*/
gsk_vulkan_device_get_pipeline_sizes (self->device,
self->pipeline_layout,
&n_immutable_samplers,
&n_samplers,
&n_buffers);
if (gsk_descriptor_image_infos_get_size (&self->descriptor_images) == 0)
{
guint32 ignored;
/* We have no image; find any random image and attach it */
if (!gsk_gpu_descriptors_add_image (GSK_GPU_DESCRIPTORS (self),
gsk_gpu_device_get_atlas_image (GSK_GPU_DEVICE (self->device)),
GSK_GPU_SAMPLER_DEFAULT,
&ignored))
{
g_assert_not_reached ();
}
}
while (n_immutable_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images))
{
gsk_descriptor_image_infos_append (&self->descriptor_immutable_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
}
while (n_samplers > gsk_descriptor_image_infos_get_size (&self->descriptor_images))
{
gsk_descriptor_image_infos_append (&self->descriptor_images, gsk_descriptor_image_infos_get (&self->descriptor_images, 0));
}
/* That should be the storage buffer */
g_assert (gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers) > 0);
while (n_buffers > gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers))
{
gsk_descriptor_buffer_infos_append (&self->descriptor_buffers, gsk_descriptor_buffer_infos_get (&self->descriptor_buffers, 0));
}
}
void
gsk_vulkan_descriptors_prepare (GskVulkanDescriptors *self,
VkDescriptorPool vk_descriptor_pool)
gsize *n_images,
gsize *n_buffers)
{
VkWriteDescriptorSet write_descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS + 1];
gsize n_descriptor_sets;
VkDevice vk_device;
vk_device = gsk_vulkan_device_get_vk_device (self->device);
self->pipeline_layout = gsk_vulkan_device_acquire_pipeline_layout (self->device,
gsk_samplers_get_data (&self->immutable_samplers),
gsk_samplers_get_size (&self->immutable_samplers),
gsk_descriptor_image_infos_get_size (&self->descriptor_images),
gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers));
gsk_vulkan_descriptors_fill_sets (self);
*n_images = gsk_descriptor_image_infos_get_size (&self->descriptor_immutable_images) +
gsk_descriptor_image_infos_get_size (&self->descriptor_images);
*n_buffers = gsk_descriptor_buffer_infos_get_size (&self->descriptor_buffers);
}
void
gsk_vulkan_descriptors_update_sets (GskVulkanDescriptors *self,
VkDescriptorPool vk_descriptor_pool)
{
VkWriteDescriptorSet write_descriptor_sets[GSK_VULKAN_N_DESCRIPTOR_SETS + 1];
gsize n_descriptor_sets;
VkDevice vk_device;
gboolean descriptor_indexing;
descriptor_indexing = gsk_vulkan_device_has_feature (self->device, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING);
vk_device = gsk_vulkan_device_get_vk_device (self->device);
GSK_VK_CHECK (vkAllocateDescriptorSets, vk_device,
&(VkDescriptorSetAllocateInfo) {
@ -219,7 +265,7 @@ gsk_vulkan_descriptors_prepare (GskVulkanDescriptors *self,
gsk_vulkan_device_get_vk_image_set_layout (self->device, self->pipeline_layout),
gsk_vulkan_device_get_vk_buffer_set_layout (self->device, self->pipeline_layout),
},
.pNext = &(VkDescriptorSetVariableDescriptorCountAllocateInfo) {
.pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetVariableDescriptorCountAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
.descriptorSetCount = GSK_VULKAN_N_DESCRIPTOR_SETS,
.pDescriptorCounts = (uint32_t[GSK_VULKAN_N_DESCRIPTOR_SETS]) {

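To make the padding in gsk_vulkan_descriptors_fill_sets() concrete: without descriptor indexing the shader sees arrays of the fixed sizes recorded in the pipeline layout, so every slot has to hold a valid descriptor, and unused slots are filled by repeating entry 0. With made-up numbers:

/* Pipeline layout created for 8 samplers and 8 buffers, but this frame
 * only recorded 3 images and the one storage buffer:
 *
 *   descriptor_images:  [img0, img1, img2] -> [img0, img1, img2, img0, img0, img0, img0, img0]
 *   descriptor_buffers: [storage0]         -> [storage0, storage0, ..., storage0]   (8 entries)
 *
 * Afterwards every index a shader can use resolves to a real descriptor. */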
View File

@ -13,8 +13,6 @@ G_DECLARE_FINAL_TYPE (GskVulkanDescriptors, gsk_vulkan_descriptors, GSK, VULKAN_
GskVulkanDescriptors * gsk_vulkan_descriptors_new (GskVulkanDevice *device);
gboolean gsk_vulkan_descriptors_is_full (GskVulkanDescriptors *self);
gsize gsk_vulkan_descriptors_get_n_images (GskVulkanDescriptors *self);
gsize gsk_vulkan_descriptors_get_n_buffers (GskVulkanDescriptors *self);
GskVulkanPipelineLayout * gsk_vulkan_descriptors_get_pipeline_layout
(GskVulkanDescriptors *self);
VkPipelineLayout gsk_vulkan_descriptors_get_vk_pipeline_layout
@ -26,6 +24,9 @@ guint32 gsk_vulkan_descriptors_get_buffer_descriptor
void gsk_vulkan_descriptors_transition (GskVulkanDescriptors *self,
VkCommandBuffer command_buffer);
void gsk_vulkan_descriptors_prepare (GskVulkanDescriptors *self,
gsize *n_images,
gsize *n_buffers);
void gsk_vulkan_descriptors_update_sets (GskVulkanDescriptors *self,
VkDescriptorPool vk_descriptor_pool);
void gsk_vulkan_descriptors_bind (GskVulkanDescriptors *self,
VkCommandBuffer vk_command_buffer);

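The header change splits the old single-step prepare into two phases: gsk_vulkan_descriptors_prepare() acquires the pipeline layout, pads the sets if needed and reports how many image and buffer descriptors will be written, while gsk_vulkan_descriptors_update_sets() later allocates the sets from a pool and writes them. The intended call order, mirroring the frame code in the last file of this commit:

gsize n_images, n_buffers;

/* Phase 1: acquire the layout and learn how large the pool has to be. */
gsk_vulkan_descriptors_prepare (desc, &n_images, &n_buffers);

/* ... (re)create vk_descriptor_pool with room for n_images and n_buffers ... */

/* Phase 2: allocate descriptor sets from the pool and write them. */
gsk_vulkan_descriptors_update_sets (desc, vk_descriptor_pool);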
View File

@ -155,6 +155,9 @@ gsk_vulkan_pipeline_layout_new (GskVulkanDevice *self,
{
GskVulkanPipelineLayout *layout;
GdkDisplay *display;
gboolean descriptor_indexing;
descriptor_indexing = gsk_vulkan_device_has_feature (self, GDK_VULKAN_FEATURE_DESCRIPTOR_INDEXING);
layout = g_malloc (sizeof (GskVulkanPipelineLayout) + setup->n_immutable_samplers * sizeof (VkSampler));
layout->ref_count = 1;
@ -186,7 +189,7 @@ gsk_vulkan_pipeline_layout_new (GskVulkanDevice *self,
.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
}
},
.pNext = &(VkDescriptorSetLayoutBindingFlagsCreateInfo) {
.pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetLayoutBindingFlagsCreateInfo) {
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
.bindingCount = 2,
.pBindingFlags = (VkDescriptorBindingFlags[2]) {
@ -212,7 +215,7 @@ gsk_vulkan_pipeline_layout_new (GskVulkanDevice *self,
.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
},
},
.pNext = &(VkDescriptorSetLayoutBindingFlagsCreateInfo) {
.pNext = !descriptor_indexing ? NULL : &(VkDescriptorSetLayoutBindingFlagsCreateInfo) {
.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
.bindingCount = 1,
.pBindingFlags = (VkDescriptorBindingFlags[1]) {
@ -1058,6 +1061,8 @@ gsk_vulkan_device_acquire_pipeline_layout (GskVulkanDevice *self,
immutable_samplers = fallback;
n_immutable_samplers = 1;
}
/* round the number of samplers/buffers up a bit, so we don't (re)create
* excessive numbers of layouts */
n_samplers = MAX (n_samplers, 8);
g_assert (n_samplers <= self->max_samplers);
n_buffers = MAX (n_buffers, 8);
@ -1087,6 +1092,18 @@ gsk_vulkan_device_release_pipeline_layout (GskVulkanDevice *self,
self->pipeline_layout_cache = layout;
}
void
gsk_vulkan_device_get_pipeline_sizes (GskVulkanDevice *self,
GskVulkanPipelineLayout*layout,
gsize *n_immutable_samplers,
gsize *n_samplers,
gsize *n_buffers)
{
*n_immutable_samplers = layout->setup.n_immutable_samplers;
*n_samplers = layout->setup.n_samplers;
*n_buffers = layout->setup.n_buffers;
}
static GskVulkanAllocator *
gsk_vulkan_device_get_allocator (GskVulkanDevice *self,
gsize index,

View File

@ -50,6 +50,11 @@ GskVulkanPipelineLayout *
gsize n_buffers);
void gsk_vulkan_device_release_pipeline_layout (GskVulkanDevice *self,
GskVulkanPipelineLayout*layout);
void gsk_vulkan_device_get_pipeline_sizes (GskVulkanDevice *self,
GskVulkanPipelineLayout*layout,
gsize *n_immutable_samplers,
gsize *n_samplers,
gsize *n_buffers);
VkDescriptorSetLayout gsk_vulkan_device_get_vk_image_set_layout (GskVulkanDevice *self,
GskVulkanPipelineLayout*layout) G_GNUC_PURE;
VkDescriptorSetLayout gsk_vulkan_device_get_vk_buffer_set_layout (GskVulkanDevice *self,

View File

@ -140,15 +140,17 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
{
GskVulkanDevice *device;
VkDevice vk_device;
gsize i, n_images, n_buffers;
gsize i, n_images, n_buffers, n_sets;
device = GSK_VULKAN_DEVICE (gsk_gpu_frame_get_device (GSK_GPU_FRAME (self)));
vk_device = gsk_vulkan_device_get_vk_device (device);
n_images = 0;
n_buffers = 0;
n_sets = 2 * gsk_descriptors_get_size (&self->descriptors);
for (i = 0; i < gsk_descriptors_get_size (&self->descriptors); i++)
{
gsize n_desc_images, n_desc_buffers;
GskVulkanDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
if (storage_buffer)
{
@ -156,11 +158,13 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
descriptor = gsk_vulkan_descriptors_get_buffer_descriptor (desc, storage_buffer);
g_assert (descriptor == 0);
}
n_images += gsk_vulkan_descriptors_get_n_images (desc);
n_buffers += gsk_vulkan_descriptors_get_n_buffers (desc);
gsk_vulkan_descriptors_prepare (desc, &n_desc_images, &n_desc_buffers);
n_images += n_desc_images;
n_buffers += n_desc_buffers;
}
if (n_images > self->pool_n_images ||
if (n_sets > self->pool_n_sets ||
n_images > self->pool_n_images ||
n_buffers > self->pool_n_buffers)
{
if (self->vk_descriptor_pool != VK_NULL_HANDLE)
@ -170,10 +174,12 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
NULL);
self->vk_descriptor_pool = VK_NULL_HANDLE;
}
if (n_sets > self->pool_n_sets)
self->pool_n_sets = 4 << g_bit_nth_msf (n_sets - 1, -1);
if (n_images > self->pool_n_images)
self->pool_n_images = 2 << g_bit_nth_msf (n_images - 1, -1);
if (n_buffers > self->pool_n_buffers)
self->pool_n_buffers = 2 << g_bit_nth_msf (n_buffers - 1, -1);
self->pool_n_buffers = 4 << g_bit_nth_msf (n_buffers - 1, -1);
}
if (self->vk_descriptor_pool == VK_NULL_HANDLE)
@ -203,7 +209,7 @@ gsk_vulkan_frame_prepare_descriptors (GskVulkanFrame *self,
{
GskVulkanDescriptors *desc = gsk_descriptors_get (&self->descriptors, i);
gsk_vulkan_descriptors_prepare (desc, self->vk_descriptor_pool);
gsk_vulkan_descriptors_update_sets (desc, self->vk_descriptor_pool);
}
}
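For reference, the pool growth above rounds the requested count up to the next power of two and adds headroom: g_bit_nth_msf (n - 1, -1) is the index of the highest set bit of n - 1, so shifting 2 by it yields the smallest power of two that is at least n (for n > 1). Worked examples, with numbers chosen for illustration:

/* n_sets   = 5 : g_bit_nth_msf (4, -1) == 2, 4 << 2 == 16 -> pool sized for 16 sets   */
/* n_images = 9 : g_bit_nth_msf (8, -1) == 3, 2 << 3 == 16 -> pool sized for 16 images */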