Merge branch 'wip/otte/vulkan-for-main' into 'main'

vulkan: Move some code

See merge request GNOME/gtk!6096
Benjamin Otte 2023-06-14 06:01:47 +00:00
commit 447d1fab62
8 changed files with 202 additions and 247 deletions

View File

@@ -15,8 +15,7 @@ static const GdkDebugKey gsk_debug_keys[] = {
{ "geometry", GSK_DEBUG_GEOMETRY, "Show borders (when using cairo)" },
{ "full-redraw", GSK_DEBUG_FULL_REDRAW, "Force full redraws" },
{ "sync", GSK_DEBUG_SYNC, "Sync after each frame" },
{ "vulkan-staging-image", GSK_DEBUG_VULKAN_STAGING_IMAGE, "Use a staging image for Vulkan texture upload" },
{ "vulkan-staging-buffer", GSK_DEBUG_VULKAN_STAGING_BUFFER, "Use a staging buffer for Vulkan texture upload" }
{ "staging", GSK_DEBUG_STAGING, "Use a staging image for texture upload (Vulkan only)" },
};
static guint gsk_debug_flags;

View File

@@ -18,8 +18,7 @@ typedef enum {
GSK_DEBUG_GEOMETRY = 1 << 9,
GSK_DEBUG_FULL_REDRAW = 1 << 10,
GSK_DEBUG_SYNC = 1 << 11,
GSK_DEBUG_VULKAN_STAGING_IMAGE = 1 << 12,
GSK_DEBUG_VULKAN_STAGING_BUFFER = 1 << 13
GSK_DEBUG_STAGING = 1 << 12
} GskDebugFlags;
#define GSK_DEBUG_ANY ((1 << 13) - 1)
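As an aside, these flags are normally enabled at runtime through the GSK_DEBUG environment variable (for example GSK_DEBUG=staging) and queried in code with GSK_DEBUG_CHECK(). A minimal sketch, not from this commit, of checking the consolidated key, mirroring the call site that appears further down in this diff; the g_message() call is purely illustrative:

    if (GSK_DEBUG_CHECK (STAGING))
      g_message ("forcing staging-buffer texture uploads");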

View File

@@ -254,43 +254,6 @@ gsk_vulkan_image_new (GdkVulkanContext *context,
return self;
}
static void
gsk_vulkan_image_upload_data (GskVulkanImage *self,
guchar *data,
gsize width,
gsize height,
gsize data_stride)
{
VkImageSubresource image_res;
VkSubresourceLayout image_layout;
gsize mem_stride;
guchar *mem;
image_res.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
image_res.mipLevel = 0;
image_res.arrayLayer = 0;
mem_stride = width * 4;
vkGetImageSubresourceLayout (gdk_vulkan_context_get_device (self->vulkan),
self->vk_image, &image_res, &image_layout);
mem = gsk_vulkan_memory_map (self->memory) + image_layout.offset;
if (image_layout.rowPitch == width * 4 && data_stride == mem_stride)
{
memcpy (mem, data, data_stride * height);
}
else
{
for (gsize i = 0; i < height; i++)
{
memcpy (mem + i * image_layout.rowPitch, data + i * data_stride, width * 4);
}
}
gsk_vulkan_memory_unmap (self->memory);
}
static void
gsk_vulkan_image_ensure_view (GskVulkanImage *self,
VkFormat format)
@@ -320,34 +283,99 @@ gsk_vulkan_image_ensure_view (GskVulkanImage *self,
&self->vk_image_view);
}
static GskVulkanImage *
gsk_vulkan_image_new_from_data_via_staging_buffer (GskVulkanUploader *uploader,
guchar *data,
gsize width,
gsize height,
gsize stride)
GskVulkanImage *
gsk_vulkan_image_new_from_texture (GskVulkanUploader *uploader,
GdkTexture *texture)
{
GdkTextureDownloader *downloader;
GskVulkanImage *result;
GskVulkanImageMap map;
downloader = gdk_texture_downloader_new (texture);
result = gsk_vulkan_image_new_for_upload (uploader,
gdk_texture_get_width (texture),
gdk_texture_get_height (texture));
gsk_vulkan_image_map_memory (result, uploader, &map);
gdk_texture_downloader_download_into (downloader, map.data, map.stride);
gsk_vulkan_image_unmap_memory (result, uploader, &map);
gdk_texture_downloader_free (downloader);
return result;
}
GskVulkanImage *
gsk_vulkan_image_new_for_upload (GskVulkanUploader *uploader,
gsize width,
gsize height)
{
GskVulkanImage *self;
GskVulkanBuffer *staging;
gsize buffer_size = width * height * 4;
guchar *mem;
staging = gsk_vulkan_buffer_new_staging (uploader->vulkan, buffer_size);
mem = gsk_vulkan_buffer_map (staging);
self = gsk_vulkan_image_new (uploader->vulkan,
width,
height,
VK_IMAGE_TILING_LINEAR,
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_SAMPLED_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
if (stride == width * 4)
{
memcpy (mem, data, stride * height);
}
else
{
for (gsize i = 0; i < height; i++)
{
memcpy (mem + i * width * 4, data + i * stride, width * 4);
}
}
gsk_vulkan_image_ensure_view (self, VK_FORMAT_B8G8R8A8_UNORM);
gsk_vulkan_buffer_unmap (staging);
return self;
}
static void
gsk_vulkan_image_map_memory_direct (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
VkImageSubresource image_res;
VkSubresourceLayout image_layout;
image_res.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
image_res.mipLevel = 0;
image_res.arrayLayer = 0;
vkGetImageSubresourceLayout (gdk_vulkan_context_get_device (self->vulkan),
self->vk_image, &image_res, &image_layout);
map->staging_buffer = NULL;
map->data = gsk_vulkan_memory_map (self->memory) + image_layout.offset;
map->stride = image_layout.rowPitch;
}
static void
gsk_vulkan_image_unmap_memory_direct (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
gsk_vulkan_memory_unmap (self->memory);
gsk_vulkan_uploader_add_image_barrier (uploader,
TRUE,
self,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_ACCESS_SHADER_READ_BIT);
}
static void
gsk_vulkan_image_map_memory_indirect (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
gsize buffer_size = self->width * self->height * 4;
map->staging_buffer = gsk_vulkan_buffer_new_staging (uploader->vulkan, buffer_size);
map->data = gsk_vulkan_buffer_map (map->staging_buffer);
map->stride = self->width * 4;
}
static void
gsk_vulkan_image_unmap_memory_indirect (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
gsk_vulkan_buffer_unmap (map->staging_buffer);
gsk_vulkan_uploader_add_buffer_barrier (uploader,
FALSE,
@@ -357,21 +385,11 @@ gsk_vulkan_image_new_from_data_via_staging_buffer (GskVulkanUploader *uploader,
.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.buffer = gsk_vulkan_buffer_get_buffer (staging),
.buffer = gsk_vulkan_buffer_get_buffer (map->staging_buffer),
.offset = 0,
.size = buffer_size,
.size = VK_WHOLE_SIZE,
});
self = gsk_vulkan_image_new (uploader->vulkan,
width,
height,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_SAMPLED_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
gsk_vulkan_uploader_add_image_barrier (uploader,
FALSE,
self,
@@ -379,7 +397,7 @@ gsk_vulkan_image_new_from_data_via_staging_buffer (GskVulkanUploader *uploader,
VK_ACCESS_TRANSFER_WRITE_BIT);
vkCmdCopyBufferToImage (gsk_vulkan_uploader_get_copy_buffer (uploader),
gsk_vulkan_buffer_get_buffer (staging),
gsk_vulkan_buffer_get_buffer (map->staging_buffer),
self->vk_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
@@ -394,8 +412,8 @@ gsk_vulkan_image_new_from_data_via_staging_buffer (GskVulkanUploader *uploader,
},
.imageOffset = { 0, 0, 0 },
.imageExtent = {
.width = width,
.height = height,
.width = self->width,
.height = self->height,
.depth = 1
}
}
@@ -407,141 +425,33 @@ gsk_vulkan_image_new_from_data_via_staging_buffer (GskVulkanUploader *uploader,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_ACCESS_SHADER_READ_BIT);
uploader->staging_buffer_free_list = g_slist_prepend (uploader->staging_buffer_free_list, staging);
gsk_vulkan_image_ensure_view (self, VK_FORMAT_B8G8R8A8_UNORM);
return self;
uploader->staging_buffer_free_list = g_slist_prepend (uploader->staging_buffer_free_list,
map->staging_buffer);
}
static GskVulkanImage *
gsk_vulkan_image_new_from_data_via_staging_image (GskVulkanUploader *uploader,
guchar *data,
gsize width,
gsize height,
gsize stride)
void
gsk_vulkan_image_map_memory (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
GskVulkanImage *self, *staging;
g_assert (self->vk_image_layout == VK_IMAGE_LAYOUT_UNDEFINED ||
self->vk_image_layout == VK_IMAGE_LAYOUT_PREINITIALIZED);
staging = gsk_vulkan_image_new (uploader->vulkan,
width,
height,
VK_IMAGE_TILING_LINEAR,
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
VK_IMAGE_LAYOUT_PREINITIALIZED,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
gsk_vulkan_image_upload_data (staging, data, width, height, stride);
self = gsk_vulkan_image_new (uploader->vulkan,
width,
height,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_SAMPLED_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
gsk_vulkan_uploader_add_image_barrier (uploader,
FALSE,
staging,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
VK_ACCESS_TRANSFER_READ_BIT);
gsk_vulkan_uploader_add_image_barrier (uploader,
FALSE,
self,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_ACCESS_TRANSFER_WRITE_BIT);
vkCmdCopyImage (gsk_vulkan_uploader_get_copy_buffer (uploader),
staging->vk_image,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
self->vk_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&(VkImageCopy) {
.srcSubresource = {
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.mipLevel = 0,
.baseArrayLayer = 0,
.layerCount = 1
},
.srcOffset = { 0, 0, 0 },
.dstSubresource = {
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.mipLevel = 0,
.baseArrayLayer = 0,
.layerCount = 1
},
.dstOffset = { 0, 0, 0 },
.extent = {
.width = width,
.height = height,
.depth = 1
}
});
gsk_vulkan_uploader_add_image_barrier (uploader,
TRUE,
self,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_ACCESS_SHADER_READ_BIT);
uploader->staging_image_free_list = g_slist_prepend (uploader->staging_image_free_list, staging);
gsk_vulkan_image_ensure_view (self, VK_FORMAT_B8G8R8A8_UNORM);
return self;
}
static GskVulkanImage *
gsk_vulkan_image_new_from_data_directly (GskVulkanUploader *uploader,
guchar *data,
gsize width,
gsize height,
gsize stride)
{
GskVulkanImage *self;
self = gsk_vulkan_image_new (uploader->vulkan,
width,
height,
VK_IMAGE_TILING_LINEAR,
VK_IMAGE_USAGE_SAMPLED_BIT,
VK_IMAGE_LAYOUT_PREINITIALIZED,
VK_ACCESS_HOST_WRITE_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
gsk_vulkan_image_upload_data (self, data, width, height, stride);
gsk_vulkan_uploader_add_image_barrier (uploader,
TRUE,
self,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_ACCESS_SHADER_READ_BIT);
gsk_vulkan_image_ensure_view (self, VK_FORMAT_B8G8R8A8_UNORM);
return self;
}
GskVulkanImage *
gsk_vulkan_image_new_from_data (GskVulkanUploader *uploader,
guchar *data,
gsize width,
gsize height,
gsize stride)
{
if (GSK_DEBUG_CHECK (VULKAN_STAGING_BUFFER))
return gsk_vulkan_image_new_from_data_via_staging_buffer (uploader, data, width, height, stride);
else if (GSK_DEBUG_CHECK (VULKAN_STAGING_IMAGE))
return gsk_vulkan_image_new_from_data_via_staging_image (uploader, data, width, height, stride);
if (!GSK_DEBUG_CHECK (STAGING) && gsk_vulkan_memory_can_map (self->memory, TRUE))
gsk_vulkan_image_map_memory_direct (self, uploader, map);
else
return gsk_vulkan_image_new_from_data_directly (uploader, data, width, height, stride);
gsk_vulkan_image_map_memory_indirect (self, uploader, map);
}
void
gsk_vulkan_image_unmap_memory (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map)
{
if (map->staging_buffer)
gsk_vulkan_image_unmap_memory_indirect (self, uploader, map);
else
gsk_vulkan_image_unmap_memory_direct (self, uploader, map);
}
GskVulkanImage *

View File

@@ -24,11 +24,8 @@ GskVulkanImage * gsk_vulkan_image_new_for_swapchain (GdkVulk
VkFormat format,
gsize width,
gsize height);
GskVulkanImage * gsk_vulkan_image_new_from_data (GskVulkanUploader *uploader,
guchar *data,
gsize width,
gsize height,
gsize stride);
GskVulkanImage * gsk_vulkan_image_new_from_texture (GskVulkanUploader *uploader,
GdkTexture *texture);
typedef struct {
guchar *data;
@@ -56,6 +53,27 @@ GskVulkanImage * gsk_vulkan_image_new_for_offscreen (GdkVulk
GdkTexture * gsk_vulkan_image_download (GskVulkanImage *self,
GskVulkanUploader *uploader);
typedef struct _GskVulkanImageMap GskVulkanImageMap;
struct _GskVulkanImageMap
{
guchar *data;
gsize stride;
/* private */
gpointer staging_buffer;
};
GskVulkanImage * gsk_vulkan_image_new_for_upload (GskVulkanUploader *uploader,
gsize width,
gsize height);
void gsk_vulkan_image_map_memory (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map);
void gsk_vulkan_image_unmap_memory (GskVulkanImage *self,
GskVulkanUploader *uploader,
GskVulkanImageMap *map);
gsize gsk_vulkan_image_get_width (GskVulkanImage *self);
gsize gsk_vulkan_image_get_height (GskVulkanImage *self);
VkImage gsk_vulkan_image_get_image (GskVulkanImage *self);
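Taken together, these declarations replace the old gsk_vulkan_image_new_from_data() entry point with a create/map/write/unmap pattern. A minimal caller sketch under that API (not from this commit): the helper name and the pixels/src_stride parameters are hypothetical, the source rows are assumed to be premultiplied BGRA, <string.h> is assumed for memcpy(), and error handling is omitted:

    static GskVulkanImage *
    upload_pixels_sketch (GskVulkanUploader *uploader,
                          const guchar      *pixels,
                          gsize              src_stride,
                          gsize              width,
                          gsize              height)
    {
      GskVulkanImage *image;
      GskVulkanImageMap map;

      image = gsk_vulkan_image_new_for_upload (uploader, width, height);
      gsk_vulkan_image_map_memory (image, uploader, &map);

      /* Copy row by row: map.stride is the image's rowPitch on the direct
       * path and may be larger than width * 4. */
      for (gsize y = 0; y < height; y++)
        memcpy (map.data + y * map.stride, pixels + y * src_stride, width * 4);

      gsk_vulkan_image_unmap_memory (image, uploader, &map);

      return image;
    }

gsk_vulkan_image_new_from_texture() above is essentially this pattern with the copy loop replaced by gdk_texture_downloader_download_into().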

View File

@@ -9,6 +9,7 @@ struct _GskVulkanMemory
gsize size;
VkMemoryType vk_memory_type;
VkDeviceMemory vk_memory;
};
@@ -41,6 +42,7 @@ gsk_vulkan_memory_new (GdkVulkanContext *context,
g_assert (i < properties.memoryTypeCount);
self->vk_memory_type = properties.memoryTypes[i];
GSK_VK_CHECK (vkAllocateMemory, gdk_vulkan_context_get_device (context),
&(VkMemoryAllocateInfo) {
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
@@ -71,11 +73,33 @@ gsk_vulkan_memory_get_device_memory (GskVulkanMemory *self)
return self->vk_memory;
}
gboolean
gsk_vulkan_memory_can_map (GskVulkanMemory *self,
gboolean fast)
{
if (!(self->vk_memory_type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
return FALSE;
/* FIXME: no support implemented for this */
if (!(self->vk_memory_type.propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
return FALSE;
if (!fast)
return TRUE;
if (!(self->vk_memory_type.propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT))
return FALSE;
return TRUE;
}
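For illustration (not part of the commit), the outcomes of this predicate for a few typical memory-type property combinations, following the checks above:

    /* property flags                                  fast = FALSE   fast = TRUE
     * DEVICE_LOCAL only                               FALSE          FALSE   (not host-visible)
     * HOST_VISIBLE, not HOST_COHERENT                 FALSE          FALSE   (coherence handling not implemented yet)
     * HOST_VISIBLE | HOST_COHERENT                    TRUE           FALSE   (mappable, but not "fast")
     * HOST_VISIBLE | HOST_COHERENT | HOST_CACHED      TRUE           TRUE
     */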
guchar *
gsk_vulkan_memory_map (GskVulkanMemory *self)
{
void *data;
g_assert (gsk_vulkan_memory_can_map (self, FALSE));
GSK_VK_CHECK (vkMapMemory, gdk_vulkan_context_get_device (self->vulkan),
self->vk_memory,
0,

View File

@@ -14,6 +14,8 @@ void gsk_vulkan_memory_free (GskVulk
VkDeviceMemory gsk_vulkan_memory_get_device_memory (GskVulkanMemory *self);
gboolean gsk_vulkan_memory_can_map (GskVulkanMemory *self,
gboolean fast);
guchar * gsk_vulkan_memory_map (GskVulkanMemory *self);
void gsk_vulkan_memory_unmap (GskVulkanMemory *self);

View File

@@ -422,20 +422,13 @@ gsk_vulkan_renderer_ref_texture_image (GskVulkanRenderer *self,
GskVulkanUploader *uploader)
{
GskVulkanTextureData *data;
cairo_surface_t *surface;
GskVulkanImage *image;
data = gdk_texture_get_render_data (texture, self);
if (data)
return g_object_ref (data->image);
surface = gdk_texture_download_surface (texture);
image = gsk_vulkan_image_new_from_data (uploader,
cairo_image_surface_get_data (surface),
cairo_image_surface_get_width (surface),
cairo_image_surface_get_height (surface),
cairo_image_surface_get_stride (surface));
cairo_surface_destroy (surface);
image = gsk_vulkan_image_new_from_texture (uploader, texture);
data = g_new0 (GskVulkanTextureData, 1);
data->image = image;

View File

@@ -1294,6 +1294,8 @@ gsk_vulkan_render_pass_get_node_as_texture (GskVulkanRenderPass *self,
{
VkSemaphore semaphore;
GskVulkanImage *result;
GskVulkanImageMap map;
gsize width, height;
cairo_surface_t *surface;
cairo_t *cr;
@@ -1363,10 +1365,18 @@ gsk_vulkan_render_pass_get_node_as_texture (GskVulkanRenderPass *self,
#endif
/* XXX: We could intersect bounds with clip bounds here */
surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32,
ceil (node->bounds.size.width * graphene_vec2_get_x (scale)),
ceil (node->bounds.size.height * graphene_vec2_get_y (scale)));
cairo_surface_set_device_scale (surface, graphene_vec2_get_x (scale), graphene_vec2_get_y (scale));
width = ceil (node->bounds.size.width * graphene_vec2_get_x (scale));
height = ceil (node->bounds.size.height * graphene_vec2_get_y (scale));
result = gsk_vulkan_image_new_for_upload (uploader, width, height);
gsk_vulkan_image_map_memory (result, uploader, &map);
surface = cairo_image_surface_create_for_data (map.data,
CAIRO_FORMAT_ARGB32,
width, height,
map.stride);
cairo_surface_set_device_scale (surface,
width / node->bounds.size.width,
height / node->bounds.size.height);
cr = cairo_create (surface);
cairo_translate (cr, -node->bounds.origin.x, -node->bounds.origin.y);
@@ -1374,14 +1384,10 @@ gsk_vulkan_render_pass_get_node_as_texture (GskVulkanRenderPass *self,
cairo_destroy (cr);
result = gsk_vulkan_image_new_from_data (uploader,
cairo_image_surface_get_data (surface),
cairo_image_surface_get_width (surface),
cairo_image_surface_get_height (surface),
cairo_image_surface_get_stride (surface));
cairo_surface_finish (surface);
cairo_surface_destroy (surface);
gsk_vulkan_image_unmap_memory (result, uploader, &map);
gsk_vulkan_render_add_cleanup_image (render, result);
*tex_bounds = node->bounds;
@@ -1396,9 +1402,10 @@ gsk_vulkan_render_pass_upload_fallback (GskVulkanRenderPass *self,
GskVulkanUploader *uploader)
{
GskRenderNode *node;
GskVulkanImageMap map;
gsize width, height;
cairo_surface_t *surface;
cairo_t *cr;
float scale_x, scale_y;
node = op->node;
@@ -1418,13 +1425,20 @@ gsk_vulkan_render_pass_upload_fallback (GskVulkanRenderPass *self,
}
#endif
scale_x = graphene_vec2_get_x (&self->scale);
scale_y = graphene_vec2_get_y (&self->scale);
/* XXX: We could intersect bounds with clip bounds here */
surface = cairo_image_surface_create (CAIRO_FORMAT_ARGB32,
ceil (node->bounds.size.width * scale_x),
ceil (node->bounds.size.height * scale_y));
cairo_surface_set_device_scale (surface, scale_x, scale_y);
width = ceil (node->bounds.size.width * graphene_vec2_get_x (&self->scale));
height = ceil (node->bounds.size.height * graphene_vec2_get_y (&self->scale));
op->source = gsk_vulkan_image_new_for_upload (uploader, width, height);
gsk_vulkan_image_map_memory (op->source, uploader, &map);
surface = cairo_image_surface_create_for_data (map.data,
CAIRO_FORMAT_ARGB32,
width, height,
map.stride);
cairo_surface_set_device_scale (surface,
width / node->bounds.size.width,
height / node->bounds.size.height);
cr = cairo_create (surface);
cairo_translate (cr, -node->bounds.origin.x, -node->bounds.origin.y);
@@ -1474,17 +1488,13 @@ gsk_vulkan_render_pass_upload_fallback (GskVulkanRenderPass *self,
cairo_destroy (cr);
op->source = gsk_vulkan_image_new_from_data (uploader,
cairo_image_surface_get_data (surface),
cairo_image_surface_get_width (surface),
cairo_image_surface_get_height (surface),
cairo_image_surface_get_stride (surface));
op->source_rect = GRAPHENE_RECT_INIT(0, 0, 1, 1);
cairo_surface_finish (surface);
cairo_surface_destroy (surface);
gsk_vulkan_image_unmap_memory (op->source, uploader, &map);
gsk_vulkan_render_add_cleanup_image (render, op->source);
op->source_rect = GRAPHENE_RECT_INIT(0, 0, 1, 1);
}
static void