/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/gpu/vk/VkYcbcrSamplerHelper.h"

#ifdef SK_VULKAN

#include "include/gpu/GrDirectContext.h"
#include "src/gpu/GrDirectContextPriv.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUtil.h"
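
// GetExpectedY()/GetExpectedUV() define the gradient test pattern that
// createBackendTexture() writes into the two planes. The values stay in the
// narrow video range (Y in [16, 235], Cb/Cr in [16, 240]), matching the
// VK_SAMPLER_YCBCR_RANGE_ITU_NARROW conversion set up below.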
int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
    return 16 + (x + y) * 219 / (width + height - 2);
}

std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
    return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
}

GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
    return (GrVkGpu*) fDContext->priv().getGpu();
}

VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
    SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
}

VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
    GrVkGpu* vkGpu = this->vkGpu();

    if (fImage != VK_NULL_HANDLE) {
        GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
        fImage = VK_NULL_HANDLE;
    }
    if (fImageMemory != VK_NULL_HANDLE) {
        GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
        fImageMemory = VK_NULL_HANDLE;
    }
}

bool VkYcbcrSamplerHelper::isYCbCrSupported() {
    GrVkGpu* vkGpu = this->vkGpu();

    return vkGpu->vkCaps().supportsYcbcrConversion();
}

bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Verify that the image format is supported.
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    if (!(formatProperties.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return false;
    }

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo = {};
    vkImageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    vkImageInfo.imageType = VK_IMAGE_TYPE_2D;
    vkImageInfo.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
    vkImageInfo.extent = VkExtent3D{width, height, 1};
    vkImageInfo.mipLevels = 1;
    vkImageInfo.arrayLayers = 1;
    vkImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    vkImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    vkImageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                        VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    vkImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    vkImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    SkASSERT(fImage == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));
    uint32_t memoryTypeIndex = 0;
    bool foundHeap = false;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount && !foundHeap; ++i) {
        if (requirements.memoryTypeBits & (1 << i)) {
            // Map host-visible memory.
            if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
                memoryTypeIndex = i;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }
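
    // For VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, plane 0 holds the full-resolution
    // Y (G) samples and plane 1 holds interleaved Cb/Cr (B/R) pairs at half
    // resolution in each dimension, which is why the UV loops below only cover
    // half the width and height.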
    // Write Y channel.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }
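
    // The memory type was only required to be host-visible (not necessarily
    // host-coherent), so flush the written range before unmapping.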
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into a GrBackendTexture.
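    // The conversion info records how samplers should interpret the planes
    // (BT.709 matrix, narrow range, cosited chroma, linear chroma filtering)
    // and is carried along inside the backend texture.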
    GrVkYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                         /*externalFormat=*/0,
                                         VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                         VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_CHROMA_LOCATION_COSITED_EVEN,
                                         VK_FILTER_LINEAR,
                                         false,
                                         formatProperties.linearTilingFeatures};
    GrVkAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fTexture = GrBackendTexture(width, height, imageInfo);
    return true;
}

#endif // SK_VULKAN