Add Vulkan support to GrAHardwareBufferImageGenerator

Bug: skia:
Change-Id: I37b08f336919fdac77fcd2726f129425bf8f4f2b
Reviewed-on: https://skia-review.googlesource.com/150132
Reviewed-by: Stan Iliev <stani@google.com>
Reviewed-by: Jim Van Verth <jvanverth@google.com>
Commit-Queue: Greg Daniel <egdaniel@google.com>
Greg Daniel 2018-09-12 09:44:25 -04:00 committed by Skia Commit-Bot
parent 46a5f6c28a
commit 637c06aec7
10 changed files with 312 additions and 20 deletions
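For orientation before the per-file diffs: with this change, an SkImage created from an AHardwareBuffer can be realized on a Vulkan-backed GrContext instead of only a GL one, because GrAHardwareBufferImageGenerator can now import the buffer into a VkImage. A minimal client-side sketch follows; the NDK AHardwareBuffer calls and SkImage::MakeFromAHardwareBuffer are public APIs of the time and are not part of this commit, and the buffer description is illustrative.

#include <android/hardware_buffer.h>
#include "SkImage.h"

// Sketch: allocate a GPU-sampleable AHardwareBuffer and wrap it in a lazy
// SkImage. When the image is later drawn into a Vulkan-backed surface, the
// generator changed below imports the buffer as a VkImage (previously GL only).
sk_sp<SkImage> wrap_hardware_buffer() {
    AHardwareBuffer_Desc desc = {};
    desc.width  = 256;
    desc.height = 256;
    desc.layers = 1;
    desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
    desc.usage  = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                  AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;

    AHardwareBuffer* buffer = nullptr;
    if (AHardwareBuffer_allocate(&desc, &buffer) != 0) {
        return nullptr;
    }
    // The generator acquires its own reference to the buffer, so the local
    // reference can be released once the SkImage exists.
    sk_sp<SkImage> image = SkImage::MakeFromAHardwareBuffer(buffer);
    AHardwareBuffer_release(buffer);
    return image;
}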

View File

@@ -20,6 +20,7 @@
#include "GrProxyProvider.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrResourceProviderPriv.h"
#include "GrTexture.h"
#include "GrTextureProxy.h"
#include "SkMessageBus.h"
@@ -31,6 +32,11 @@
#include <GLES/gl.h>
#include <GLES/glext.h>
#ifdef SK_VULKAN
#include "vk/GrVkExtensions.h"
#include "vk/GrVkGpu.h"
#endif
#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0
@@ -111,6 +117,216 @@ GrAHardwareBufferImageGenerator::~GrAHardwareBufferImageGenerator() {
///////////////////////////////////////////////////////////////////////////////////////////////////
#ifdef SK_VULKAN
class VulkanCleanupHelper {
public:
VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
: fDevice(gpu->device())
, fImage(image)
, fMemory(memory)
, fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
, fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
~VulkanCleanupHelper() {
fDestroyImage(fDevice, fImage, nullptr);
fFreeMemory(fDevice, fMemory, nullptr);
}
private:
VkDevice fDevice;
VkImage fImage;
VkDeviceMemory fMemory;
PFN_vkDestroyImage fDestroyImage;
PFN_vkFreeMemory fFreeMemory;
};
void GrAHardwareBufferImageGenerator::DeleteVkImage(void* context) {
VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
delete cleanupHelper;
}
#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;
static GrBackendTexture make_vk_backend_texture(
GrContext* context, AHardwareBuffer* hardwareBuffer,
int width, int height, GrPixelConfig config,
GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
bool isProtectedContent,
const GrBackendFormat& backendFormat) {
SkASSERT(context->contextPriv().getBackend() == kVulkan_GrBackend);
GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
VkPhysicalDevice physicalDevice = gpu->physicalDevice();
VkDevice device = gpu->device();
SkASSERT(gpu);
if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
return GrBackendTexture();
}
SkASSERT(backendFormat.getVkFormat());
VkFormat format = *backendFormat.getVkFormat();
VkResult err;
VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
hwbFormatProps.pNext = nullptr;
VkAndroidHardwareBufferPropertiesANDROID hwbProps;
hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
hwbProps.pNext = &hwbFormatProps;
err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
if (VK_SUCCESS != err) {
return GrBackendTexture();
}
SkASSERT(format == hwbFormatProps.format);
SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
const VkExternalMemoryImageCreateInfo externalMemoryImageInfo {
VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, // sType
nullptr, // pNext
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, // handleTypes
};
VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if we have
// to use linear. Add better linear support throughout Ganesh.
VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
const VkImageCreateInfo imageCreateInfo = {
VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
&externalMemoryImageInfo, // pNext
0, // VkImageCreateFlags
VK_IMAGE_TYPE_2D, // VkImageType
format, // VkFormat
{ (uint32_t)width, (uint32_t)height, 1 }, // VkExtent3D
1, // mipLevels
1, // arrayLayers
VK_SAMPLE_COUNT_1_BIT, // samples
tiling, // VkImageTiling
usageFlags, // VkImageUsageFlags
VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode
0, // queueFamilyCount
0, // pQueueFamilyIndices
VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
};
VkImage image;
err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
if (VK_SUCCESS != err) {
return GrBackendTexture();
}
VkImageMemoryRequirementsInfo2 memReqsInfo;
memReqsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
memReqsInfo.pNext = nullptr;
memReqsInfo.image = image;
VkMemoryDedicatedRequirements dedicatedMemReqs;
dedicatedMemReqs.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
dedicatedMemReqs.pNext = nullptr;
VkMemoryRequirements2 memReqs;
memReqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
memReqs.pNext = &dedicatedMemReqs;
VK_CALL(GetImageMemoryRequirements2(device, &memReqsInfo, &memReqs));
SkASSERT(VK_TRUE == dedicatedMemReqs.requiresDedicatedAllocation);
VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
phyDevMemProps.pNext = nullptr;
uint32_t typeIndex = 0;
uint32_t heapIndex = 0;
bool foundHeap = false;
VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
if (hwbProps.memoryTypeBits & (1 << i)) {
const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
typeIndex = i;
heapIndex = pdmp.memoryTypes[i].heapIndex;
foundHeap = true;
}
}
}
if (!foundHeap) {
VK_CALL(DestroyImage(device, image, nullptr));
return GrBackendTexture();
}
VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
hwbImportInfo.pNext = nullptr;
hwbImportInfo.buffer = hardwareBuffer;
VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
dedicatedAllocInfo.pNext = &hwbImportInfo;
dedicatedAllocInfo.image = image;
dedicatedAllocInfo.buffer = VK_NULL_HANDLE;
VkMemoryAllocateInfo allocInfo = {
VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
&dedicatedAllocInfo, // pNext
hwbProps.allocationSize, // allocationSize
typeIndex, // memoryTypeIndex
};
VkDeviceMemory memory;
err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
if (VK_SUCCESS != err) {
VK_CALL(DestroyImage(device, image, nullptr));
return GrBackendTexture();
}
VkBindImageMemoryInfo bindImageInfo;
bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
bindImageInfo.pNext = nullptr;
bindImageInfo.image = image;
bindImageInfo.memory = memory;
bindImageInfo.memoryOffset = 0;
err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
if (VK_SUCCESS != err) {
VK_CALL(DestroyImage(device, image, nullptr));
VK_CALL(FreeMemory(device, memory, nullptr));
return GrBackendTexture();
}
GrVkImageInfo imageInfo;
imageInfo.fImage = image;
imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
imageInfo.fImageTiling = tiling;
imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageInfo.fFormat = format;
imageInfo.fLevelCount = 1;
// TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
// support that extension. Or if we know the source of the AHardwareBuffer is not from a
// "foreign" device we can leave them as external.
imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
*deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
*deleteCtx = new VulkanCleanupHelper(gpu, image, memory);
return GrBackendTexture(width, height, imageInfo);
}
#endif
class GLCleanupHelper {
public:
GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
@@ -128,7 +344,6 @@ private:
EGLDisplay fDisplay;
};
void GrAHardwareBufferImageGenerator::DeleteGLTexture(void* context) {
GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
delete cleanupHelper;
@@ -199,13 +414,25 @@ static GrBackendTexture make_backend_texture(
GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
bool isProtectedContent,
const GrBackendFormat& backendFormat) {
if (context->abandoned() || kOpenGL_GrBackend != context->contextPriv().getBackend()) {
// Check if GrContext is not abandoned and the backend is GL.
if (context->abandoned()) {
return GrBackendTexture();
}
bool createProtectedImage = isProtectedContent && can_import_protected_content(context);
return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
deleteCtx, createProtectedImage, backendFormat);
if (kOpenGL_GrBackend == context->contextPriv().getBackend()) {
return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
deleteCtx, createProtectedImage, backendFormat);
} else {
SkASSERT(kVulkan_GrBackend == context->contextPriv().getBackend());
#ifdef SK_VULKAN
// Currently we don't support protected images on vulkan
SkASSERT(!createProtectedImage);
return make_vk_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
deleteCtx, createProtectedImage, backendFormat);
#else
return GrBackendTexture();
#endif
}
}
GrBackendFormat get_backend_format(GrBackend backend, uint32_t bufferFormat) {
@@ -226,13 +453,30 @@ GrBackendFormat get_backend_format(GrBackend backend, uint32_t bufferFormat) {
default:
return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
}
} else if (backend == kVulkan_GrBackend) {
switch (bufferFormat) {
// TODO: find out if we can detect which graphics buffers support GR_GL_TEXTURE_2D
case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
default:
return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
}
}
return GrBackendFormat();
}
sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* context) {
if (context->abandoned() || kOpenGL_GrBackend != context->contextPriv().getBackend()) {
// Check if GrContext is not abandoned and the backend is GL.
if (context->abandoned()) {
return nullptr;
}
@@ -293,9 +537,11 @@ sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* cont
return sk_sp<GrTexture>();
}
sk_sp<GrReleaseProcHelper> releaseProcHelper(
new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
tex->setRelease(releaseProcHelper);
if (deleteImageProc) {
sk_sp<GrReleaseProcHelper> releaseProcHelper(
new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
tex->setRelease(releaseProcHelper);
}
return tex;
},
@@ -333,8 +579,8 @@ bool GrAHardwareBufferImageGenerator::onIsValid(GrContext* context) const {
if (nullptr == context) {
return false; //CPU backend is not supported, because hardware buffer can be swizzled
}
// TODO: add Vulkan support
return kOpenGL_GrBackend == context->contextPriv().getBackend();
return kOpenGL_GrBackend == context->contextPriv().getBackend() ||
kVulkan_GrBackend == context->contextPriv().getBackend();
}
#endif //SK_BUILD_FOR_ANDROID_FRAMEWORK
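As a side note on the TODO about vkGetPhysicalDeviceImageFormatProperties2 in make_vk_backend_texture above: under core Vulkan 1.1 the tiling/import query that comment refers to would look roughly like the sketch below. The helper name is made up and the sketch is not part of this commit.

// Sketch: ask whether optimal tiling is usable for an image that will be bound
// to memory imported from an AHardwareBuffer. Core Vulkan 1.1 structures plus
// the handle type from VK_ANDROID_external_memory_android_hardware_buffer.
static bool can_use_optimal_tiling_for_ahb(VkPhysicalDevice physDev, VkFormat format,
                                           PFN_vkGetPhysicalDeviceImageFormatProperties2 getProps2) {
    VkPhysicalDeviceExternalImageFormatInfo externalInfo = {};
    externalInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
    externalInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

    VkPhysicalDeviceImageFormatInfo2 imageInfo = {};
    imageInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
    imageInfo.pNext = &externalInfo;
    imageInfo.format = format;
    imageInfo.type = VK_IMAGE_TYPE_2D;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT;

    VkExternalImageFormatProperties externalProps = {};
    externalProps.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;

    VkImageFormatProperties2 props = {};
    props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
    props.pNext = &externalProps;

    if (getProps2(physDev, &imageInfo, &props) != VK_SUCCESS) {
        return false;
    }
    return (externalProps.externalMemoryProperties.externalMemoryFeatures &
            VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT) != 0;
}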

View File

@@ -40,6 +40,10 @@ public:
static void DeleteGLTexture(void* ctx);
#ifdef SK_VULKAN
static void DeleteVkImage(void* ctx);
#endif
protected:
bool onIsValid(GrContext*) const override;

View File

@@ -213,6 +213,11 @@ void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface*
fSupportsMemoryRequirements2 = true;
}
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions.hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
fSupportsBindMemory2 = true;
}
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions.hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
fSupportsMaintenance1 = true;
@@ -229,7 +234,7 @@ void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface*
}
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
(extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 3) &&
(extensions.hasExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, 1) &&
this->supportsMemoryRequirements2())) {
fSupportsDedicatedAllocation = true;
}
@@ -243,11 +248,15 @@ void GrVkCaps::init(const GrContextOptions& contextOptions, const GrVkInterface*
}
#ifdef SK_BUILD_FOR_ANDROID
// Currently Adreno devices do not support the QUEUE_FAMILY_FOREIGN_EXTENSION, so until they
// do we don't explicitly require it here even though the spec says it is required.
if (extensions.hasExtension(
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3) &&
extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&
this->supportsExternalMemory()) {
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2) &&
/* extensions.hasExtension(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, 1) &&*/
this->supportsExternalMemory() &&
this->supportsBindMemory2()) {
fSupportsAndroidHWBExternalMemory = true;
fSupportsAHardwareBufferImages = true;
}
#endif

View File

@@ -109,6 +109,10 @@ public:
bool supportsPhysicalDeviceProperties2() const { return fSupportsPhysicalDeviceProperties2; }
// Returns whether the device supports the ability to extend VkMemoryRequirements struct.
bool supportsMemoryRequirements2() const { return fSupportsMemoryRequirements2; }
// Returns whether the device supports the extended vkBindBufferMemory2/vkBindImageMemory2 calls.
bool supportsBindMemory2() const { return fSupportsBindMemory2; }
// Returns whether or not the device supports the various API maintenance fixes to Vulkan 1.0. In
// Vulkan 1.1 all these maintenance fixes are part of the core spec.
bool supportsMaintenance1() const { return fSupportsMaintenance1; }
@@ -219,6 +223,7 @@ private:
bool fSupportsPhysicalDeviceProperties2 = false;
bool fSupportsMemoryRequirements2 = false;
bool fSupportsBindMemory2 = false;
bool fSupportsMaintenance1 = false;
bool fSupportsMaintenance2 = false;
bool fSupportsMaintenance3 = false;

View File

@@ -118,6 +118,7 @@ GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,
, fInterface(std::move(interface))
, fMemoryAllocator(backendContext.fMemoryAllocator)
, fInstance(backendContext.fInstance)
, fPhysicalDevice(backendContext.fPhysicalDevice)
, fDevice(backendContext.fDevice)
, fQueue(backendContext.fQueue)
, fQueueIndex(backendContext.fGraphicsQueueIndex)

View File

@@ -51,6 +51,7 @@ public:
GrVkMemoryAllocator* memoryAllocator() const { return fMemoryAllocator.get(); }
VkPhysicalDevice physicalDevice() const { return fPhysicalDevice; }
VkDevice device() const { return fDevice; }
VkQueue queue() const { return fQueue; }
uint32_t queueIndex() const { return fQueueIndex; }
@@ -233,6 +234,7 @@ private:
sk_sp<GrVkCaps> fVkCaps;
VkInstance fInstance;
VkPhysicalDevice fPhysicalDevice;
VkDevice fDevice;
VkQueue fQueue; // Must be Graphics queue
uint32_t fQueueIndex;

View File

@@ -216,7 +216,7 @@ GrVkImage::~GrVkImage() {
void GrVkImage::releaseImage(const GrVkGpu* gpu) {
if (fInfo.fCurrentQueueFamily != fInitialQueueFamily) {
this->setImageLayout(gpu, fInfo.fImageLayout, 0, 0, false, true);
this->setImageLayout(gpu, this->currentLayout(), 0, 0, false, true);
}
if (fResource) {
fResource->unref(gpu);
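The queue-family handling above (together with the fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL assignment in the generator) is about ownership transfer of the imported image. Roughly, the barrier that setImageLayout ends up recording to hand the image back to the external queue family looks like the sketch below; this is a simplified illustration with made-up names, not the GrVkImage implementation.

// Sketch: release an imported image back to VK_QUEUE_FAMILY_EXTERNAL after the
// graphics queue is done sampling from it. Stage and access masks are illustrative.
void release_to_external_queue_family(VkCommandBuffer cmd, VkImage image,
                                      uint32_t graphicsQueueFamilyIndex,
                                      VkImageLayout currentLayout) {
    VkImageMemoryBarrier barrier = {};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
    barrier.dstAccessMask = 0;
    barrier.oldLayout = currentLayout;   // keep whatever layout the image is in
    barrier.newLayout = currentLayout;
    barrier.srcQueueFamilyIndex = graphicsQueueFamilyIndex;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
    barrier.image = image;
    barrier.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };

    vkCmdPipelineBarrier(cmd,
                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                         0, 0, nullptr, 0, nullptr, 1, &barrier);
}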

View File

@@ -198,6 +198,15 @@ GrVkInterface::GrVkInterface(GrVkGetProc getProc,
ACQUIRE_PROC_SUFFIX(GetImageSparseMemoryRequirements2, KHR, VK_NULL_HANDLE, device);
}
// Functions for VK_KHR_bind_memory2
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
ACQUIRE_PROC(BindBufferMemory2, VK_NULL_HANDLE, device);
ACQUIRE_PROC(BindImageMemory2, VK_NULL_HANDLE, device);
} else if (extensions->hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
ACQUIRE_PROC_SUFFIX(BindBufferMemory2, KHR, VK_NULL_HANDLE, device);
ACQUIRE_PROC_SUFFIX(BindImageMemory2, KHR, VK_NULL_HANDLE, device);
}
// Functions for VK_KHR_maintenance1 or vulkan 1.1
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
ACQUIRE_PROC(TrimCommandPool, VK_NULL_HANDLE, device);
@@ -223,7 +232,7 @@ GrVkInterface::GrVkInterface(GrVkGetProc getProc,
#ifdef SK_BUILD_FOR_ANDROID
// Functions for VK_ANDROID_external_memory_android_hardware_buffer
if (extensions->hasExtension(
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3)) {
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2)) {
ACQUIRE_PROC_SUFFIX(GetAndroidHardwareBufferProperties, ANDROID, VK_NULL_HANDLE, device);
ACQUIRE_PROC_SUFFIX(GetMemoryAndroidHardwareBuffer, ANDROID, VK_NULL_HANDLE, device);
}
@@ -406,6 +415,15 @@ bool GrVkInterface::validate(uint32_t instanceVersion, uint32_t physicalDeviceVe
}
}
// Functions for VK_KHR_bind_memory2
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions->hasExtension(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, 1)) {
if (nullptr == fFunctions.fBindBufferMemory2 ||
nullptr == fFunctions.fBindImageMemory2) {
RETURN_FALSE_INTERFACE
}
}
// Functions for VK_KHR_maintenance1 or vulkan 1.1
if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0) ||
extensions->hasExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, 1)) {
@@ -433,7 +451,7 @@ bool GrVkInterface::validate(uint32_t instanceVersion, uint32_t physicalDeviceVe
#ifdef SK_BUILD_FOR_ANDROID
// Functions for VK_ANDROID_external_memory_android_hardware_buffer
if (extensions->hasExtension(
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 3)) {
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, 2)) {
if (nullptr == fFunctions.fGetAndroidHardwareBufferProperties ||
nullptr == fFunctions.fGetMemoryAndroidHardwareBuffer) {
RETURN_FALSE_INTERFACE
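The ACQUIRE_PROC / ACQUIRE_PROC_SUFFIX pattern in this file picks the core entry point on Vulkan 1.1 devices and falls back to the KHR-suffixed one when only VK_KHR_bind_memory2 is present. Stripped of the macros, the idea for vkBindImageMemory2 is roughly the following sketch; the helper name and parameters are illustrative, not Skia API.

// Sketch: resolve vkBindImageMemory2 either from core Vulkan 1.1 or from the
// VK_KHR_bind_memory2 extension. Both publish the same function signature.
PFN_vkBindImageMemory2 load_bind_image_memory2(VkDevice device,
                                               uint32_t physicalDeviceVersion,
                                               bool hasBindMemory2Extension,
                                               PFN_vkGetDeviceProcAddr getDeviceProcAddr) {
    if (physicalDeviceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
        return reinterpret_cast<PFN_vkBindImageMemory2>(
                getDeviceProcAddr(device, "vkBindImageMemory2"));
    }
    if (hasBindMemory2Extension) {
        // Same entry point, exposed under the extension's suffixed name.
        return reinterpret_cast<PFN_vkBindImageMemory2>(
                getDeviceProcAddr(device, "vkBindImageMemory2KHR"));
    }
    return nullptr;
}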

View File

@@ -206,6 +206,10 @@ public:
VkPtr<PFN_vkGetBufferMemoryRequirements2> fGetBufferMemoryRequirements2;
VkPtr<PFN_vkGetImageSparseMemoryRequirements2> fGetImageSparseMemoryRequirements2;
//Functions for VK_KHR_bind_memory2
VkPtr<PFN_vkBindBufferMemory2> fBindBufferMemory2;
VkPtr<PFN_vkBindImageMemory2> fBindImageMemory2;
// Functions for VK_KHR_maintenance1 or vulkan 1.1
VkPtr<PFN_vkTrimCommandPool> fTrimCommandPool;

View File

@@ -82,7 +82,10 @@ static bool check_read(skiatest::Reporter* reporter, const SkBitmap& expectedBit
ERRORF(reporter, "Expected readback pixel (%d, %d) value 0x%08x, got 0x%08x.",
x, y, srcPixel, dstPixel);
result = false;
}
}/* else {
SkDebugf("Got good pixel (%d, %d) value 0x%08x, got 0x%08x.\n",
x, y, srcPixel, dstPixel);
}*/
}
}
return result;