Revert "Reland "Make GrVkBackendContext no longer derive from SkRefCnt.""

This reverts commit 0db3a8846a.

Reason for revert: fuchsia change reverted

Original change's description:
> Reland "Make GrVkBackendContext no longer derive from SkRefCnt."
>
> This reverts commit 059a9ab4bc.
>
> Reason for revert: Fix landed in Fuchsia
>
> Original change's description:
> > Revert "Make GrVkBackendContext no longer derive from SkRefCnt."
> >
> > This reverts commit 93ae233773.
> >
> > Reason for revert: <INSERT REASONING HERE>
> >
> > Original change's description:
> > > Make GrVkBackendContext no longer derive from SkRefCnt.
> > >
> > > Also moves the helper Create functions to VkTestUtils since no clients
> > > are using them anymore.
> > >
> > > Bug: skia:
> > > Change-Id: I7e8e4912e7ef6fb00a7e2a00407aed5e83211799
> > > Reviewed-on: https://skia-review.googlesource.com/135323
> > > Reviewed-by: Jim Van Verth <jvanverth@google.com>
> > > Reviewed-by: Brian Salomon <bsalomon@google.com>
> > > Commit-Queue: Greg Daniel <egdaniel@google.com>
> >
> > TBR=egdaniel@google.com,jvanverth@google.com,bsalomon@google.com
> >
> > # Not skipping CQ checks because original CL landed > 1 day ago.
> >
> > Bug: skia:
> > Change-Id: If7201917631dc22753ea3fa6e9d2984463e38e4c
> > Reviewed-on: https://skia-review.googlesource.com/137903
> > Reviewed-by: Greg Daniel <egdaniel@google.com>
> > Commit-Queue: Greg Daniel <egdaniel@google.com>
>
> TBR=egdaniel@google.com,jvanverth@google.com,bsalomon@google.com
>
> # Not skipping CQ checks because original CL landed > 1 day ago.
>
> Bug: skia:
> Change-Id: Ia4b7c0bb2c7b5dba809d85c69f0b41b473140526
> Reviewed-on: https://skia-review.googlesource.com/138181
> Reviewed-by: Greg Daniel <egdaniel@google.com>
> Commit-Queue: Greg Daniel <egdaniel@google.com>

TBR=egdaniel@google.com,jvanverth@google.com,bsalomon@google.com

Change-Id: I3f7bc4a5e4e09512fe165303685c123ec3527e58
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: skia:
Reviewed-on: https://skia-review.googlesource.com/138422
Reviewed-by: Greg Daniel <egdaniel@google.com>
Commit-Queue: Greg Daniel <egdaniel@google.com>
parent 3148f802af
commit 007267bdb5
@@ -543,6 +543,7 @@ skia_vk_sources = [
   "$_include/private/GrVkTypesPriv.h",
   "$_src/gpu/vk/GrVkAMDMemoryAllocator.cpp",
   "$_src/gpu/vk/GrVkAMDMemoryAllocator.h",
+  "$_src/gpu/vk/GrVkBackendContext.cpp",
   "$_src/gpu/vk/GrVkBuffer.cpp",
   "$_src/gpu/vk/GrVkBuffer.h",
   "$_src/gpu/vk/GrVkBufferView.cpp",
@@ -71,6 +71,10 @@ public:
 #ifdef SK_VULKAN
     static sk_sp<GrContext> MakeVulkan(const GrVkBackendContext&, const GrContextOptions&);
     static sk_sp<GrContext> MakeVulkan(const GrVkBackendContext&);
+    // These calls that take an sk_sp GrVkBackendContext are deprecated. Use the previous calls and
+    // set fOwnsInstanceAndDevice to false on the GrVkBackendContext.
+    static sk_sp<GrContext> MakeVulkan(sk_sp<const GrVkBackendContext>, const GrContextOptions&);
+    static sk_sp<GrContext> MakeVulkan(sk_sp<const GrVkBackendContext>);
 #endif

 #ifdef SK_METAL
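
For orientation, a minimal sketch (not part of this CL) of how a client might drive the re-added sk_sp overloads; the VkInstance/VkDevice/queue objects and the GrVkInterface are assumed to have been created by the client elsewhere:

    // Hypothetical client setup: wrap externally created Vulkan objects in the
    // ref-counted GrVkBackendContext restored by this revert.
    sk_sp<GrVkBackendContext> backendContext(new GrVkBackendContext());
    backendContext->fInstance = instance;              // assumed: created by the client
    backendContext->fPhysicalDevice = physicalDevice;  // assumed: chosen by the client
    backendContext->fDevice = device;                  // assumed: created by the client
    backendContext->fQueue = graphicsQueue;
    backendContext->fGraphicsQueueIndex = graphicsQueueIndex;
    backendContext->fInterface = interface;            // assumed: a validated GrVkInterface
    backendContext->fOwnsInstanceAndDevice = false;    // client keeps ownership

    sk_sp<GrContext> grContext = GrContext::MakeVulkan(std::move(backendContext));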
@@ -34,11 +34,9 @@ enum GrVkFeatureFlags {
 // is that the client will set these up and pass them to the GrVkGpu constructor. The VkDevice
 // created must support at least one graphics queue, which is passed in as well.
 // The QueueFamilyIndex must match the family of the given queue. It is needed for CommandPool
-// creation, and any GrBackendObjects handed to us (e.g., for wrapped textures) needs to be created
-// in or transitioned to that family. The refs held by members of this struct must be released
-// (either by deleting the struct or manually releasing the refs) before the underlying vulkan
-// device and instance are destroyed.
-struct SK_API GrVkBackendContext {
+// creation, and any GrBackendObjects handed to us (e.g., for wrapped textures) need to be created
+// in or transitioned to that family.
+struct SK_API GrVkBackendContext : public SkRefCnt {
     VkInstance fInstance;
     VkPhysicalDevice fPhysicalDevice;
     VkDevice fDevice;
@@ -50,9 +48,50 @@ struct SK_API GrVkBackendContext {
     sk_sp<const GrVkInterface> fInterface;
     sk_sp<GrVkMemoryAllocator> fMemoryAllocator;

-    // This is deprecated and should be set to false. The client is responsible for managing the
-    // lifetime of the VkInstance and VkDevice objects.
-    bool fOwnsInstanceAndDevice = false;
+    /**
+     * Controls whether this object destroys the instance and device upon destruction. The default
+     * is temporarily 'true' to avoid breaking existing clients but will be changed to 'false'.
+     */
+    bool fOwnsInstanceAndDevice = true;
+
+#if GR_TEST_UTILS || defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
+    using CanPresentFn = std::function<bool(VkInstance, VkPhysicalDevice,
+                                            uint32_t queueFamilyIndex)>;
+
+    /**
+     * Helper function to create the Vulkan objects needed for a Vulkan-backed GrContext.
+     * Note that the version that uses the unified "GetProc" instead of separate "GetInstanceProc"
+     * and "GetDeviceProc" functions will be removed.
+     *
+     * If presentQueueIndex is non-NULL, will try to set up presentQueue as part of device
+     * creation using the platform-specific canPresent() function.
+     *
+     * This will set fOwnsInstanceAndDevice to 'true'. If it is subsequently set to 'false' then
+     * the client owns the lifetime of the created VkDevice and VkInstance.
+     */
+    static const GrVkBackendContext* Create(uint32_t* presentQueueIndex = nullptr,
+                                            CanPresentFn = CanPresentFn(),
+                                            GrVkInterface::GetProc getProc = nullptr);
+
+    static const GrVkBackendContext* Create(const GrVkInterface::GetInstanceProc& getInstanceProc,
+                                            const GrVkInterface::GetDeviceProc& getDeviceProc,
+                                            uint32_t* presentQueueIndex = nullptr,
+                                            CanPresentFn canPresent = CanPresentFn()) {
+        if (!getInstanceProc || !getDeviceProc) {
+            return nullptr;
+        }
+        auto getProc = [&getInstanceProc, &getDeviceProc](const char* proc_name,
+                                                          VkInstance instance, VkDevice device) {
+            if (device != VK_NULL_HANDLE) {
+                return getDeviceProc(device, proc_name);
+            }
+            return getInstanceProc(instance, proc_name);
+        };
+        return Create(presentQueueIndex, canPresent, getProc);
+    }
+#endif // GR_TEST_UTILS || defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
+
+    ~GrVkBackendContext() override;
 };

 #endif
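
As a usage note, a hedged sketch of the test-only Create() helper declared above (GR_TEST_UTILS or Android framework builds only); instProc, devProc and canPresentFn are assumed to come from the client's Vulkan loader setup:

    // Hypothetical test-side usage of the restored helper.
    uint32_t presentQueueIndex;
    sk_sp<const GrVkBackendContext> backendContext(
            GrVkBackendContext::Create(instProc, devProc, &presentQueueIndex, canPresentFn));
    if (!backendContext) {
        // bail out: Vulkan unavailable or setup failed; Create() cleaned up after itself
    }
    // Create() sets fOwnsInstanceAndDevice to true, so releasing the last ref destroys
    // the VkDevice and VkInstance in ~GrVkBackendContext().
    sk_sp<GrContext> grContext = GrContext::MakeVulkan(std::move(backendContext));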
@@ -156,7 +156,43 @@ sk_sp<GrContext> GrContext::MakeVulkan(const GrVkBackendContext& backendContext,
                                        const GrContextOptions& options) {
     sk_sp<GrContext> context(new GrDirectContext(kVulkan_GrBackend));

-    context->fGpu = GrVkGpu::Make(backendContext, options, context.get());
+    sk_sp<GrVkBackendContext> backendContextRef(new GrVkBackendContext());
+    backendContextRef->fInstance = backendContext.fInstance;
+    backendContextRef->fPhysicalDevice = backendContext.fPhysicalDevice;
+    backendContextRef->fDevice = backendContext.fDevice;
+    backendContextRef->fQueue = backendContext.fQueue;
+    backendContextRef->fGraphicsQueueIndex = backendContext.fGraphicsQueueIndex;
+    backendContextRef->fMinAPIVersion = backendContext.fMinAPIVersion;
+    backendContextRef->fExtensions = backendContext.fExtensions;
+    backendContextRef->fFeatures = backendContext.fFeatures;
+    backendContextRef->fInterface = backendContext.fInterface;
+    backendContextRef->fMemoryAllocator = backendContext.fMemoryAllocator;
+
+    SkASSERT(!backendContext.fOwnsInstanceAndDevice);
+    backendContextRef->fOwnsInstanceAndDevice = false;
+
+    context->fGpu = GrVkGpu::Make(std::move(backendContextRef), options, context.get());
+    if (!context->fGpu) {
+        return nullptr;
+    }
+
+    context->fCaps = context->fGpu->refCaps();
+    if (!context->init(options)) {
+        return nullptr;
+    }
+    return context;
+}
+
+sk_sp<GrContext> GrContext::MakeVulkan(sk_sp<const GrVkBackendContext> backendContext) {
+    GrContextOptions defaultOptions;
+    return MakeVulkan(std::move(backendContext), defaultOptions);
+}
+
+sk_sp<GrContext> GrContext::MakeVulkan(sk_sp<const GrVkBackendContext> backendContext,
+                                       const GrContextOptions& options) {
+    sk_sp<GrContext> context(new GrDirectContext(kVulkan_GrBackend));
+
+    context->fGpu = GrVkGpu::Make(std::move(backendContext), options, context.get());
     if (!context->fGpu) {
         return nullptr;
     }
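
A small sketch of the precondition enforced by the SkASSERT in the shim above (the surrounding Vulkan setup is assumed): callers of the struct-based overload are expected to keep ownership of the instance and device, because the shim copies the fields into an internal ref-counted context with fOwnsInstanceAndDevice forced to false.

    // Hypothetical caller of the const-reference overload.
    GrVkBackendContext backendContext;
    // ... fill in fInstance, fPhysicalDevice, fDevice, fQueue, fInterface, etc. ...
    backendContext.fOwnsInstanceAndDevice = false;  // required; the default is now 'true'
    sk_sp<GrContext> grContext = GrContext::MakeVulkan(backendContext, options);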
src/gpu/vk/GrVkBackendContext.cpp (new file, 338 lines)
@@ -0,0 +1,338 @@
+/*
+ * Copyright 2015 Google Inc.
+ *
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include "SkAutoMalloc.h"
+#include "vk/GrVkBackendContext.h"
+#include "vk/GrVkExtensions.h"
+#include "vk/GrVkUtil.h"
+
+#if GR_TEST_UTILS || defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
+
+////////////////////////////////////////////////////////////////////////////////
+// Helper code to set up Vulkan context objects
+
+#ifdef SK_ENABLE_VK_LAYERS
+const char* kDebugLayerNames[] = {
+    // elements of VK_LAYER_LUNARG_standard_validation
+    "VK_LAYER_GOOGLE_threading",
+    "VK_LAYER_LUNARG_parameter_validation",
+    "VK_LAYER_LUNARG_object_tracker",
+    "VK_LAYER_LUNARG_image",
+    "VK_LAYER_LUNARG_core_validation",
+    "VK_LAYER_LUNARG_swapchain",
+    "VK_LAYER_GOOGLE_unique_objects",
+    // not included in standard_validation
+    //"VK_LAYER_LUNARG_api_dump",
+    //"VK_LAYER_LUNARG_vktrace",
+    //"VK_LAYER_LUNARG_screenshot",
+};
+#endif
+
+// the minimum version of Vulkan supported
+#ifdef SK_BUILD_FOR_ANDROID
+const uint32_t kGrVkMinimumVersion = VK_MAKE_VERSION(1, 0, 3);
+#else
+const uint32_t kGrVkMinimumVersion = VK_MAKE_VERSION(1, 0, 8);
+#endif
+
+#define ACQUIRE_VK_PROC(name, instance, device) \
+    PFN_vk##name grVk##name = \
+        reinterpret_cast<PFN_vk##name>(getProc("vk" #name, instance, device)); \
+    if (grVk##name == nullptr) { \
+        SkDebugf("Function ptr for vk%s could not be acquired\n", #name); \
+        return nullptr; \
+    }
+
+// Create the base Vulkan objects needed by the GrVkGpu object
+const GrVkBackendContext* GrVkBackendContext::Create(uint32_t* presentQueueIndexPtr,
+                                                     CanPresentFn canPresent,
+                                                     GrVkInterface::GetProc getProc) {
+    if (!getProc) {
+        return nullptr;
+    }
+    SkASSERT(getProc);
+
+    VkPhysicalDevice physDev;
+    VkDevice device;
+    VkInstance inst;
+    VkResult err;
+
+    const VkApplicationInfo app_info = {
+        VK_STRUCTURE_TYPE_APPLICATION_INFO, // sType
+        nullptr,                            // pNext
+        "vktest",                           // pApplicationName
+        0,                                  // applicationVersion
+        "vktest",                           // pEngineName
+        0,                                  // engineVerison
+        kGrVkMinimumVersion,                // apiVersion
+    };
+
+    GrVkExtensions extensions(getProc);
+    extensions.initInstance(kGrVkMinimumVersion);
+
+    SkTArray<const char*> instanceLayerNames;
+    SkTArray<const char*> instanceExtensionNames;
+    uint32_t extensionFlags = 0;
+#ifdef SK_ENABLE_VK_LAYERS
+    for (size_t i = 0; i < SK_ARRAY_COUNT(kDebugLayerNames); ++i) {
+        if (extensions.hasInstanceLayer(kDebugLayerNames[i])) {
+            instanceLayerNames.push_back(kDebugLayerNames[i]);
+        }
+    }
+    if (extensions.hasInstanceExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+        extensionFlags |= kEXT_debug_report_GrVkExtensionFlag;
+    }
+#endif
+
+    if (extensions.hasInstanceExtension(VK_KHR_SURFACE_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
+        extensionFlags |= kKHR_surface_GrVkExtensionFlag;
+    }
+    if (extensions.hasInstanceExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+        extensionFlags |= kKHR_swapchain_GrVkExtensionFlag;
+    }
+#ifdef SK_BUILD_FOR_WIN
+    if (extensions.hasInstanceExtension(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+        extensionFlags |= kKHR_win32_surface_GrVkExtensionFlag;
+    }
+#elif defined(SK_BUILD_FOR_ANDROID)
+    if (extensions.hasInstanceExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
+        extensionFlags |= kKHR_android_surface_GrVkExtensionFlag;
+    }
+#elif defined(SK_BUILD_FOR_UNIX) && !defined(__Fuchsia__)
+    if (extensions.hasInstanceExtension(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+        extensionFlags |= kKHR_xcb_surface_GrVkExtensionFlag;
+    }
+#endif
+
+    const VkInstanceCreateInfo instance_create = {
+        VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,    // sType
+        nullptr,                                   // pNext
+        0,                                         // flags
+        &app_info,                                 // pApplicationInfo
+        (uint32_t) instanceLayerNames.count(),     // enabledLayerNameCount
+        instanceLayerNames.begin(),                // ppEnabledLayerNames
+        (uint32_t) instanceExtensionNames.count(), // enabledExtensionNameCount
+        instanceExtensionNames.begin(),            // ppEnabledExtensionNames
+    };
+
+    ACQUIRE_VK_PROC(CreateInstance, VK_NULL_HANDLE, VK_NULL_HANDLE);
+    err = grVkCreateInstance(&instance_create, nullptr, &inst);
+    if (err < 0) {
+        SkDebugf("vkCreateInstance failed: %d\n", err);
+        return nullptr;
+    }
+
+    ACQUIRE_VK_PROC(DestroyInstance, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(EnumeratePhysicalDevices, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(GetPhysicalDeviceQueueFamilyProperties, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(GetPhysicalDeviceFeatures, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(CreateDevice, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(GetDeviceQueue, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(DeviceWaitIdle, inst, VK_NULL_HANDLE);
+    ACQUIRE_VK_PROC(DestroyDevice, inst, VK_NULL_HANDLE);
+
+    uint32_t gpuCount;
+    err = grVkEnumeratePhysicalDevices(inst, &gpuCount, nullptr);
+    if (err) {
+        SkDebugf("vkEnumeratePhysicalDevices failed: %d\n", err);
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+    if (!gpuCount) {
+        SkDebugf("vkEnumeratePhysicalDevices returned no supported devices.\n");
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+    // Just returning the first physical device instead of getting the whole array.
+    // TODO: find best match for our needs
+    gpuCount = 1;
+    err = grVkEnumeratePhysicalDevices(inst, &gpuCount, &physDev);
+    // VK_INCOMPLETE is returned when the count we provide is less than the total device count.
+    if (err && VK_INCOMPLETE != err) {
+        SkDebugf("vkEnumeratePhysicalDevices failed: %d\n", err);
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+
+    // query to get the initial queue props size
+    uint32_t queueCount;
+    grVkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, nullptr);
+    if (!queueCount) {
+        SkDebugf("vkGetPhysicalDeviceQueueFamilyProperties returned no queues.\n");
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+
+    SkAutoMalloc queuePropsAlloc(queueCount * sizeof(VkQueueFamilyProperties));
+    // now get the actual queue props
+    VkQueueFamilyProperties* queueProps = (VkQueueFamilyProperties*)queuePropsAlloc.get();
+
+    grVkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, queueProps);
+
+    // iterate to find the graphics queue
+    uint32_t graphicsQueueIndex = queueCount;
+    for (uint32_t i = 0; i < queueCount; i++) {
+        if (queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+            graphicsQueueIndex = i;
+            break;
+        }
+    }
+    if (graphicsQueueIndex == queueCount) {
+        SkDebugf("Could not find any supported graphics queues.\n");
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+
+    // iterate to find the present queue, if needed
+    uint32_t presentQueueIndex = queueCount;
+    if (presentQueueIndexPtr && canPresent) {
+        for (uint32_t i = 0; i < queueCount; i++) {
+            if (canPresent(inst, physDev, i)) {
+                presentQueueIndex = i;
+                break;
+            }
+        }
+        if (presentQueueIndex == queueCount) {
+            SkDebugf("Could not find any supported present queues.\n");
+            grVkDestroyInstance(inst, nullptr);
+            return nullptr;
+        }
+        *presentQueueIndexPtr = presentQueueIndex;
+    } else {
+        // Just setting this so we end up make a single queue for graphics since there was no
+        // request for a present queue.
+        presentQueueIndex = graphicsQueueIndex;
+    }
+
+    extensions.initDevice(kGrVkMinimumVersion, inst, physDev);
+
+    SkTArray<const char*> deviceLayerNames;
+    SkTArray<const char*> deviceExtensionNames;
+#ifdef SK_ENABLE_VK_LAYERS
+    for (size_t i = 0; i < SK_ARRAY_COUNT(kDebugLayerNames); ++i) {
+        if (extensions.hasDeviceLayer(kDebugLayerNames[i])) {
+            deviceLayerNames.push_back(kDebugLayerNames[i]);
+        }
+    }
+#endif
+    if (extensions.hasDeviceExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
+        deviceExtensionNames.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+        extensionFlags |= kKHR_swapchain_GrVkExtensionFlag;
+    }
+    if (extensions.hasDeviceExtension("VK_NV_glsl_shader")) {
+        deviceExtensionNames.push_back("VK_NV_glsl_shader");
+        extensionFlags |= kNV_glsl_shader_GrVkExtensionFlag;
+    }
+
+    // query to get the physical device properties
+    VkPhysicalDeviceFeatures deviceFeatures;
+    grVkGetPhysicalDeviceFeatures(physDev, &deviceFeatures);
+    // this looks like it would slow things down,
+    // and we can't depend on it on all platforms
+    deviceFeatures.robustBufferAccess = VK_FALSE;
+
+    uint32_t featureFlags = 0;
+    if (deviceFeatures.geometryShader) {
+        featureFlags |= kGeometryShader_GrVkFeatureFlag;
+    }
+    if (deviceFeatures.dualSrcBlend) {
+        featureFlags |= kDualSrcBlend_GrVkFeatureFlag;
+    }
+    if (deviceFeatures.sampleRateShading) {
+        featureFlags |= kSampleRateShading_GrVkFeatureFlag;
+    }
+
+    float queuePriorities[1] = { 0.0 };
+    // Here we assume no need for swapchain queue
+    // If one is needed, the client will need its own setup code
+    const VkDeviceQueueCreateInfo queueInfo[2] = {
+        {
+            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // sType
+            nullptr,                                    // pNext
+            0,                                          // VkDeviceQueueCreateFlags
+            graphicsQueueIndex,                         // queueFamilyIndex
+            1,                                          // queueCount
+            queuePriorities,                            // pQueuePriorities
+        },
+        {
+            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // sType
+            nullptr,                                    // pNext
+            0,                                          // VkDeviceQueueCreateFlags
+            presentQueueIndex,                          // queueFamilyIndex
+            1,                                          // queueCount
+            queuePriorities,                            // pQueuePriorities
+        }
+    };
+    uint32_t queueInfoCount = (presentQueueIndex != graphicsQueueIndex) ? 2 : 1;
+
+    const VkDeviceCreateInfo deviceInfo = {
+        VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,    // sType
+        nullptr,                                 // pNext
+        0,                                       // VkDeviceCreateFlags
+        queueInfoCount,                          // queueCreateInfoCount
+        queueInfo,                               // pQueueCreateInfos
+        (uint32_t) deviceLayerNames.count(),     // layerCount
+        deviceLayerNames.begin(),                // ppEnabledLayerNames
+        (uint32_t) deviceExtensionNames.count(), // extensionCount
+        deviceExtensionNames.begin(),            // ppEnabledExtensionNames
+        &deviceFeatures                          // ppEnabledFeatures
+    };
+
+    err = grVkCreateDevice(physDev, &deviceInfo, nullptr, &device);
+    if (err) {
+        SkDebugf("CreateDevice failed: %d\n", err);
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+
+    auto interface =
+        sk_make_sp<GrVkInterface>(getProc, inst, device, extensionFlags);
+    if (!interface->validate(extensionFlags)) {
+        SkDebugf("Vulkan interface validation failed\n");
+        grVkDeviceWaitIdle(device);
+        grVkDestroyDevice(device, nullptr);
+        grVkDestroyInstance(inst, nullptr);
+        return nullptr;
+    }
+
+    VkQueue queue;
+    grVkGetDeviceQueue(device, graphicsQueueIndex, 0, &queue);
+
+    GrVkBackendContext* ctx = new GrVkBackendContext();
+    ctx->fInstance = inst;
+    ctx->fPhysicalDevice = physDev;
+    ctx->fDevice = device;
+    ctx->fQueue = queue;
+    ctx->fGraphicsQueueIndex = graphicsQueueIndex;
+    ctx->fMinAPIVersion = kGrVkMinimumVersion;
+    ctx->fExtensions = extensionFlags;
+    ctx->fFeatures = featureFlags;
+    ctx->fInterface.reset(interface.release());
+    ctx->fOwnsInstanceAndDevice = true;
+
+    return ctx;
+}
+#endif // GR_TEST_UTILS || defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
+
+GrVkBackendContext::~GrVkBackendContext() {
+    fMemoryAllocator.reset();
+    if (fInterface == nullptr || !fOwnsInstanceAndDevice) {
+        return;
+    }
+
+    fInterface->fFunctions.fDeviceWaitIdle(fDevice);
+    fInterface->fFunctions.fDestroyDevice(fDevice, nullptr);
+    fDevice = VK_NULL_HANDLE;
+    fInterface->fFunctions.fDestroyInstance(fInstance, nullptr);
+    fInstance = VK_NULL_HANDLE;
+}
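
For reference, a rough illustration (not part of the CL) of what the ACQUIRE_VK_PROC macro used throughout Create() expands to for one entry point; 'getProc' is the GrVkInterface::GetProc argument:

    // Approximate expansion of ACQUIRE_VK_PROC(CreateInstance, VK_NULL_HANDLE, VK_NULL_HANDLE):
    PFN_vkCreateInstance grVkCreateInstance =
            reinterpret_cast<PFN_vkCreateInstance>(getProc("vkCreateInstance",
                                                           VK_NULL_HANDLE, VK_NULL_HANDLE));
    if (grVkCreateInstance == nullptr) {
        SkDebugf("Function ptr for vk%s could not be acquired\n", "CreateInstance");
        return nullptr;
    }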
@@ -73,38 +73,33 @@ VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(
 }
 #endif

-sk_sp<GrGpu> GrVkGpu::Make(const GrVkBackendContext& backendContext,
+sk_sp<GrGpu> GrVkGpu::Make(sk_sp<const GrVkBackendContext> backendContext,
                            const GrContextOptions& options, GrContext* context) {
-    if (backendContext.fInstance == VK_NULL_HANDLE ||
-        backendContext.fPhysicalDevice == VK_NULL_HANDLE ||
-        backendContext.fDevice == VK_NULL_HANDLE ||
-        backendContext.fQueue == VK_NULL_HANDLE) {
-        return nullptr;
-    }
-    if (!backendContext.fInterface ||
-        !backendContext.fInterface->validate(backendContext.fExtensions)) {
+    if (!backendContext) {
         return nullptr;
     }

-    return sk_sp<GrGpu>(new GrVkGpu(context, options, backendContext));
+    if (!backendContext->fInterface->validate(backendContext->fExtensions)) {
+        return nullptr;
+    }
+
+    return sk_sp<GrGpu>(new GrVkGpu(context, options, std::move(backendContext)));
 }

 ////////////////////////////////////////////////////////////////////////////////

 GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,
-                 const GrVkBackendContext& backendContext)
+                 sk_sp<const GrVkBackendContext> backendCtx)
         : INHERITED(context)
-        , fInterface(std::move(backendContext.fInterface))
-        , fMemoryAllocator(backendContext.fMemoryAllocator)
-        , fInstance(backendContext.fInstance)
-        , fDevice(backendContext.fDevice)
-        , fQueue(backendContext.fQueue)
+        , fBackendContext(std::move(backendCtx))
+        , fMemoryAllocator(fBackendContext->fMemoryAllocator)
+        , fDevice(fBackendContext->fDevice)
+        , fQueue(fBackendContext->fQueue)
         , fResourceProvider(this)
         , fDisconnected(false) {
-    SkASSERT(!backendContext.fOwnsInstanceAndDevice);
 #ifdef SK_ENABLE_VK_LAYERS
     fCallback = VK_NULL_HANDLE;
-    if (backendContext.fExtensions & kEXT_debug_report_GrVkExtensionFlag) {
+    if (fBackendContext->fExtensions & kEXT_debug_report_GrVkExtensionFlag) {
         // Setup callback creation information
         VkDebugReportCallbackCreateInfoEXT callbackCreateInfo;
         callbackCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
@@ -119,32 +114,32 @@ GrVkGpu::GrVkGpu(GrContext* context, const GrContextOptions& options,

         // Register the callback
         GR_VK_CALL_ERRCHECK(this->vkInterface(),
-                            CreateDebugReportCallbackEXT(backendContext.fInstance,
+                            CreateDebugReportCallbackEXT(fBackendContext->fInstance,
                                                          &callbackCreateInfo, nullptr, &fCallback));
     }
 #endif

     if (!fMemoryAllocator) {
         // We were not given a memory allocator at creation
-        fMemoryAllocator.reset(new GrVkAMDMemoryAllocator(backendContext.fPhysicalDevice,
-                                                          fDevice, backendContext.fInterface));
+        fMemoryAllocator.reset(new GrVkAMDMemoryAllocator(fBackendContext->fPhysicalDevice,
+                                                          fDevice, fBackendContext->fInterface));
     }

     fCompiler = new SkSL::Compiler();

-    fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), backendContext.fPhysicalDevice,
-                               backendContext.fFeatures, backendContext.fExtensions));
+    fVkCaps.reset(new GrVkCaps(options, this->vkInterface(), fBackendContext->fPhysicalDevice,
+                               fBackendContext->fFeatures, fBackendContext->fExtensions));
     fCaps.reset(SkRef(fVkCaps.get()));

-    VK_CALL(GetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &fPhysDevProps));
-    VK_CALL(GetPhysicalDeviceMemoryProperties(backendContext.fPhysicalDevice, &fPhysDevMemProps));
+    VK_CALL(GetPhysicalDeviceProperties(fBackendContext->fPhysicalDevice, &fPhysDevProps));
+    VK_CALL(GetPhysicalDeviceMemoryProperties(fBackendContext->fPhysicalDevice, &fPhysDevMemProps));

     const VkCommandPoolCreateInfo cmdPoolInfo = {
         VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, // sType
         nullptr,                                    // pNext
         VK_COMMAND_POOL_CREATE_TRANSIENT_BIT |
         VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, // CmdPoolCreateFlags
-        backendContext.fGraphicsQueueIndex,         // queueFamilyIndex
+        fBackendContext->fGraphicsQueueIndex,       // queueFamilyIndex
     };
     GR_VK_CALL_ERRCHECK(this->vkInterface(), CreateCommandPool(fDevice, &cmdPoolInfo, nullptr,
                                                                &fCmdPool));
@@ -206,15 +201,10 @@ void GrVkGpu::destroyResources() {

 #ifdef SK_ENABLE_VK_LAYERS
     if (fCallback) {
-        VK_CALL(DestroyDebugReportCallbackEXT(fInstance, fCallback, nullptr));
+        VK_CALL(DestroyDebugReportCallbackEXT(fBackendContext->fInstance, fCallback, nullptr));
     }
 #endif

-    fMemoryAllocator.reset();
-
-    fQueue = VK_NULL_HANDLE;
-    fDevice = VK_NULL_HANDLE;
-    fInstance = VK_NULL_HANDLE;
 }

 GrVkGpu::~GrVkGpu() {
@@ -38,13 +38,13 @@ namespace SkSL {

 class GrVkGpu : public GrGpu {
 public:
-    static sk_sp<GrGpu> Make(const GrVkBackendContext&, const GrContextOptions&, GrContext*);
+    static sk_sp<GrGpu> Make(sk_sp<const GrVkBackendContext>, const GrContextOptions&, GrContext*);

     ~GrVkGpu() override;

     void disconnect(DisconnectType) override;

-    const GrVkInterface* vkInterface() const { return fInterface.get(); }
+    const GrVkInterface* vkInterface() const { return fBackendContext->fInterface.get(); }
     const GrVkCaps& vkCaps() const { return *fVkCaps; }

     GrVkMemoryAllocator* memoryAllocator() const { return fMemoryAllocator.get(); }
@@ -144,7 +144,7 @@ public:
     bool updateBuffer(GrVkBuffer* buffer, const void* src, VkDeviceSize offset, VkDeviceSize size);

 private:
-    GrVkGpu(GrContext*, const GrContextOptions&, const GrVkBackendContext& backendContext);
+    GrVkGpu(GrContext*, const GrContextOptions&, sk_sp<const GrVkBackendContext> backendContext);

     void onResetContext(uint32_t resetBits) override {}

@@ -222,27 +222,28 @@ private:
                                 GrVkImageInfo* info);
 #endif

-    sk_sp<const GrVkInterface> fInterface;
+    sk_sp<const GrVkBackendContext> fBackendContext;
     sk_sp<GrVkMemoryAllocator> fMemoryAllocator;
     sk_sp<GrVkCaps> fVkCaps;

-    VkInstance fInstance;
-    VkDevice fDevice;
-    VkQueue fQueue; // Must be Graphics queue
+    // These Vulkan objects are provided by the client, and also stored in fBackendContext.
+    // They're copied here for convenient access.
+    VkDevice fDevice;
+    VkQueue fQueue; // Must be Graphics queue

     // Created by GrVkGpu
     GrVkResourceProvider fResourceProvider;
     VkCommandPool fCmdPool;

     GrVkPrimaryCommandBuffer* fCurrentCmdBuffer;

     SkSTArray<1, GrVkSemaphore::Resource*> fSemaphoresToWaitOn;
     SkSTArray<1, GrVkSemaphore::Resource*> fSemaphoresToSignal;

     VkPhysicalDeviceProperties fPhysDevProps;
     VkPhysicalDeviceMemoryProperties fPhysDevMemProps;

     GrVkCopyManager fCopyManager;

 #ifdef SK_ENABLE_VK_LAYERS
     // For reporting validation layer errors
@@ -251,11 +252,11 @@ private:

     // compiler used for compiling sksl into spirv. We only want to create the compiler once since
     // there is significant overhead to the first compile of any compiler.
     SkSL::Compiler* fCompiler;

     // We need a bool to track whether or not we've already disconnected all the gpu resources from
     // vulkan context.
     bool fDisconnected;

     typedef GrGpu INHERITED;
 };
@@ -110,24 +110,21 @@ GR_STATIC_ASSERT(sizeof(VkFence) <= sizeof(sk_gpu_test::PlatformFence));
 class VkTestContextImpl : public sk_gpu_test::VkTestContext {
 public:
     static VkTestContext* Create(VkTestContext* sharedContext) {
-        GrVkBackendContext backendContext;
-        bool ownsContext = true;
+        sk_sp<const GrVkBackendContext> backendContext;
         if (sharedContext) {
             backendContext = sharedContext->getVkBackendContext();
-            // We always delete the parent context last so make sure the child does not think they
-            // own the vulkan context.
-            ownsContext = false;
         } else {
             PFN_vkGetInstanceProcAddr instProc;
             PFN_vkGetDeviceProcAddr devProc;
             if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
                 return nullptr;
             }
-            if (!sk_gpu_test::CreateVkBackendContext(instProc, devProc, &backendContext)) {
-                return nullptr;
-            }
+            backendContext.reset(GrVkBackendContext::Create(instProc, devProc));
         }
-        return new VkTestContextImpl(backendContext, ownsContext);
+        if (!backendContext) {
+            return nullptr;
+        }
+        return new VkTestContextImpl(std::move(backendContext));
     }

     ~VkTestContextImpl() override { this->teardown(); }
@@ -146,19 +143,14 @@ public:
 protected:
     void teardown() override {
         INHERITED::teardown();
-        fVk.fMemoryAllocator.reset();
-        if (fOwnsContext) {
-            GR_VK_CALL(this->vk(), DeviceWaitIdle(fVk.fDevice));
-            GR_VK_CALL(this->vk(), DestroyDevice(fVk.fDevice, nullptr));
-            GR_VK_CALL(this->vk(), DestroyInstance(fVk.fInstance, nullptr));
-        }
+        fVk.reset(nullptr);
     }

 private:
-    VkTestContextImpl(const GrVkBackendContext& backendContext, bool ownsContext)
-            : VkTestContext(backendContext, ownsContext) {
-        fFenceSync.reset(new VkFenceSync(fVk.fInterface, fVk.fDevice, fVk.fQueue,
-                                         fVk.fGraphicsQueueIndex));
+    VkTestContextImpl(sk_sp<const GrVkBackendContext> backendContext)
+            : VkTestContext(std::move(backendContext)) {
+        fFenceSync.reset(new VkFenceSync(fVk->fInterface, fVk->fDevice, fVk->fQueue,
+                                         fVk->fGraphicsQueueIndex));
     }

     void onPlatformMakeCurrent() const override {}
@@ -19,18 +19,16 @@ class VkTestContext : public TestContext {
 public:
     virtual GrBackend backend() override { return kVulkan_GrBackend; }

-    const GrVkBackendContext& getVkBackendContext() {
+    sk_sp<const GrVkBackendContext> getVkBackendContext() {
         return fVk;
     }

-    const GrVkInterface* vk() const { return fVk.fInterface.get(); }
+    const GrVkInterface* vk() const { return fVk->fInterface.get(); }

 protected:
-    VkTestContext(const GrVkBackendContext& vk, bool ownsContext)
-            : fVk(vk), fOwnsContext(ownsContext) {}
+    VkTestContext(sk_sp<const GrVkBackendContext> vk) : fVk(std::move(vk)) {}

-    GrVkBackendContext fVk;
-    bool fOwnsContext;
+    sk_sp<const GrVkBackendContext> fVk;

 private:
     typedef TestContext INHERITED;
@@ -9,9 +9,6 @@

 #ifdef SK_VULKAN

-#include "SkAutoMalloc.h"
-#include "vk/GrVkBackendContext.h"
-#include "vk/GrVkExtensions.h"
 #include "../ports/SkOSLibrary.h"

 namespace sk_gpu_test {
@@ -49,323 +46,6 @@ bool LoadVkLibraryAndGetProcAddrFuncs(PFN_vkGetInstanceProcAddr* instProc,
     return true;
 #endif
 }
-
-////////////////////////////////////////////////////////////////////////////////
-// Helper code to set up Vulkan context objects
-
-#ifdef SK_ENABLE_VK_LAYERS
-const char* kDebugLayerNames[] = {
-    // elements of VK_LAYER_LUNARG_standard_validation
-    "VK_LAYER_GOOGLE_threading",
-    "VK_LAYER_LUNARG_parameter_validation",
-    "VK_LAYER_LUNARG_object_tracker",
-    "VK_LAYER_LUNARG_image",
-    "VK_LAYER_LUNARG_core_validation",
-    "VK_LAYER_LUNARG_swapchain",
-    "VK_LAYER_GOOGLE_unique_objects",
-    // not included in standard_validation
-    //"VK_LAYER_LUNARG_api_dump",
-    //"VK_LAYER_LUNARG_vktrace",
-    //"VK_LAYER_LUNARG_screenshot",
-};
-#endif
-
-// the minimum version of Vulkan supported
-#ifdef SK_BUILD_FOR_ANDROID
-const uint32_t kGrVkMinimumVersion = VK_MAKE_VERSION(1, 0, 3);
-#else
-const uint32_t kGrVkMinimumVersion = VK_MAKE_VERSION(1, 0, 8);
-#endif
-
-#define ACQUIRE_VK_PROC(name, instance, device) \
-    PFN_vk##name grVk##name = \
-        reinterpret_cast<PFN_vk##name>(getProc("vk" #name, instance, device)); \
-    if (grVk##name == nullptr) { \
-        SkDebugf("Function ptr for vk%s could not be acquired\n", #name); \
-        return false; \
-    }
-
-bool CreateVkBackendContext(const GrVkInterface::GetInstanceProc& getInstanceProc,
-                            const GrVkInterface::GetDeviceProc& getDeviceProc,
-                            GrVkBackendContext* ctx,
-                            uint32_t* presentQueueIndexPtr,
-                            CanPresentFn canPresent) {
-    auto getProc = [&getInstanceProc, &getDeviceProc](const char* proc_name,
-                                                      VkInstance instance, VkDevice device) {
-        if (device != VK_NULL_HANDLE) {
-            return getDeviceProc(device, proc_name);
-        }
-        return getInstanceProc(instance, proc_name);
-    };
-
-    VkPhysicalDevice physDev;
-    VkDevice device;
-    VkInstance inst;
-    VkResult err;
-
|
|
||||||
VK_STRUCTURE_TYPE_APPLICATION_INFO, // sType
|
|
||||||
nullptr, // pNext
|
|
||||||
"vktest", // pApplicationName
|
|
||||||
0, // applicationVersion
|
|
||||||
"vktest", // pEngineName
|
|
||||||
0, // engineVerison
|
|
||||||
kGrVkMinimumVersion, // apiVersion
|
|
||||||
};
|
|
||||||
|
|
||||||
GrVkExtensions extensions(getProc);
|
|
||||||
extensions.initInstance(kGrVkMinimumVersion);
|
|
||||||
|
|
||||||
SkTArray<const char*> instanceLayerNames;
|
|
||||||
SkTArray<const char*> instanceExtensionNames;
|
|
||||||
uint32_t extensionFlags = 0;
|
|
||||||
#ifdef SK_ENABLE_VK_LAYERS
|
|
||||||
for (size_t i = 0; i < SK_ARRAY_COUNT(kDebugLayerNames); ++i) {
|
|
||||||
if (extensions.hasInstanceLayer(kDebugLayerNames[i])) {
|
|
||||||
instanceLayerNames.push_back(kDebugLayerNames[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (extensions.hasInstanceExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kEXT_debug_report_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
if (extensions.hasInstanceExtension(VK_KHR_SURFACE_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kKHR_surface_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
if (extensions.hasInstanceExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kKHR_swapchain_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
#ifdef SK_BUILD_FOR_WIN
|
|
||||||
if (extensions.hasInstanceExtension(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kKHR_win32_surface_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
#elif defined(SK_BUILD_FOR_ANDROID)
|
|
||||||
if (extensions.hasInstanceExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kKHR_android_surface_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
#elif defined(SK_BUILD_FOR_UNIX) && !defined(__Fuchsia__)
|
|
||||||
if (extensions.hasInstanceExtension(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
|
|
||||||
instanceExtensionNames.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
|
|
||||||
extensionFlags |= kKHR_xcb_surface_GrVkExtensionFlag;
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
const VkInstanceCreateInfo instance_create = {
|
|
||||||
VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, // sType
|
|
||||||
nullptr, // pNext
|
|
||||||
0, // flags
|
|
||||||
&app_info, // pApplicationInfo
|
|
||||||
(uint32_t) instanceLayerNames.count(), // enabledLayerNameCount
|
|
||||||
instanceLayerNames.begin(), // ppEnabledLayerNames
|
|
||||||
(uint32_t) instanceExtensionNames.count(), // enabledExtensionNameCount
|
|
||||||
instanceExtensionNames.begin(), // ppEnabledExtensionNames
|
|
||||||
};
|
|
||||||
|
|
||||||
ACQUIRE_VK_PROC(CreateInstance, VK_NULL_HANDLE, VK_NULL_HANDLE);
|
|
||||||
err = grVkCreateInstance(&instance_create, nullptr, &inst);
|
|
||||||
if (err < 0) {
|
|
||||||
SkDebugf("vkCreateInstance failed: %d\n", err);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
ACQUIRE_VK_PROC(DestroyInstance, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(EnumeratePhysicalDevices, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(GetPhysicalDeviceQueueFamilyProperties, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(GetPhysicalDeviceFeatures, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(CreateDevice, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(GetDeviceQueue, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(DeviceWaitIdle, inst, VK_NULL_HANDLE);
|
|
||||||
ACQUIRE_VK_PROC(DestroyDevice, inst, VK_NULL_HANDLE);
|
|
||||||
|
|
||||||
uint32_t gpuCount;
|
|
||||||
err = grVkEnumeratePhysicalDevices(inst, &gpuCount, nullptr);
|
|
||||||
if (err) {
|
|
||||||
SkDebugf("vkEnumeratePhysicalDevices failed: %d\n", err);
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (!gpuCount) {
|
|
||||||
SkDebugf("vkEnumeratePhysicalDevices returned no supported devices.\n");
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// Just returning the first physical device instead of getting the whole array.
|
|
||||||
// TODO: find best match for our needs
|
|
||||||
gpuCount = 1;
|
|
||||||
err = grVkEnumeratePhysicalDevices(inst, &gpuCount, &physDev);
|
|
||||||
// VK_INCOMPLETE is returned when the count we provide is less than the total device count.
|
|
||||||
if (err && VK_INCOMPLETE != err) {
|
|
||||||
SkDebugf("vkEnumeratePhysicalDevices failed: %d\n", err);
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// query to get the initial queue props size
|
|
||||||
uint32_t queueCount;
|
|
||||||
grVkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, nullptr);
|
|
||||||
if (!queueCount) {
|
|
||||||
SkDebugf("vkGetPhysicalDeviceQueueFamilyProperties returned no queues.\n");
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
SkAutoMalloc queuePropsAlloc(queueCount * sizeof(VkQueueFamilyProperties));
|
|
||||||
// now get the actual queue props
|
|
||||||
VkQueueFamilyProperties* queueProps = (VkQueueFamilyProperties*)queuePropsAlloc.get();
|
|
||||||
|
|
||||||
grVkGetPhysicalDeviceQueueFamilyProperties(physDev, &queueCount, queueProps);
|
|
||||||
|
|
||||||
// iterate to find the graphics queue
|
|
||||||
uint32_t graphicsQueueIndex = queueCount;
|
|
||||||
for (uint32_t i = 0; i < queueCount; i++) {
|
|
||||||
if (queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
|
|
||||||
graphicsQueueIndex = i;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (graphicsQueueIndex == queueCount) {
|
|
||||||
SkDebugf("Could not find any supported graphics queues.\n");
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// iterate to find the present queue, if needed
|
|
||||||
uint32_t presentQueueIndex = queueCount;
|
|
||||||
if (presentQueueIndexPtr && canPresent) {
|
|
||||||
for (uint32_t i = 0; i < queueCount; i++) {
|
|
||||||
if (canPresent(inst, physDev, i)) {
|
|
||||||
presentQueueIndex = i;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (presentQueueIndex == queueCount) {
|
|
||||||
SkDebugf("Could not find any supported present queues.\n");
|
|
||||||
grVkDestroyInstance(inst, nullptr);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
*presentQueueIndexPtr = presentQueueIndex;
|
|
||||||
} else {
|
|
||||||
// Just setting this so we end up make a single queue for graphics since there was no
|
|
||||||
// request for a present queue.
|
|
||||||
presentQueueIndex = graphicsQueueIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
-    extensions.initDevice(kGrVkMinimumVersion, inst, physDev);
-
-    SkTArray<const char*> deviceLayerNames;
-    SkTArray<const char*> deviceExtensionNames;
-#ifdef SK_ENABLE_VK_LAYERS
-    for (size_t i = 0; i < SK_ARRAY_COUNT(kDebugLayerNames); ++i) {
-        if (extensions.hasDeviceLayer(kDebugLayerNames[i])) {
-            deviceLayerNames.push_back(kDebugLayerNames[i]);
-        }
-    }
-#endif
-    if (extensions.hasDeviceExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
-        deviceExtensionNames.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
-        extensionFlags |= kKHR_swapchain_GrVkExtensionFlag;
-    }
-    if (extensions.hasDeviceExtension("VK_NV_glsl_shader")) {
-        deviceExtensionNames.push_back("VK_NV_glsl_shader");
-        extensionFlags |= kNV_glsl_shader_GrVkExtensionFlag;
-    }
-
-    // query to get the physical device properties
-    VkPhysicalDeviceFeatures deviceFeatures;
-    grVkGetPhysicalDeviceFeatures(physDev, &deviceFeatures);
-    // this looks like it would slow things down,
-    // and we can't depend on it on all platforms
-    deviceFeatures.robustBufferAccess = VK_FALSE;
-
-    uint32_t featureFlags = 0;
-    if (deviceFeatures.geometryShader) {
-        featureFlags |= kGeometryShader_GrVkFeatureFlag;
-    }
-    if (deviceFeatures.dualSrcBlend) {
-        featureFlags |= kDualSrcBlend_GrVkFeatureFlag;
-    }
-    if (deviceFeatures.sampleRateShading) {
-        featureFlags |= kSampleRateShading_GrVkFeatureFlag;
-    }
-
-    float queuePriorities[1] = { 0.0 };
-    // Here we assume no need for swapchain queue
-    // If one is needed, the client will need its own setup code
-    const VkDeviceQueueCreateInfo queueInfo[2] = {
-        {
-            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // sType
-            nullptr,                                    // pNext
-            0,                                          // VkDeviceQueueCreateFlags
-            graphicsQueueIndex,                         // queueFamilyIndex
-            1,                                          // queueCount
-            queuePriorities,                            // pQueuePriorities
-        },
-        {
-            VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, // sType
-            nullptr,                                    // pNext
-            0,                                          // VkDeviceQueueCreateFlags
-            presentQueueIndex,                          // queueFamilyIndex
-            1,                                          // queueCount
-            queuePriorities,                            // pQueuePriorities
-        }
-    };
-    uint32_t queueInfoCount = (presentQueueIndex != graphicsQueueIndex) ? 2 : 1;
-
-    const VkDeviceCreateInfo deviceInfo = {
-        VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,    // sType
-        nullptr,                                 // pNext
-        0,                                       // VkDeviceCreateFlags
-        queueInfoCount,                          // queueCreateInfoCount
-        queueInfo,                               // pQueueCreateInfos
-        (uint32_t) deviceLayerNames.count(),     // layerCount
-        deviceLayerNames.begin(),                // ppEnabledLayerNames
-        (uint32_t) deviceExtensionNames.count(), // extensionCount
-        deviceExtensionNames.begin(),            // ppEnabledExtensionNames
-        &deviceFeatures                          // ppEnabledFeatures
-    };
-
-    err = grVkCreateDevice(physDev, &deviceInfo, nullptr, &device);
-    if (err) {
-        SkDebugf("CreateDevice failed: %d\n", err);
-        grVkDestroyInstance(inst, nullptr);
-        return false;
-    }
-
-    auto interface =
-        sk_make_sp<GrVkInterface>(getProc, inst, device, extensionFlags);
-    if (!interface->validate(extensionFlags)) {
-        SkDebugf("Vulkan interface validation failed\n");
-        grVkDeviceWaitIdle(device);
-        grVkDestroyDevice(device, nullptr);
-        grVkDestroyInstance(inst, nullptr);
-        return false;
-    }
-
-    VkQueue queue;
-    grVkGetDeviceQueue(device, graphicsQueueIndex, 0, &queue);
-
-    ctx->fInstance = inst;
-    ctx->fPhysicalDevice = physDev;
-    ctx->fDevice = device;
-    ctx->fQueue = queue;
-    ctx->fGraphicsQueueIndex = graphicsQueueIndex;
-    ctx->fMinAPIVersion = kGrVkMinimumVersion;
-    ctx->fExtensions = extensionFlags;
-    ctx->fFeatures = featureFlags;
-    ctx->fInterface.reset(interface.release());
-    ctx->fOwnsInstanceAndDevice = false;
-
-    return true;
-}
 }

 #endif
@@ -13,21 +13,9 @@
 #ifdef SK_VULKAN

 #include "vk/GrVkDefines.h"
-#include "vk/GrVkInterface.h"
-
-struct GrVkBackendContext;

 namespace sk_gpu_test {
     bool LoadVkLibraryAndGetProcAddrFuncs(PFN_vkGetInstanceProcAddr*, PFN_vkGetDeviceProcAddr*);
-
-    using CanPresentFn = std::function<bool(VkInstance, VkPhysicalDevice,
-                                            uint32_t queueFamilyIndex)>;
-
-    bool CreateVkBackendContext(const GrVkInterface::GetInstanceProc& getInstanceProc,
-                                const GrVkInterface::GetDeviceProc& getDeviceProc,
-                                GrVkBackendContext* ctx,
-                                uint32_t* presentQueueIndexPtr = nullptr,
-                                CanPresentFn canPresent = CanPresentFn());
 }

 #endif
@@ -13,6 +13,7 @@
 #include "VulkanWindowContext.h"

 #include "vk/GrVkImage.h"
+#include "vk/GrVkInterface.h"
 #include "vk/GrVkUtil.h"
 #include "vk/GrVkTypes.h"

@@ -21,8 +22,8 @@
 #undef CreateSemaphore
 #endif

-#define GET_PROC(F) f ## F = (PFN_vk ## F) fGetInstanceProcAddr(fInstance, "vk" #F)
-#define GET_DEV_PROC(F) f ## F = (PFN_vk ## F) fGetDeviceProcAddr(fDevice, "vk" #F)
+#define GET_PROC(F) f ## F = (PFN_vk ## F) fGetInstanceProcAddr(instance, "vk" #F)
+#define GET_DEV_PROC(F) f ## F = (PFN_vk ## F) fGetDeviceProcAddr(device, "vk" #F)

 namespace sk_app {

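
For clarity, a hedged expansion of the updated GET_PROC/GET_DEV_PROC macros as used below, assuming the local 'instance' and 'device' variables introduced in initializeContext():

    // GET_PROC(DestroySurfaceKHR) now expands roughly to:
    fDestroySurfaceKHR =
            (PFN_vkDestroySurfaceKHR) fGetInstanceProcAddr(instance, "vkDestroySurfaceKHR");
    // GET_DEV_PROC(CreateSwapchainKHR) now expands roughly to:
    fCreateSwapchainKHR =
            (PFN_vkCreateSwapchainKHR) fGetDeviceProcAddr(device, "vkCreateSwapchainKHR");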
@@ -48,34 +49,22 @@ VulkanWindowContext::VulkanWindowContext(const DisplayParams& params,

 void VulkanWindowContext::initializeContext() {
     // any config code here (particularly for msaa)?
-    GrVkBackendContext backendContext;
-    if (!sk_gpu_test::CreateVkBackendContext(fGetInstanceProcAddr, fGetDeviceProcAddr,
-                                             &backendContext, &fPresentQueueIndex, fCanPresentFn)) {
+    fBackendContext.reset(GrVkBackendContext::Create(fGetInstanceProcAddr, fGetDeviceProcAddr,
+                                                     &fPresentQueueIndex, fCanPresentFn));
+
+    if (!(fBackendContext->fExtensions & kKHR_surface_GrVkExtensionFlag) ||
+        !(fBackendContext->fExtensions & kKHR_swapchain_GrVkExtensionFlag)) {
+        fBackendContext.reset(nullptr);
         return;
     }

-    if (!(backendContext.fExtensions & kKHR_surface_GrVkExtensionFlag) ||
-        !(backendContext.fExtensions & kKHR_swapchain_GrVkExtensionFlag)) {
-        return;
-    }
-
-    fInstance = backendContext.fInstance;
-    fPhysicalDevice = backendContext.fPhysicalDevice;
-    fDevice = backendContext.fDevice;
-    fGraphicsQueueIndex = backendContext.fGraphicsQueueIndex;
-    fGraphicsQueue = backendContext.fQueue;
-    fInterface = backendContext.fInterface;
-
-    GET_PROC(DestroyInstance);
+    VkInstance instance = fBackendContext->fInstance;
+    VkDevice device = fBackendContext->fDevice;
+
     GET_PROC(DestroySurfaceKHR);
     GET_PROC(GetPhysicalDeviceSurfaceSupportKHR);
     GET_PROC(GetPhysicalDeviceSurfaceCapabilitiesKHR);
     GET_PROC(GetPhysicalDeviceSurfaceFormatsKHR);
     GET_PROC(GetPhysicalDeviceSurfacePresentModesKHR);
-    GET_DEV_PROC(DeviceWaitIdle);
-    GET_DEV_PROC(QueueWaitIdle);
-    GET_DEV_PROC(DestroyDevice);
     GET_DEV_PROC(CreateSwapchainKHR);
     GET_DEV_PROC(DestroySwapchainKHR);
     GET_DEV_PROC(GetSwapchainImagesKHR);
|
|||||||
GET_DEV_PROC(QueuePresentKHR);
|
GET_DEV_PROC(QueuePresentKHR);
|
||||||
GET_DEV_PROC(GetDeviceQueue);
|
GET_DEV_PROC(GetDeviceQueue);
|
||||||
|
|
||||||
fContext = GrContext::MakeVulkan(backendContext, fDisplayParams.fGrContextOptions);
|
fContext = GrContext::MakeVulkan(fBackendContext, fDisplayParams.fGrContextOptions);
|
||||||
|
|
||||||
fSurface = fCreateVkSurfaceFn(fInstance);
|
fSurface = fCreateVkSurfaceFn(instance);
|
||||||
if (VK_NULL_HANDLE == fSurface) {
|
if (VK_NULL_HANDLE == fSurface) {
|
||||||
this->destroyContext();
|
fBackendContext.reset(nullptr);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
VkBool32 supported;
|
VkBool32 supported;
|
||||||
VkResult res = fGetPhysicalDeviceSurfaceSupportKHR(fPhysicalDevice, fPresentQueueIndex,
|
VkResult res = fGetPhysicalDeviceSurfaceSupportKHR(fBackendContext->fPhysicalDevice,
|
||||||
fSurface, &supported);
|
fPresentQueueIndex, fSurface,
|
||||||
|
&supported);
|
||||||
if (VK_SUCCESS != res) {
|
if (VK_SUCCESS != res) {
|
||||||
this->destroyContext();
|
this->destroyContext();
|
||||||
return;
|
return;
|
||||||
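The MakeVulkan call above is the point where the window context and Skia start sharing the ref-counted backend context again. Below is a minimal sketch of that creation flow, assuming GrVkBackendContext::Create() returns a pointer that can be adopted by sk_sp<const GrVkBackendContext> (as the reset() call in the previous hunk implies); the function name, parameter names and error handling here are illustrative only, not part of the CL:

#include "GrContext.h"
#include "GrContextOptions.h"
#include "vk/GrVkBackendContext.h"

// Sketch: build a ref-counted backend context and hand the same sk_sp to GrContext,
// mirroring what VulkanWindowContext::initializeContext() does above.
sk_sp<GrContext> MakeVulkanGrContext(PFN_vkGetInstanceProcAddr getInstanceProc,
                                     PFN_vkGetDeviceProcAddr getDeviceProc,
                                     GrVkBackendContext::CanPresentFn canPresent,
                                     const GrContextOptions& options) {
    uint32_t presentQueueIndex;
    sk_sp<const GrVkBackendContext> backendContext(
            GrVkBackendContext::Create(getInstanceProc, getDeviceProc,
                                       &presentQueueIndex, canPresent));
    if (!backendContext) {
        return nullptr;  // instance/device creation failed
    }
    // Both the caller and the GrContext now hold a reference to the backend context.
    return GrContext::MakeVulkan(backendContext, options);
}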
@@ -105,44 +95,45 @@ void VulkanWindowContext::initializeContext() {
     }

     // create presentQueue
-    fGetDeviceQueue(fDevice, fPresentQueueIndex, 0, &fPresentQueue);
+    fGetDeviceQueue(fBackendContext->fDevice, fPresentQueueIndex, 0, &fPresentQueue);
 }

 bool VulkanWindowContext::createSwapchain(int width, int height,
                                           const DisplayParams& params) {
     // check for capabilities
     VkSurfaceCapabilitiesKHR caps;
-    VkResult res = fGetPhysicalDeviceSurfaceCapabilitiesKHR(fPhysicalDevice, fSurface, &caps);
+    VkResult res = fGetPhysicalDeviceSurfaceCapabilitiesKHR(fBackendContext->fPhysicalDevice,
+                                                            fSurface, &caps);
     if (VK_SUCCESS != res) {
         return false;
     }

     uint32_t surfaceFormatCount;
-    res = fGetPhysicalDeviceSurfaceFormatsKHR(fPhysicalDevice, fSurface, &surfaceFormatCount,
-                                              nullptr);
+    res = fGetPhysicalDeviceSurfaceFormatsKHR(fBackendContext->fPhysicalDevice, fSurface,
+                                              &surfaceFormatCount, nullptr);
     if (VK_SUCCESS != res) {
         return false;
     }

     SkAutoMalloc surfaceFormatAlloc(surfaceFormatCount * sizeof(VkSurfaceFormatKHR));
     VkSurfaceFormatKHR* surfaceFormats = (VkSurfaceFormatKHR*)surfaceFormatAlloc.get();
-    res = fGetPhysicalDeviceSurfaceFormatsKHR(fPhysicalDevice, fSurface, &surfaceFormatCount,
-                                              surfaceFormats);
+    res = fGetPhysicalDeviceSurfaceFormatsKHR(fBackendContext->fPhysicalDevice, fSurface,
+                                              &surfaceFormatCount, surfaceFormats);
     if (VK_SUCCESS != res) {
         return false;
     }

     uint32_t presentModeCount;
-    res = fGetPhysicalDeviceSurfacePresentModesKHR(fPhysicalDevice, fSurface, &presentModeCount,
-                                                   nullptr);
+    res = fGetPhysicalDeviceSurfacePresentModesKHR(fBackendContext->fPhysicalDevice, fSurface,
+                                                   &presentModeCount, nullptr);
     if (VK_SUCCESS != res) {
         return false;
     }

     SkAutoMalloc presentModeAlloc(presentModeCount * sizeof(VkPresentModeKHR));
     VkPresentModeKHR* presentModes = (VkPresentModeKHR*)presentModeAlloc.get();
-    res = fGetPhysicalDeviceSurfacePresentModesKHR(fPhysicalDevice, fSurface, &presentModeCount,
-                                                   presentModes);
+    res = fGetPhysicalDeviceSurfacePresentModesKHR(fBackendContext->fPhysicalDevice, fSurface,
+                                                   &presentModeCount, presentModes);
     if (VK_SUCCESS != res) {
         return false;
     }
@@ -243,8 +234,8 @@ bool VulkanWindowContext::createSwapchain(int width, int height,
     swapchainCreateInfo.imageArrayLayers = 1;
     swapchainCreateInfo.imageUsage = usageFlags;

-    uint32_t queueFamilies[] = { fGraphicsQueueIndex, fPresentQueueIndex };
-    if (fGraphicsQueueIndex != fPresentQueueIndex) {
+    uint32_t queueFamilies[] = { fBackendContext->fGraphicsQueueIndex, fPresentQueueIndex };
+    if (fBackendContext->fGraphicsQueueIndex != fPresentQueueIndex) {
         swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
         swapchainCreateInfo.queueFamilyIndexCount = 2;
         swapchainCreateInfo.pQueueFamilyIndices = queueFamilies;
@@ -260,18 +251,18 @@ bool VulkanWindowContext::createSwapchain(int width, int height,
     swapchainCreateInfo.clipped = true;
     swapchainCreateInfo.oldSwapchain = fSwapchain;

-    res = fCreateSwapchainKHR(fDevice, &swapchainCreateInfo, nullptr, &fSwapchain);
+    res = fCreateSwapchainKHR(fBackendContext->fDevice, &swapchainCreateInfo, nullptr, &fSwapchain);
     if (VK_SUCCESS != res) {
         return false;
     }

     // destroy the old swapchain
     if (swapchainCreateInfo.oldSwapchain != VK_NULL_HANDLE) {
-        fDeviceWaitIdle(fDevice);
+        GR_VK_CALL(fBackendContext->fInterface, DeviceWaitIdle(fBackendContext->fDevice));

         this->destroyBuffers();

-        fDestroySwapchainKHR(fDevice, swapchainCreateInfo.oldSwapchain, nullptr);
+        fDestroySwapchainKHR(fBackendContext->fDevice, swapchainCreateInfo.oldSwapchain, nullptr);
     }

     this->createBuffers(swapchainCreateInfo.imageFormat, colorType);
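From here on, Vulkan calls that previously went through the window context's own VkPtr members are routed through fBackendContext->fInterface with GR_VK_CALL / GR_VK_CALL_ERRCHECK. Those macros live in GrVkUtil.h (included earlier in this file) and are untouched by this CL; assuming the GrVkInterface function-table layout of that era, a call such as the DeviceWaitIdle above resolves to roughly:

// Assumed dispatch shape (paraphrase of GrVkUtil.h, not code added by this CL):
//     GR_VK_CALL(fBackendContext->fInterface, DeviceWaitIdle(fBackendContext->fDevice));
// routes the call through the interface's function table, i.e. roughly
fBackendContext->fInterface->fFunctions.fDeviceWaitIdle(fBackendContext->fDevice);
// GR_VK_CALL_ERRCHECK does the same and additionally asserts that the returned
// VkResult is VK_SUCCESS.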
@@ -280,10 +271,10 @@ bool VulkanWindowContext::createSwapchain(int width, int height,
 }

 void VulkanWindowContext::createBuffers(VkFormat format, SkColorType colorType) {
-    fGetSwapchainImagesKHR(fDevice, fSwapchain, &fImageCount, nullptr);
+    fGetSwapchainImagesKHR(fBackendContext->fDevice, fSwapchain, &fImageCount, nullptr);
     SkASSERT(fImageCount);
     fImages = new VkImage[fImageCount];
-    fGetSwapchainImagesKHR(fDevice, fSwapchain, &fImageCount, fImages);
+    fGetSwapchainImagesKHR(fBackendContext->fDevice, fSwapchain, &fImageCount, fImages);

     // set up initial image layouts and create surfaces
     fImageLayouts = new VkImageLayout[fImageCount];
@@ -315,10 +306,10 @@ void VulkanWindowContext::createBuffers(VkFormat format, SkColorType colorType)
     memset(&commandPoolInfo, 0, sizeof(VkCommandPoolCreateInfo));
     commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
     // this needs to be on the render queue
-    commandPoolInfo.queueFamilyIndex = fGraphicsQueueIndex;
+    commandPoolInfo.queueFamilyIndex = fBackendContext->fGraphicsQueueIndex;
     commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
-    GR_VK_CALL_ERRCHECK(fInterface,
-                        CreateCommandPool(fDevice, &commandPoolInfo,
+    GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                        CreateCommandPool(fBackendContext->fDevice, &commandPoolInfo,
                                           nullptr, &fCommandPool));
 }

@@ -346,20 +337,20 @@ void VulkanWindowContext::createBuffers(VkFormat format, SkColorType colorType)
     fBackbuffers = new BackbufferInfo[fImageCount + 1];
     for (uint32_t i = 0; i < fImageCount + 1; ++i) {
         fBackbuffers[i].fImageIndex = -1;
-        GR_VK_CALL_ERRCHECK(fInterface,
-                            CreateSemaphore(fDevice, &semaphoreInfo,
+        GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                            CreateSemaphore(fBackendContext->fDevice, &semaphoreInfo,
                                             nullptr, &fBackbuffers[i].fAcquireSemaphore));
-        GR_VK_CALL_ERRCHECK(fInterface,
-                            CreateSemaphore(fDevice, &semaphoreInfo,
+        GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                            CreateSemaphore(fBackendContext->fDevice, &semaphoreInfo,
                                             nullptr, &fBackbuffers[i].fRenderSemaphore));
-        GR_VK_CALL_ERRCHECK(fInterface,
-                            AllocateCommandBuffers(fDevice, &commandBuffersInfo,
+        GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                            AllocateCommandBuffers(fBackendContext->fDevice, &commandBuffersInfo,
                                                    fBackbuffers[i].fTransitionCmdBuffers));
-        GR_VK_CALL_ERRCHECK(fInterface,
-                            CreateFence(fDevice, &fenceInfo, nullptr,
+        GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                            CreateFence(fBackendContext->fDevice, &fenceInfo, nullptr,
                                         &fBackbuffers[i].fUsageFences[0]));
-        GR_VK_CALL_ERRCHECK(fInterface,
-                            CreateFence(fDevice, &fenceInfo, nullptr,
+        GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                            CreateFence(fBackendContext->fDevice, &fenceInfo, nullptr,
                                         &fBackbuffers[i].fUsageFences[1]));
     }
     fCurrentBackbufferIndex = fImageCount;
@@ -369,26 +360,26 @@ void VulkanWindowContext::destroyBuffers() {

     if (fBackbuffers) {
         for (uint32_t i = 0; i < fImageCount + 1; ++i) {
-            GR_VK_CALL_ERRCHECK(fInterface,
-                                WaitForFences(fDevice, 2,
+            GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
+                                WaitForFences(fBackendContext->fDevice, 2,
                                               fBackbuffers[i].fUsageFences,
                                               true, UINT64_MAX));
             fBackbuffers[i].fImageIndex = -1;
-            GR_VK_CALL(fInterface,
-                       DestroySemaphore(fDevice,
+            GR_VK_CALL(fBackendContext->fInterface,
+                       DestroySemaphore(fBackendContext->fDevice,
                                         fBackbuffers[i].fAcquireSemaphore,
                                         nullptr));
-            GR_VK_CALL(fInterface,
-                       DestroySemaphore(fDevice,
+            GR_VK_CALL(fBackendContext->fInterface,
+                       DestroySemaphore(fBackendContext->fDevice,
                                         fBackbuffers[i].fRenderSemaphore,
                                         nullptr));
-            GR_VK_CALL(fInterface,
-                       FreeCommandBuffers(fDevice, fCommandPool, 2,
+            GR_VK_CALL(fBackendContext->fInterface,
+                       FreeCommandBuffers(fBackendContext->fDevice, fCommandPool, 2,
                                           fBackbuffers[i].fTransitionCmdBuffers));
-            GR_VK_CALL(fInterface,
-                       DestroyFence(fDevice, fBackbuffers[i].fUsageFences[0], 0));
-            GR_VK_CALL(fInterface,
-                       DestroyFence(fDevice, fBackbuffers[i].fUsageFences[1], 0));
+            GR_VK_CALL(fBackendContext->fInterface,
+                       DestroyFence(fBackendContext->fDevice, fBackbuffers[i].fUsageFences[0], 0));
+            GR_VK_CALL(fBackendContext->fInterface,
+                       DestroyFence(fBackendContext->fDevice, fBackbuffers[i].fUsageFences[1], 0));
         }
     }

@@ -409,43 +400,34 @@ VulkanWindowContext::~VulkanWindowContext() {
 }

 void VulkanWindowContext::destroyContext() {
-    if (!this->isValid()) {
+    if (!fBackendContext.get()) {
         return;
     }

-    fQueueWaitIdle(fPresentQueue);
-    fDeviceWaitIdle(fDevice);
+    GR_VK_CALL(fBackendContext->fInterface, QueueWaitIdle(fPresentQueue));
+    GR_VK_CALL(fBackendContext->fInterface, DeviceWaitIdle(fBackendContext->fDevice));

     this->destroyBuffers();

     if (VK_NULL_HANDLE != fCommandPool) {
-        GR_VK_CALL(fInterface, DestroyCommandPool(fDevice, fCommandPool, nullptr));
+        GR_VK_CALL(fBackendContext->fInterface, DestroyCommandPool(fBackendContext->fDevice,
+                                                                   fCommandPool, nullptr));
         fCommandPool = VK_NULL_HANDLE;
     }

     if (VK_NULL_HANDLE != fSwapchain) {
-        fDestroySwapchainKHR(fDevice, fSwapchain, nullptr);
+        fDestroySwapchainKHR(fBackendContext->fDevice, fSwapchain, nullptr);
         fSwapchain = VK_NULL_HANDLE;
     }

     if (VK_NULL_HANDLE != fSurface) {
-        fDestroySurfaceKHR(fInstance, fSurface, nullptr);
+        fDestroySurfaceKHR(fBackendContext->fInstance, fSurface, nullptr);
         fSurface = VK_NULL_HANDLE;
     }

     fContext.reset();
-    fInterface.reset();
-
-    if (VK_NULL_HANDLE != fDevice) {
-        fDestroyDevice(fDevice, nullptr);
-        fDevice = VK_NULL_HANDLE;
-    }
-    fPhysicalDevice = VK_NULL_HANDLE;
-
-    if (VK_NULL_HANDLE != fInstance) {
-        fDestroyInstance(fInstance, nullptr);
-        fInstance = VK_NULL_HANDLE;
-    }
+    fBackendContext.reset(nullptr);
 }

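The destroyContext() hunk above is where the ownership flip is most visible: the pre-revert code destroyed its own VkDevice and VkInstance through fDestroyDevice/fDestroyInstance, while the reverted code only drops its reference and leaves that teardown to the shared GrVkBackendContext. Sketching the assumption behind the single remaining line:

// Assumption, not code from the CL: because the context came from GrVkBackendContext::Create(),
// it owns its VkInstance/VkDevice, so releasing the last sk_sp reference (here and in the
// GrContext) is what ultimately destroys them.
fBackendContext.reset(nullptr);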
VulkanWindowContext::BackbufferInfo* VulkanWindowContext::getAvailableBackbuffer() {
|
VulkanWindowContext::BackbufferInfo* VulkanWindowContext::getAvailableBackbuffer() {
|
||||||
@ -457,8 +439,8 @@ VulkanWindowContext::BackbufferInfo* VulkanWindowContext::getAvailableBackbuffer
|
|||||||
}
|
}
|
||||||
|
|
||||||
BackbufferInfo* backbuffer = fBackbuffers + fCurrentBackbufferIndex;
|
BackbufferInfo* backbuffer = fBackbuffers + fCurrentBackbufferIndex;
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
WaitForFences(fDevice, 2, backbuffer->fUsageFences,
|
WaitForFences(fBackendContext->fDevice, 2, backbuffer->fUsageFences,
|
||||||
true, UINT64_MAX));
|
true, UINT64_MAX));
|
||||||
return backbuffer;
|
return backbuffer;
|
||||||
}
|
}
|
||||||
@ -468,12 +450,12 @@ sk_sp<SkSurface> VulkanWindowContext::getBackbufferSurface() {
|
|||||||
SkASSERT(backbuffer);
|
SkASSERT(backbuffer);
|
||||||
|
|
||||||
// reset the fence
|
// reset the fence
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
ResetFences(fDevice, 2, backbuffer->fUsageFences));
|
ResetFences(fBackendContext->fDevice, 2, backbuffer->fUsageFences));
|
||||||
// semaphores should be in unsignaled state
|
// semaphores should be in unsignaled state
|
||||||
|
|
||||||
// acquire the image
|
// acquire the image
|
||||||
VkResult res = fAcquireNextImageKHR(fDevice, fSwapchain, UINT64_MAX,
|
VkResult res = fAcquireNextImageKHR(fBackendContext->fDevice, fSwapchain, UINT64_MAX,
|
||||||
backbuffer->fAcquireSemaphore, VK_NULL_HANDLE,
|
backbuffer->fAcquireSemaphore, VK_NULL_HANDLE,
|
||||||
&backbuffer->fImageIndex);
|
&backbuffer->fImageIndex);
|
||||||
if (VK_ERROR_SURFACE_LOST_KHR == res) {
|
if (VK_ERROR_SURFACE_LOST_KHR == res) {
|
||||||
@ -487,11 +469,11 @@ sk_sp<SkSurface> VulkanWindowContext::getBackbufferSurface() {
|
|||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
backbuffer = this->getAvailableBackbuffer();
|
backbuffer = this->getAvailableBackbuffer();
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
ResetFences(fDevice, 2, backbuffer->fUsageFences));
|
ResetFences(fBackendContext->fDevice, 2, backbuffer->fUsageFences));
|
||||||
|
|
||||||
// acquire the image
|
// acquire the image
|
||||||
res = fAcquireNextImageKHR(fDevice, fSwapchain, UINT64_MAX,
|
res = fAcquireNextImageKHR(fBackendContext->fDevice, fSwapchain, UINT64_MAX,
|
||||||
backbuffer->fAcquireSemaphore, VK_NULL_HANDLE,
|
backbuffer->fAcquireSemaphore, VK_NULL_HANDLE,
|
||||||
&backbuffer->fImageIndex);
|
&backbuffer->fImageIndex);
|
||||||
|
|
||||||
@ -519,27 +501,27 @@ sk_sp<SkSurface> VulkanWindowContext::getBackbufferSurface() {
|
|||||||
layout, // oldLayout
|
layout, // oldLayout
|
||||||
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // newLayout
|
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // newLayout
|
||||||
fPresentQueueIndex, // srcQueueFamilyIndex
|
fPresentQueueIndex, // srcQueueFamilyIndex
|
||||||
fGraphicsQueueIndex, // dstQueueFamilyIndex
|
fBackendContext->fGraphicsQueueIndex, // dstQueueFamilyIndex
|
||||||
fImages[backbuffer->fImageIndex], // image
|
fImages[backbuffer->fImageIndex], // image
|
||||||
{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 } // subresourceRange
|
{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 } // subresourceRange
|
||||||
};
|
};
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
ResetCommandBuffer(backbuffer->fTransitionCmdBuffers[0], 0));
|
ResetCommandBuffer(backbuffer->fTransitionCmdBuffers[0], 0));
|
||||||
VkCommandBufferBeginInfo info;
|
VkCommandBufferBeginInfo info;
|
||||||
memset(&info, 0, sizeof(VkCommandBufferBeginInfo));
|
memset(&info, 0, sizeof(VkCommandBufferBeginInfo));
|
||||||
info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
|
info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
|
||||||
info.flags = 0;
|
info.flags = 0;
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
BeginCommandBuffer(backbuffer->fTransitionCmdBuffers[0], &info));
|
BeginCommandBuffer(backbuffer->fTransitionCmdBuffers[0], &info));
|
||||||
|
|
||||||
GR_VK_CALL(fInterface,
|
GR_VK_CALL(fBackendContext->fInterface,
|
||||||
CmdPipelineBarrier(backbuffer->fTransitionCmdBuffers[0],
|
CmdPipelineBarrier(backbuffer->fTransitionCmdBuffers[0],
|
||||||
srcStageMask, dstStageMask, 0,
|
srcStageMask, dstStageMask, 0,
|
||||||
0, nullptr,
|
0, nullptr,
|
||||||
0, nullptr,
|
0, nullptr,
|
||||||
1, &imageMemoryBarrier));
|
1, &imageMemoryBarrier));
|
||||||
|
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
EndCommandBuffer(backbuffer->fTransitionCmdBuffers[0]));
|
EndCommandBuffer(backbuffer->fTransitionCmdBuffers[0]));
|
||||||
|
|
||||||
VkPipelineStageFlags waitDstStageFlags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
|
VkPipelineStageFlags waitDstStageFlags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
|
||||||
@ -554,8 +536,8 @@ sk_sp<SkSurface> VulkanWindowContext::getBackbufferSurface() {
|
|||||||
submitInfo.pCommandBuffers = &backbuffer->fTransitionCmdBuffers[0];
|
submitInfo.pCommandBuffers = &backbuffer->fTransitionCmdBuffers[0];
|
||||||
submitInfo.signalSemaphoreCount = 0;
|
submitInfo.signalSemaphoreCount = 0;
|
||||||
|
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
QueueSubmit(fGraphicsQueue, 1, &submitInfo,
|
QueueSubmit(fBackendContext->fQueue, 1, &submitInfo,
|
||||||
backbuffer->fUsageFences[0]));
|
backbuffer->fUsageFences[0]));
|
||||||
|
|
||||||
SkSurface* surface = fSurfaces[backbuffer->fImageIndex].get();
|
SkSurface* surface = fSurfaces[backbuffer->fImageIndex].get();
|
||||||
@ -592,26 +574,26 @@ void VulkanWindowContext::swapBuffers() {
|
|||||||
dstAccessMask, // inputMask
|
dstAccessMask, // inputMask
|
||||||
layout, // oldLayout
|
layout, // oldLayout
|
||||||
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, // newLayout
|
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, // newLayout
|
||||||
fGraphicsQueueIndex, // srcQueueFamilyIndex
|
fBackendContext->fGraphicsQueueIndex, // srcQueueFamilyIndex
|
||||||
fPresentQueueIndex, // dstQueueFamilyIndex
|
fPresentQueueIndex, // dstQueueFamilyIndex
|
||||||
fImages[backbuffer->fImageIndex], // image
|
fImages[backbuffer->fImageIndex], // image
|
||||||
{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 } // subresourceRange
|
{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 } // subresourceRange
|
||||||
};
|
};
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
ResetCommandBuffer(backbuffer->fTransitionCmdBuffers[1], 0));
|
ResetCommandBuffer(backbuffer->fTransitionCmdBuffers[1], 0));
|
||||||
VkCommandBufferBeginInfo info;
|
VkCommandBufferBeginInfo info;
|
||||||
memset(&info, 0, sizeof(VkCommandBufferBeginInfo));
|
memset(&info, 0, sizeof(VkCommandBufferBeginInfo));
|
||||||
info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
|
info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
|
||||||
info.flags = 0;
|
info.flags = 0;
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
BeginCommandBuffer(backbuffer->fTransitionCmdBuffers[1], &info));
|
BeginCommandBuffer(backbuffer->fTransitionCmdBuffers[1], &info));
|
||||||
GR_VK_CALL(fInterface,
|
GR_VK_CALL(fBackendContext->fInterface,
|
||||||
CmdPipelineBarrier(backbuffer->fTransitionCmdBuffers[1],
|
CmdPipelineBarrier(backbuffer->fTransitionCmdBuffers[1],
|
||||||
srcStageMask, dstStageMask, 0,
|
srcStageMask, dstStageMask, 0,
|
||||||
0, nullptr,
|
0, nullptr,
|
||||||
0, nullptr,
|
0, nullptr,
|
||||||
1, &imageMemoryBarrier));
|
1, &imageMemoryBarrier));
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
EndCommandBuffer(backbuffer->fTransitionCmdBuffers[1]));
|
EndCommandBuffer(backbuffer->fTransitionCmdBuffers[1]));
|
||||||
|
|
||||||
fImageLayouts[backbuffer->fImageIndex] = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
|
fImageLayouts[backbuffer->fImageIndex] = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
|
||||||
@ -627,8 +609,8 @@ void VulkanWindowContext::swapBuffers() {
|
|||||||
submitInfo.signalSemaphoreCount = 1;
|
submitInfo.signalSemaphoreCount = 1;
|
||||||
submitInfo.pSignalSemaphores = &backbuffer->fRenderSemaphore;
|
submitInfo.pSignalSemaphores = &backbuffer->fRenderSemaphore;
|
||||||
|
|
||||||
GR_VK_CALL_ERRCHECK(fInterface,
|
GR_VK_CALL_ERRCHECK(fBackendContext->fInterface,
|
||||||
QueueSubmit(fGraphicsQueue, 1, &submitInfo,
|
QueueSubmit(fBackendContext->fQueue, 1, &submitInfo,
|
||||||
backbuffer->fUsageFences[1]));
|
backbuffer->fUsageFences[1]));
|
||||||
|
|
||||||
// Submit present operation to present queue
|
// Submit present operation to present queue
|
||||||
@@ -13,8 +13,6 @@
 #ifdef SK_VULKAN

 #include "vk/GrVkBackendContext.h"
-#include "vk/GrVkInterface.h"
-#include "vk/VkTestUtils.h"
 #include "WindowContext.h"

 class GrRenderTarget;
@@ -28,7 +26,7 @@ public:
     sk_sp<SkSurface> getBackbufferSurface() override;
     void swapBuffers() override;

-    bool isValid() override { return fDevice != VK_NULL_HANDLE; }
+    bool isValid() override { return SkToBool(fBackendContext.get()); }

     void resize(int w, int h) override {
         this->createSwapchain(w, h, fDisplayParams);
@@ -43,7 +41,7 @@ public:
     /** Platform specific function that creates a VkSurfaceKHR for a window */
     using CreateVkSurfaceFn = std::function<VkSurfaceKHR(VkInstance)>;
     /** Platform specific function that determines whether presentation will succeed. */
-    using CanPresentFn = sk_gpu_test::CanPresentFn;
+    using CanPresentFn = GrVkBackendContext::CanPresentFn;

     VulkanWindowContext(const DisplayParams&, CreateVkSurfaceFn, CanPresentFn,
                         PFN_vkGetInstanceProcAddr, PFN_vkGetDeviceProcAddr);
@@ -65,9 +63,7 @@ private:
     void createBuffers(VkFormat format, SkColorType colorType);
     void destroyBuffers();

-    VkInstance fInstance = VK_NULL_HANDLE;
-    VkPhysicalDevice fPhysicalDevice = VK_NULL_HANDLE;
-    VkDevice fDevice = VK_NULL_HANDLE;
+    sk_sp<const GrVkBackendContext> fBackendContext;

     // simple wrapper class that exists only to initialize a pointer to NULL
     template <typename FNPTR_TYPE> class VkPtr {
@@ -99,19 +95,10 @@ private:
     VkPtr<PFN_vkGetSwapchainImagesKHR> fGetSwapchainImagesKHR;
     VkPtr<PFN_vkAcquireNextImageKHR> fAcquireNextImageKHR;
     VkPtr<PFN_vkQueuePresentKHR> fQueuePresentKHR;
-
-    VkPtr<PFN_vkDestroyInstance> fDestroyInstance;
-    VkPtr<PFN_vkDeviceWaitIdle> fDeviceWaitIdle;
-    VkPtr<PFN_vkQueueWaitIdle> fQueueWaitIdle;
-    VkPtr<PFN_vkDestroyDevice> fDestroyDevice;
     VkPtr<PFN_vkGetDeviceQueue> fGetDeviceQueue;

-    sk_sp<const GrVkInterface> fInterface;
-
     VkSurfaceKHR fSurface;
     VkSwapchainKHR fSwapchain;
-    uint32_t fGraphicsQueueIndex;
-    VkQueue fGraphicsQueue;
     uint32_t fPresentQueueIndex;
     VkQueue fPresentQueue;
