/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrSurfaceProxy_DEFINED
#define GrSurfaceProxy_DEFINED

#include "GrGpuResource.h"
#include "GrSurface.h"

#include "SkRect.h"

Revert "Revert "Plumb GrBackendTexture throughout skia.""
This reverts commit 7fa5c31c2c9af834bee66d5fcf476e250076c8d6.
Reason for revert: Relanding this change now that other fixes have landed.
Original change's description:
> Revert "Plumb GrBackendTexture throughout skia."
>
> This reverts commit 7da62b9059f3c1d31624a0e4da96ee5f908f9c12.
>
> Reason for revert: fix android roll
>
> Original change's description:
> > Plumb GrBackendTexture throughout skia.
> >
> > Bug: skia:
> > Change-Id: I1bae6768ee7229818a83ba608035a1f7867e6875
> > Reviewed-on: https://skia-review.googlesource.com/13645
> > Commit-Queue: Greg Daniel <egdaniel@google.com>
> > Reviewed-by: Robert Phillips <robertphillips@google.com>
> >
>
> TBR=egdaniel@google.com,bsalomon@google.com,robertphillips@google.com,brianosman@google.com,reviews@skia.org,stani@google.com
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
>
> Change-Id: I5cb8763cc837c83ebc6d10366fe2dd3efe35fb89
> Reviewed-on: https://skia-review.googlesource.com/13773
> Reviewed-by: Stan Iliev <stani@google.com>
> Commit-Queue: Stan Iliev <stani@google.com>
>
TBR=egdaniel@google.com,bsalomon@google.com,robertphillips@google.com,reviews@skia.org,brianosman@google.com,stani@google.com
# Not skipping CQ checks because original CL landed > 1 day ago.
Change-Id: I92bc074e4fe37fa5c83186afadc472c03802e8f2
Reviewed-on: https://skia-review.googlesource.com/13975
Reviewed-by: Greg Daniel <egdaniel@google.com>
Commit-Queue: Greg Daniel <egdaniel@google.com>
2017-04-20 16:41:55 +00:00
|
|
|
class GrBackendTexture;
class GrCaps;
class GrOpList;
class GrProxyProvider;
class GrRenderTargetOpList;
class GrRenderTargetProxy;
class GrResourceProvider;
class GrSurfaceContext;
class GrSurfaceProxyPriv;
class GrTextureOpList;
class GrTextureProxy;

// This class replicates the functionality of GrIORef<GrSurface> but tracks the
// utilization for later resource allocation (for the deferred case) and
// forwards on the utilization in the wrapped case.
class GrIORefProxy : public SkNoncopyable {
public:
    void ref() const {
        this->validate();

        ++fRefCnt;
        if (fTarget) {
            fTarget->ref();
        }
    }

    void unref() const {
        this->validate();

        if (fTarget) {
            fTarget->unref();
        }

        --fRefCnt;
        this->didRemoveRefOrPendingIO();
    }

#ifdef SK_DEBUG
    bool isUnique_debugOnly() const { // For asserts.
        SkASSERT(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);
        return 1 == fRefCnt + fPendingWrites + fPendingReads;
    }
#endif

    void validate() const {
#ifdef SK_DEBUG
        SkASSERT(fRefCnt >= 0);
        SkASSERT(fPendingReads >= 0);
        SkASSERT(fPendingWrites >= 0);
        SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 1);

        if (fTarget) {
            // The backing GrSurface can have more refs than the proxy if the proxy
            // started off wrapping an external resource (that came in with refs).
            // The GrSurface should never have fewer refs than the proxy however.
            SkASSERT(fTarget->fRefCnt >= fRefCnt);
            SkASSERT(fTarget->fPendingReads >= fPendingReads);
            SkASSERT(fTarget->fPendingWrites >= fPendingWrites);
        }
#endif
    }

    int32_t getProxyRefCnt_TestOnly() const;
    int32_t getBackingRefCnt_TestOnly() const;
    int32_t getPendingReadCnt_TestOnly() const;
    int32_t getPendingWriteCnt_TestOnly() const;

    void addPendingRead() const {
        this->validate();

        ++fPendingReads;
        if (fTarget) {
            fTarget->addPendingRead();
        }
    }

    void completedRead() const {
        this->validate();

        if (fTarget) {
            fTarget->completedRead();
        }

        --fPendingReads;
        this->didRemoveRefOrPendingIO();
    }

    void addPendingWrite() const {
        this->validate();

        ++fPendingWrites;
        if (fTarget) {
            fTarget->addPendingWrite();
        }
    }

    void completedWrite() const {
        this->validate();

        if (fTarget) {
            fTarget->completedWrite();
        }

        --fPendingWrites;
        this->didRemoveRefOrPendingIO();
    }

protected:
    GrIORefProxy() : fTarget(nullptr), fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}
    GrIORefProxy(sk_sp<GrSurface> surface) : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {
        // Since we're manually forwarding on refs & unrefs we don't want sk_sp doing
        // anything extra.
        fTarget = surface.release();
    }
    virtual ~GrIORefProxy() {
        // We don't unref 'fTarget' here since the 'unref' method will already
        // have forwarded on the unref call that got us here.
    }

    // This GrIORefProxy was deferred before but has just been instantiated. To
    // make all the reffing & unreffing work out we now need to transfer any deferred
    // refs & unrefs to the new GrSurface.
    void transferRefs() {
        SkASSERT(fTarget);

        SkASSERT(fTarget->fRefCnt > 0);
        fTarget->fRefCnt += (fRefCnt-1); // don't xfer the proxy's creation ref
        fTarget->fPendingReads += fPendingReads;
        fTarget->fPendingWrites += fPendingWrites;
    }

    bool internalHasPendingIO() const {
        if (fTarget) {
            return fTarget->internalHasPendingIO();
        }

        return SkToBool(fPendingWrites | fPendingReads);
    }

    bool internalHasPendingWrite() const {
        if (fTarget) {
            return fTarget->internalHasPendingWrite();
        }

        return SkToBool(fPendingWrites);
    }

    // For deferred proxies this will be null. For wrapped proxies it will point to the
    // wrapped resource.
    GrSurface* fTarget;

private:
    // This class is used to manage conversion of refs to pending reads/writes.
    friend class GrSurfaceProxyRef;
    template <typename, GrIOType> friend class GrPendingIOResource;

    void didRemoveRefOrPendingIO() const {
        if (0 == fPendingReads && 0 == fPendingWrites && 0 == fRefCnt) {
            delete this;
        }
    }

    mutable int32_t fRefCnt;
    mutable int32_t fPendingReads;
    mutable int32_t fPendingWrites;
};

class GrSurfaceProxy : public GrIORefProxy {
public:
    static sk_sp<GrSurfaceProxy> MakeWrapped(sk_sp<GrSurface>, GrSurfaceOrigin);
    static sk_sp<GrTextureProxy> MakeWrapped(sk_sp<GrTexture>, GrSurfaceOrigin);

    static sk_sp<GrTextureProxy> MakeDeferred(GrProxyProvider*,
                                              const GrSurfaceDesc&, SkBackingFit,
                                              SkBudgeted, uint32_t flags = 0);
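
    // A minimal usage sketch (illustrative only; 'proxyProvider' and the desc values are
    // assumptions, not part of this header):
    //
    //     GrSurfaceDesc desc;
    //     desc.fWidth  = 128;
    //     desc.fHeight = 128;
    //     desc.fConfig = kRGBA_8888_GrPixelConfig;
    //     desc.fOrigin = kTopLeft_GrSurfaceOrigin;
    //     sk_sp<GrTextureProxy> proxy = GrSurfaceProxy::MakeDeferred(
    //             proxyProvider, desc, SkBackingFit::kApprox, SkBudgeted::kYes);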

    /**
     * Creates a proxy that will be mipmapped.
     *
     * @param desc          Description of the texture properties.
     * @param budgeted      Does the texture count against the resource cache budget?
     * @param texels        A contiguous array of mipmap levels
     * @param mipLevelCount The number of elements in the texels array
     */
    static sk_sp<GrTextureProxy> MakeDeferredMipMap(GrProxyProvider*,
                                                    const GrSurfaceDesc& desc, SkBudgeted budgeted,
                                                    const GrMipLevel texels[], int mipLevelCount,
                                                    SkDestinationSurfaceColorMode mipColorMode =
                                                            SkDestinationSurfaceColorMode::kLegacy);
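
    // Sketch of supplying the mip data (assumes GrMipLevel's fPixels/fRowBytes fields and
    // hypothetical pixel buffers; levels are ordered base level first):
    //
    //     GrMipLevel levels[2];
    //     levels[0].fPixels   = basePixels;    // e.g. the 64x64 base level
    //     levels[0].fRowBytes = baseRowBytes;
    //     levels[1].fPixels   = mipPixels;     // the 32x32 level 1
    //     levels[1].fRowBytes = mipRowBytes;
    //     sk_sp<GrTextureProxy> proxy = GrSurfaceProxy::MakeDeferredMipMap(
    //             proxyProvider, desc, SkBudgeted::kYes, levels, 2);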

    /**
     * Like the call above but there are no texels to upload. A texture proxy is returned that
     * simply has space allocated for the mips. We will allocate the full number of mip levels
     * based on the width and height in the GrSurfaceDesc.
     */
    static sk_sp<GrTextureProxy> MakeDeferredMipMap(GrProxyProvider*,
                                                    const GrSurfaceDesc& desc, SkBudgeted budgeted);

    // TODO: need to refine ownership semantics of 'srcData' if we're in completely
    // deferred mode
    static sk_sp<GrTextureProxy> MakeDeferred(GrProxyProvider*,
                                              const GrSurfaceDesc&, SkBudgeted,
                                              const void* srcData, size_t rowBytes);

    static sk_sp<GrTextureProxy> MakeWrappedBackend(GrContext*, const GrBackendTexture&,
                                                    GrSurfaceOrigin);

    using LazyInstantiateCallback = std::function<sk_sp<GrTexture>(GrResourceProvider*,
                                                                    GrSurfaceOrigin* outOrigin)>;

    enum class Renderable : bool {
        kNo = false,
        kYes = true
    };

    /**
     * Creates a texture proxy that will be instantiated by a user-supplied callback during flush.
     * (Mipmapping, MSAA, and stencil are not supported by this method.)
     */
    static sk_sp<GrTextureProxy> MakeLazy(LazyInstantiateCallback&&, Renderable, GrPixelConfig);
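
    // A minimal sketch of a lazy proxy (the lambda body is an assumption; it would typically
    // create or find the backing texture via the GrResourceProvider at flush time):
    //
    //     auto lazyProxy = GrSurfaceProxy::MakeLazy(
    //             [](GrResourceProvider* provider, GrSurfaceOrigin* outOrigin) {
    //                 *outOrigin = kTopLeft_GrSurfaceOrigin;
    //                 sk_sp<GrTexture> tex = /* create or find the texture here */;
    //                 return tex;
    //             },
    //             GrSurfaceProxy::Renderable::kNo, kRGBA_8888_GrPixelConfig);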

    GrPixelConfig config() const { return fConfig; }
    int width() const { SkASSERT(!this->isPendingLazyInstantiation()); return fWidth; }
    int height() const { SkASSERT(!this->isPendingLazyInstantiation()); return fHeight; }
    int worstCaseWidth() const;
    int worstCaseHeight() const;
    GrSurfaceOrigin origin() const {
        SkASSERT(!this->isPendingLazyInstantiation());
        SkASSERT(kTopLeft_GrSurfaceOrigin == fOrigin || kBottomLeft_GrSurfaceOrigin == fOrigin);
        return fOrigin;
    }

    // If the client gave us a LazyInstantiateCallback (via MakeLazy), then we will invoke that
    // callback during flush. fWidth, fHeight, and fOrigin will be undefined until that time.
    bool isPendingLazyInstantiation() const { return SkToBool(fLazyInstantiateCallback); }

    class UniqueID {
    public:
        static UniqueID InvalidID() {
            return UniqueID(uint32_t(SK_InvalidUniqueID));
        }

        // wrapped
        explicit UniqueID(const GrGpuResource::UniqueID& id) : fID(id.asUInt()) { }
        // deferred and lazy-callback
        UniqueID() : fID(GrGpuResource::CreateUniqueID()) { }

        uint32_t asUInt() const { return fID; }

        bool operator==(const UniqueID& other) const {
            return fID == other.fID;
        }
        bool operator!=(const UniqueID& other) const {
            return !(*this == other);
        }

        void makeInvalid() { fID = SK_InvalidUniqueID; }
        bool isInvalid() const { return SK_InvalidUniqueID == fID; }

    private:
        explicit UniqueID(uint32_t id) : fID(id) {}

        uint32_t fID;
    };

    /*
     * The contract for the uniqueID is:
     *   for wrapped resources:
     *      the uniqueID will match that of the wrapped resource
     *
     *   for deferred resources:
     *      the uniqueID will be different from that of the real resource, once it is allocated
     *      the proxy's uniqueID will not change across the instantiate call
     *
     *   the uniqueIDs of the proxies and the resources draw from the same pool
     *
     * What this boils down to is that the uniqueID of a proxy can be used to consistently
     * track/identify a proxy but should never be used to distinguish between
     * resources and proxies - beware!
     */
    UniqueID uniqueID() const { return fUniqueID; }

    UniqueID underlyingUniqueID() const {
        if (fTarget) {
            return UniqueID(fTarget->uniqueID());
        }

        return fUniqueID;
    }
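
    // For example (sketch; 'proxyA'/'proxyB' are hypothetical): per-proxy bookkeeping can key
    // off the proxy's own ID, which is stable across instantiate(), while underlyingUniqueID()
    // follows the backing surface once one exists:
    //
    //     uint32_t key = proxyA->uniqueID().asUInt();   // stable for the proxy's lifetime
    //     // true iff both proxies resolve to the same backing surface (once instantiated)
    //     bool sameBacking = proxyA->underlyingUniqueID() == proxyB->underlyingUniqueID();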

    virtual bool instantiate(GrResourceProvider* resourceProvider) = 0;

    /**
     * Helper that gets the width and height of the surface as a bounding rectangle.
     */
    SkRect getBoundsRect() const {
        SkASSERT(!this->isPendingLazyInstantiation());
        return SkRect::MakeIWH(this->width(), this->height());
    }

    /**
     * @return the texture proxy associated with the surface proxy, may be NULL.
     */
    virtual GrTextureProxy* asTextureProxy() { return nullptr; }
    virtual const GrTextureProxy* asTextureProxy() const { return nullptr; }

    /**
     * @return the render target proxy associated with the surface proxy, may be NULL.
     */
    virtual GrRenderTargetProxy* asRenderTargetProxy() { return nullptr; }
    virtual const GrRenderTargetProxy* asRenderTargetProxy() const { return nullptr; }

    /**
     * Does the resource count against the resource budget?
     */
    SkBudgeted isBudgeted() const { return fBudgeted; }

    void setLastOpList(GrOpList* opList);
    GrOpList* getLastOpList() { return fLastOpList; }

    GrRenderTargetOpList* getLastRenderTargetOpList();
    GrTextureOpList* getLastTextureOpList();

    /**
     * Retrieves the amount of GPU memory that will be or currently is used by this resource
     * in bytes. It is approximate since we aren't aware of additional padding or copies made
     * by the driver.
     *
     * @return the amount of GPU memory used in bytes
     */
    size_t gpuMemorySize() const {
        SkASSERT(!this->isPendingLazyInstantiation());
        if (fTarget) {
            return fTarget->gpuMemorySize();
        }
        if (kInvalidGpuMemorySize == fGpuMemorySize) {
            fGpuMemorySize = this->onUninstantiatedGpuMemorySize();
            SkASSERT(kInvalidGpuMemorySize != fGpuMemorySize);
        }
        return fGpuMemorySize;
    }

    // Helper function that creates a temporary SurfaceContext to perform the copy.
    // It always returns a kExact-backed proxy because it is used when converting an
    // SkSpecialImage to an SkImage. The copy is not a render target and not multisampled.
    static sk_sp<GrTextureProxy> Copy(GrContext*, GrSurfaceProxy* src, GrMipMapped,
                                      SkIRect srcRect, SkBudgeted);

    // Copy the entire 'src'.
    // It always returns a kExact-backed proxy because it is used in SkGpuDevice::snapSpecial.
    static sk_sp<GrTextureProxy> Copy(GrContext* context, GrSurfaceProxy* src, GrMipMapped,
                                      SkBudgeted budgeted);
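
    // e.g. (sketch; 'context' and 'srcProxy' are assumed to exist): snapshot the full contents
    // of a proxy into a budgeted, exact-fit, non-mipmapped copy:
    //
    //     sk_sp<GrTextureProxy> snapshot =
    //             GrSurfaceProxy::Copy(context, srcProxy, GrMipMapped::kNo, SkBudgeted::kYes);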

    // Test-only entry point - should decrease in use as proxies propagate
    static sk_sp<GrSurfaceContext> TestCopy(GrContext* context, const GrSurfaceDesc& dstDesc,
                                            GrSurfaceProxy* srcProxy);

    bool isWrapped_ForTesting() const;

    SkDEBUGCODE(void validate(GrContext*) const;)

    // Provides access to functions that aren't part of the public API.
    inline GrSurfaceProxyPriv priv();
    inline const GrSurfaceProxyPriv priv() const;

protected:
    // Deferred version
    GrSurfaceProxy(const GrSurfaceDesc& desc, SkBackingFit fit, SkBudgeted budgeted, uint32_t flags)
            : fConfig(desc.fConfig)
            , fWidth(desc.fWidth)
            , fHeight(desc.fHeight)
            , fOrigin(desc.fOrigin)
            , fFit(fit)
            , fBudgeted(budgeted)
            , fFlags(flags)
            , fNeedsClear(SkToBool(desc.fFlags & kPerformInitialClear_GrSurfaceFlag))
            , fGpuMemorySize(kInvalidGpuMemorySize)
            , fLastOpList(nullptr) {
        // Note: this ctor pulls a new uniqueID from the same pool as the GrGpuResources
    }

    // Lazy-callback version
    GrSurfaceProxy(LazyInstantiateCallback&& callback, GrPixelConfig config);

    // Wrapped version
    GrSurfaceProxy(sk_sp<GrSurface> surface, GrSurfaceOrigin origin, SkBackingFit fit);

    virtual ~GrSurfaceProxy();

    friend class GrSurfaceProxyPriv;

    // Methods made available via GrSurfaceProxyPriv
    bool hasPendingIO() const {
        return this->internalHasPendingIO();
    }

    bool hasPendingWrite() const {
        return this->internalHasPendingWrite();
    }

    void computeScratchKey(GrScratchKey*) const;

    virtual sk_sp<GrSurface> createSurface(GrResourceProvider*) const = 0;
    void assign(sk_sp<GrSurface> surface);

    sk_sp<GrSurface> createSurfaceImpl(GrResourceProvider*, int sampleCnt, bool needsStencil,
                                       GrSurfaceFlags flags, GrMipMapped mipMapped,
                                       SkDestinationSurfaceColorMode mipColorMode) const;

    bool instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt, bool needsStencil,
                         GrSurfaceFlags flags, GrMipMapped mipMapped,
                         SkDestinationSurfaceColorMode mipColorMode, const GrUniqueKey*);

private:
    // For wrapped resources, 'fConfig', 'fWidth', 'fHeight', and 'fOrigin' will always be filled
    // in from the wrapped resource.
    GrPixelConfig        fConfig;
    int                  fWidth;
    int                  fHeight;
    GrSurfaceOrigin      fOrigin;
    SkBackingFit         fFit;      // always kApprox for lazy-callback resources
                                    // always kExact for wrapped resources
    mutable SkBudgeted   fBudgeted; // always kYes for lazy-callback resources
                                    // set from the backing resource for wrapped resources
                                    // mutable because of SkSurface/SkImage wishy-washiness
    const uint32_t       fFlags;

    const UniqueID       fUniqueID; // set from the backing resource for wrapped resources

    LazyInstantiateCallback fLazyInstantiateCallback;
    SkDEBUGCODE(virtual void validateLazyTexture(const GrTexture*) = 0;)

    static const size_t kInvalidGpuMemorySize = ~static_cast<size_t>(0);
    SkDEBUGCODE(size_t getRawGpuMemorySize_debugOnly() const { return fGpuMemorySize; })

    virtual size_t onUninstantiatedGpuMemorySize() const = 0;

    bool                 fNeedsClear;

    // This entry is lazily evaluated so, when the proxy wraps a resource, the resource
    // will be queried but, when the proxy is deferred, it will compute the answer itself.
    // If the proxy computes its own answer that answer is checked (in debug mode) in
    // the instantiation method.
    mutable size_t       fGpuMemorySize;

    // The last opList that wrote to or is currently going to write to this surface.
    // The opList can be closed (e.g., no surface context is currently bound
    // to this proxy).
    // This back-pointer is required so that we can add a dependency between
    // the opList used to create the current contents of this surface
    // and the opList of a destination surface to which this one is being drawn or copied.
    // This pointer is unreffed. OpLists own a ref on their surface proxies.
    GrOpList*            fLastOpList;

    typedef GrIORefProxy INHERITED;
};

#endif