// Commit b68bcc4398: Actually takes a ref on fOwningPerOpListPaths, instead of just asserting it
// continues to exist. Removes unnecessary asserts surrounding dangling pointers.
// Bug: skia:8359
// Change-Id: Ie80da55510f320452bd9ee3a4b38bd59d48681a0
// Reviewed-on: https://skia-review.googlesource.com/154684
// Reviewed-by: Brian Salomon <bsalomon@google.com>
// Commit-Queue: Chris Dalton <csmartdalton@google.com>
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrCCClipPath_DEFINED
#define GrCCClipPath_DEFINED

#include "GrTextureProxy.h"
#include "SkPath.h"

struct GrCCPerFlushResourceSpecs;
class GrCCAtlas;
class GrCCPerFlushResources;
class GrOnFlushResourceProvider;
class GrProxyProvider;

/**
 * These are keyed by SkPath generation ID, and store which device-space paths are accessed and
 * where by clip FPs in a given opList. A single GrCCClipPath can be referenced by multiple FPs. At
 * flush time their coverage count masks are packed into atlas(es) alongside normal DrawPathOps.
 */
class GrCCClipPath {
|
|
public:
|
|
GrCCClipPath() = default;
|
|
GrCCClipPath(const GrCCClipPath&) = delete;
|
|
|
|
~GrCCClipPath() {
|
|
// Ensure no clip FP exists with a dangling pointer back into this class. This works because
|
|
// a clip FP will have a ref on the proxy if it exists.
|
|
//
|
|
// This assert also guarantees there won't be a lazy proxy callback with a dangling pointer
|
|
// back into this class, since no proxy will exist after we destruct, if the assert passes.
|
|
SkASSERT(!fAtlasLazyProxy || fAtlasLazyProxy->isUnique_debugOnly());
|
|
}
|
|
|
|
bool isInitialized() const { return fAtlasLazyProxy != nullptr; }
|
|
void init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth, int rtHeight,
|
|
const GrCaps&);
|
|
|
|
void addAccess(const SkIRect& accessRect) {
|
|
SkASSERT(this->isInitialized());
|
|
fAccessRect.join(accessRect);
|
|
}
|
|
GrTextureProxy* atlasLazyProxy() const {
|
|
SkASSERT(this->isInitialized());
|
|
return fAtlasLazyProxy.get();
|
|
}
|
|
const SkPath& deviceSpacePath() const {
|
|
SkASSERT(this->isInitialized());
|
|
return fDeviceSpacePath;
|
|
}
|
|
const SkIRect& pathDevIBounds() const {
|
|
SkASSERT(this->isInitialized());
|
|
return fPathDevIBounds;
|
|
}
|
|
|
|
void accountForOwnPath(GrCCPerFlushResourceSpecs*) const;
|
|
void renderPathInAtlas(GrCCPerFlushResources*, GrOnFlushResourceProvider*);
|
|
|
|
const SkVector& atlasScale() const { SkASSERT(fHasAtlasTransform); return fAtlasScale; }
|
|
const SkVector& atlasTranslate() const { SkASSERT(fHasAtlasTransform); return fAtlasTranslate; }
|
|
|
|
private:
|
|
sk_sp<GrTextureProxy> fAtlasLazyProxy;
|
|
SkPath fDeviceSpacePath;
|
|
SkIRect fPathDevIBounds;
|
|
SkIRect fAccessRect;
|
|
|
|
const GrCCAtlas* fAtlas = nullptr;
|
|
SkIVector fDevToAtlasOffset; // Translation from device space to location in atlas.
|
|
SkDEBUGCODE(bool fHasAtlas = false);
|
|
|
|
SkVector fAtlasScale;
|
|
SkVector fAtlasTranslate;
|
|
SkDEBUGCODE(bool fHasAtlasTransform = false);
|
|
};

#endif