skia2/include/private/GrCCClipPath.h
Mike Klein c0bd9f9fe5 rewrite includes to not need so much -Ifoo
Current strategy: everything from the top

Things to look at first are the manual changes:

   - added tools/rewrite_includes.py
   - removed -I directives from BUILD.gn
   - various compile.sh simplifications
   - tweak tools/embed_resources.py
   - update gn/find_headers.py to write paths from the top
   - update gn/gn_to_bp.py SkUserConfig.h layout
     so that #include "include/config/SkUserConfig.h" always
     gets the header we want.

No-Presubmit: true
Change-Id: I73a4b181654e0e38d229bc456c0d0854bae3363e
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/209706
Commit-Queue: Mike Klein <mtklein@google.com>
Reviewed-by: Hal Canary <halcanary@google.com>
Reviewed-by: Brian Osman <brianosman@google.com>
Reviewed-by: Florin Malita <fmalita@chromium.org>
2019-04-24 16:27:11 +00:00

82 lines
2.7 KiB
C++

/*
* Copyright 2018 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef GrCCClipPath_DEFINED
#define GrCCClipPath_DEFINED
#include "include/core/SkPath.h"
#include "include/private/GrTextureProxy.h"
struct GrCCPerFlushResourceSpecs;
class GrCCAtlas;
class GrCCPerFlushResources;
class GrOnFlushResourceProvider;
class GrProxyProvider;
/**
* These are keyed by SkPath generation ID, and store which device-space paths are accessed and
* where by clip FPs in a given opList. A single GrCCClipPath can be referenced by multiple FPs. At
* flush time their coverage count masks are packed into atlas(es) alongside normal DrawPathOps.
*/
class GrCCClipPath {
public:
    GrCCClipPath() = default;

    // Copying is forbidden: clip FPs and the lazy proxy's instantiation callback hold raw
    // pointers back into a specific GrCCClipPath instance. Delete copy-assignment as well as
    // the copy constructor; otherwise the implicitly-generated assignment operator would still
    // be available and could silently alias fAtlasLazyProxy across two objects.
    GrCCClipPath(const GrCCClipPath&) = delete;
    GrCCClipPath& operator=(const GrCCClipPath&) = delete;

    ~GrCCClipPath() {
        // Ensure no clip FP exists with a dangling pointer back into this class. This works
        // because a clip FP will have a ref on the proxy if it exists.
        //
        // This assert also guarantees there won't be a lazy proxy callback with a dangling
        // pointer back into this class, since no proxy will exist after we destruct, if the
        // assert passes.
        SkASSERT(!fAtlasLazyProxy || fAtlasLazyProxy->isUnique_debugOnly());
    }

    // True once init() has created the lazy atlas proxy.
    bool isInitialized() const { return fAtlasLazyProxy != nullptr; }

    // Sets up the path, its device-space bounds, and the lazy atlas proxy. Must be called
    // before any of the accessors below.
    void init(const SkPath& deviceSpacePath, const SkIRect& accessRect, int rtWidth, int rtHeight,
              const GrCaps&);

    // Expands the accumulated access rect to include another region touched by a clip FP.
    void addAccess(const SkIRect& accessRect) {
        SkASSERT(this->isInitialized());
        fAccessRect.join(accessRect);
    }

    // Non-owning pointer; callers that need to keep it alive must take their own ref.
    GrTextureProxy* atlasLazyProxy() const {
        SkASSERT(this->isInitialized());
        return fAtlasLazyProxy.get();
    }

    const SkPath& deviceSpacePath() const {
        SkASSERT(this->isInitialized());
        return fDeviceSpacePath;
    }

    const SkIRect& pathDevIBounds() const {
        SkASSERT(this->isInitialized());
        return fPathDevIBounds;
    }

    void accountForOwnPath(GrCCPerFlushResourceSpecs*) const;
    void renderPathInAtlas(GrCCPerFlushResources*, GrOnFlushResourceProvider*);

    // Only valid after renderPathInAtlas() has established the atlas transform.
    const SkVector& atlasScale() const { SkASSERT(fHasAtlasTransform); return fAtlasScale; }
    const SkVector& atlasTranslate() const { SkASSERT(fHasAtlasTransform); return fAtlasTranslate; }

private:
    sk_sp<GrTextureProxy> fAtlasLazyProxy;
    SkPath fDeviceSpacePath;
    SkIRect fPathDevIBounds;
    SkIRect fAccessRect;

    const GrCCAtlas* fAtlas = nullptr;
    SkIVector fDevToAtlasOffset;  // Translation from device space to location in atlas.
    SkDEBUGCODE(bool fHasAtlas = false;)

    SkVector fAtlasScale;
    SkVector fAtlasTranslate;
    SkDEBUGCODE(bool fHasAtlasTransform = false;)
};
#endif