2011-07-28 14:26:00 +00:00
|
|
|
|
2008-12-17 15:59:43 +00:00
|
|
|
/*
|
2011-07-28 14:26:00 +00:00
|
|
|
* Copyright 2006 The Android Open Source Project
|
2008-12-17 15:59:43 +00:00
|
|
|
*
|
2011-07-28 14:26:00 +00:00
|
|
|
* Use of this source code is governed by a BSD-style license that can be
|
|
|
|
* found in the LICENSE file.
|
2008-12-17 15:59:43 +00:00
|
|
|
*/
|
|
|
|
|
2011-07-28 14:26:00 +00:00
|
|
|
|
2008-12-17 15:59:43 +00:00
|
|
|
#ifndef SkRefCnt_DEFINED
|
|
|
|
#define SkRefCnt_DEFINED
|
|
|
|
|
2014-02-04 18:00:23 +00:00
|
|
|
#include "SkDynamicAnnotations.h"
|
2014-05-28 20:02:17 +00:00
|
|
|
#include "SkThread.h"
|
2012-06-05 19:35:09 +00:00
|
|
|
#include "SkInstCnt.h"
|
2012-07-30 15:03:59 +00:00
|
|
|
#include "SkTemplates.h"
|
2008-12-17 15:59:43 +00:00
|
|
|
|
2013-10-16 15:15:58 +00:00
|
|
|
/** \class SkRefCntBase
|
|
|
|
|
2013-10-25 18:40:24 +00:00
|
|
|
SkRefCntBase is the base class for objects that may be shared by multiple
|
2012-05-16 18:21:56 +00:00
|
|
|
objects. When an existing owner wants to share a reference, it calls ref().
|
|
|
|
When an owner wants to release its reference, it calls unref(). When the
|
|
|
|
shared object's reference count goes to zero as the result of an unref()
|
|
|
|
call, its (virtual) destructor is called. It is an error for the
|
|
|
|
destructor to be called explicitly (or via the object going out of scope on
|
|
|
|
the stack or calling delete) if getRefCnt() > 1.
|
2008-12-17 15:59:43 +00:00
|
|
|
*/
|
2014-04-07 19:34:38 +00:00
|
|
|
class SK_API SkRefCntBase : SkNoncopyable {
|
2008-12-17 15:59:43 +00:00
|
|
|
public:
|
2013-10-25 18:40:24 +00:00
|
|
|
SK_DECLARE_INST_COUNT_ROOT(SkRefCntBase)
|
2012-06-05 19:35:09 +00:00
|
|
|
|
2008-12-17 15:59:43 +00:00
|
|
|
/** Default construct, initializing the reference count to 1.
|
|
|
|
*/
|
2013-10-25 18:40:24 +00:00
|
|
|
SkRefCntBase() : fRefCnt(1) {}
|
2008-12-17 15:59:43 +00:00
|
|
|
|
2012-05-16 18:21:56 +00:00
|
|
|
/** Destruct, asserting that the reference count is 1.
|
2008-12-17 15:59:43 +00:00
|
|
|
*/
|
2013-10-25 18:40:24 +00:00
|
|
|
virtual ~SkRefCntBase() {
|
2011-09-12 19:54:12 +00:00
|
|
|
#ifdef SK_DEBUG
|
2014-06-18 14:54:47 +00:00
|
|
|
SkASSERTF(fRefCnt == 1, "fRefCnt was %d", fRefCnt);
|
2011-09-12 19:54:12 +00:00
|
|
|
fRefCnt = 0; // illegal value, to catch us if we reuse after delete
|
|
|
|
#endif
|
|
|
|
}
|
2008-12-17 15:59:43 +00:00
|
|
|
|
2013-07-19 23:18:52 +00:00
|
|
|
/** Return the reference count. Use only for debugging. */
|
2008-12-17 15:59:43 +00:00
|
|
|
int32_t getRefCnt() const { return fRefCnt; }
|
|
|
|
|
2014-02-04 18:00:23 +00:00
|
|
|
/** May return true if the caller is the only owner.
|
2013-07-19 23:18:52 +00:00
|
|
|
* Ensures that all previous owner's actions are complete.
|
|
|
|
*/
|
|
|
|
bool unique() const {
|
2014-02-04 18:00:23 +00:00
|
|
|
// We believe we're reading fRefCnt in a safe way here, so we stifle the TSAN warning about
|
|
|
|
// an unproctected read. Generally, don't read fRefCnt, and don't stifle this warning.
|
|
|
|
bool const unique = (1 == SK_ANNOTATE_UNPROTECTED_READ(fRefCnt));
|
2013-07-19 23:18:52 +00:00
|
|
|
if (unique) {
|
2013-12-18 15:27:39 +00:00
|
|
|
// Acquire barrier (L/SL), if not provided by load of fRefCnt.
|
2013-07-19 23:18:52 +00:00
|
|
|
// Prevents user's 'unique' code from happening before decrements.
|
|
|
|
//TODO: issue the barrier.
|
|
|
|
}
|
|
|
|
return unique;
|
|
|
|
}
|
|
|
|
|
2008-12-17 15:59:43 +00:00
|
|
|
/** Increment the reference count. Must be balanced by a call to unref().
|
|
|
|
*/
|
|
|
|
void ref() const {
|
2014-04-27 19:21:51 +00:00
|
|
|
SkASSERT(fRefCnt > 0);
|
2012-05-16 18:21:56 +00:00
|
|
|
sk_atomic_inc(&fRefCnt); // No barrier required.
|
2008-12-17 15:59:43 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/** Decrement the reference count. If the reference count is 1 before the
|
2012-05-16 18:21:56 +00:00
|
|
|
decrement, then delete the object. Note that if this is the case, then
|
|
|
|
the object needs to have been allocated via new, and not on the stack.
|
2008-12-17 15:59:43 +00:00
|
|
|
*/
|
|
|
|
void unref() const {
|
2014-04-27 19:21:51 +00:00
|
|
|
SkASSERT(fRefCnt > 0);
|
2012-05-16 18:21:56 +00:00
|
|
|
// Release barrier (SL/S), if not provided below.
|
2008-12-17 15:59:43 +00:00
|
|
|
if (sk_atomic_dec(&fRefCnt) == 1) {
|
2013-12-18 15:27:39 +00:00
|
|
|
// Acquire barrier (L/SL), if not provided above.
|
2012-05-16 18:21:56 +00:00
|
|
|
// Prevents code in dispose from happening before the decrement.
|
2013-12-18 15:27:39 +00:00
|
|
|
sk_membar_acquire__after_atomic_dec();
|
2012-05-16 18:21:56 +00:00
|
|
|
internal_dispose();
|
2008-12-17 15:59:43 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-02 16:42:21 +00:00
|
|
|
#ifdef SK_DEBUG
|
2011-07-15 15:25:22 +00:00
|
|
|
void validate() const {
|
2014-04-27 19:21:51 +00:00
|
|
|
SkASSERT(fRefCnt > 0);
|
2011-07-15 15:25:22 +00:00
|
|
|
}
|
2013-10-02 16:42:21 +00:00
|
|
|
#endif
|
2011-07-15 15:25:22 +00:00
|
|
|
|
2012-07-23 14:50:38 +00:00
|
|
|
protected:
|
|
|
|
/**
|
|
|
|
* Allow subclasses to call this if they've overridden internal_dispose
|
|
|
|
* so they can reset fRefCnt before the destructor is called. Should only
|
|
|
|
* be called right before calling through to inherited internal_dispose()
|
|
|
|
* or before calling the destructor.
|
|
|
|
*/
|
|
|
|
void internal_dispose_restore_refcnt_to_1() const {
|
2012-05-16 18:21:56 +00:00
|
|
|
#ifdef SK_DEBUG
|
2012-07-23 14:50:38 +00:00
|
|
|
SkASSERT(0 == fRefCnt);
|
2012-05-16 18:21:56 +00:00
|
|
|
fRefCnt = 1;
|
|
|
|
#endif
|
2012-07-23 14:50:38 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
private:
|
|
|
|
/**
|
|
|
|
* Called when the ref count goes to 0.
|
|
|
|
*/
|
|
|
|
virtual void internal_dispose() const {
|
|
|
|
this->internal_dispose_restore_refcnt_to_1();
|
2012-05-16 18:21:56 +00:00
|
|
|
SkDELETE(this);
|
|
|
|
}
|
2012-06-22 12:41:43 +00:00
|
|
|
|
2013-07-19 23:18:52 +00:00
|
|
|
// The following friends are those which override internal_dispose()
|
|
|
|
// and conditionally call SkRefCnt::internal_dispose().
|
2012-05-16 18:21:56 +00:00
|
|
|
friend class SkWeakRefCnt;
|
|
|
|
|
2008-12-17 15:59:43 +00:00
|
|
|
mutable int32_t fRefCnt;
|
2012-06-13 18:54:08 +00:00
|
|
|
|
2013-10-25 18:40:24 +00:00
|
|
|
typedef SkNoncopyable INHERITED;
|
2008-12-17 15:59:43 +00:00
|
|
|
};
|
|
|
|
|
2013-10-25 18:40:24 +00:00
|
|
|
#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
// Default case: SkRefCnt is simply SkRefCntBase with no additional mix-in
// behavior. Client code should refer to SkRefCnt, not SkRefCntBase, so the
// mix-in hook above can change the base without touching callers.
class SK_API SkRefCnt : public SkRefCntBase { };
#endif
|
|
|
|
|
2011-07-15 15:25:22 +00:00
|
|
|
///////////////////////////////////////////////////////////////////////////////
|
|
|
|
|
|
|
|
/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null in on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
*/
// NOTE: 'dst' and 'src' are each evaluated more than once, so they must be
// simple lvalues/pointers with no side effects (e.g. not 'next()' calls).
// The do/while(0) wrapper makes the macro safe in unbraced if/else bodies.
#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) src->ref();            \
        if (dst) dst->unref();          \
        dst = src;                      \
    } while (0)
|
|
|
|
|
|
|
|
|
2012-08-22 18:56:56 +00:00
|
|
|
/** Take a new reference on ptr and hand it back for convenient chaining.
    The pointer must not be NULL (debug-asserted, not checked in release).
*/
template <typename T> static inline T* SkRef(T* ptr) {
    SkASSERT(ptr);
    ptr->ref();
    return ptr;
}
|
|
|
|
|
|
|
|
/** Null-tolerant variant of SkRef: takes a reference only when ptr is
    non-null, and always returns ptr unchanged.
*/
template <typename T> static inline T* SkSafeRef(T* ptr) {
    if (ptr != NULL) {
        ptr->ref();
    }
    return ptr;
}
|
|
|
|
|
|
|
|
/** Null-tolerant unref: releases one reference on ptr when it is non-null,
    and is a no-op otherwise.
*/
template <typename T> static inline void SkSafeUnref(T* ptr) {
    if (ptr != NULL) {
        ptr->unref();
    }
}
|
|
|
|
|
2013-09-09 13:38:37 +00:00
|
|
|
/** Release the reference held in ptr (if any) and clear ptr to NULL.
    A ptr that is already NULL is left untouched.
*/
template<typename T> static inline void SkSafeSetNull(T*& ptr) {
    if (ptr) {
        ptr->unref();
        ptr = NULL;
    }
}
|
|
|
|
|
2011-07-15 15:25:22 +00:00
|
|
|
///////////////////////////////////////////////////////////////////////////////
|
|
|
|
|
2011-02-25 18:10:29 +00:00
|
|
|
/**
|
2012-07-30 15:03:59 +00:00
|
|
|
* Utility class that simply unref's its argument in the destructor.
|
2011-02-25 18:10:29 +00:00
|
|
|
*/
|
2012-07-30 15:03:59 +00:00
|
|
|
template <typename T> class SkAutoTUnref : SkNoncopyable {
|
2008-12-17 15:59:43 +00:00
|
|
|
public:
|
2012-07-30 15:03:59 +00:00
|
|
|
explicit SkAutoTUnref(T* obj = NULL) : fObj(obj) {}
|
|
|
|
~SkAutoTUnref() { SkSafeUnref(fObj); }
|
2011-02-25 18:10:29 +00:00
|
|
|
|
|
|
|
T* get() const { return fObj; }
|
|
|
|
|
2013-03-13 21:28:44 +00:00
|
|
|
T* reset(T* obj) {
|
2012-11-27 18:37:52 +00:00
|
|
|
SkSafeUnref(fObj);
|
|
|
|
fObj = obj;
|
2013-03-13 21:28:44 +00:00
|
|
|
return obj;
|
2011-08-09 15:30:41 +00:00
|
|
|
}
|
|
|
|
|
2012-10-03 13:46:20 +00:00
|
|
|
void swap(SkAutoTUnref* other) {
|
|
|
|
T* tmp = fObj;
|
|
|
|
fObj = other->fObj;
|
|
|
|
other->fObj = tmp;
|
|
|
|
}
|
|
|
|
|
2011-02-25 18:10:29 +00:00
|
|
|
/**
|
|
|
|
* Return the hosted object (which may be null), transferring ownership.
|
|
|
|
* The reference count is not modified, and the internal ptr is set to NULL
|
|
|
|
* so unref() will not be called in our destructor. A subsequent call to
|
|
|
|
* detach() will do nothing and return null.
|
|
|
|
*/
|
|
|
|
T* detach() {
|
|
|
|
T* obj = fObj;
|
|
|
|
fObj = NULL;
|
|
|
|
return obj;
|
|
|
|
}
|
2008-12-17 15:59:43 +00:00
|
|
|
|
2012-06-01 19:38:19 +00:00
|
|
|
/**
|
2013-04-16 15:24:31 +00:00
|
|
|
* BlockRef<B> is a type which inherits from B, cannot be created,
|
|
|
|
* cannot be deleted, and makes ref and unref private.
|
2012-06-01 19:38:19 +00:00
|
|
|
*/
|
2012-06-01 19:47:51 +00:00
|
|
|
template<typename B> class BlockRef : public B {
|
2012-06-01 19:38:19 +00:00
|
|
|
private:
|
|
|
|
BlockRef();
|
2013-04-16 15:24:31 +00:00
|
|
|
~BlockRef();
|
2012-06-01 19:38:19 +00:00
|
|
|
void ref() const;
|
|
|
|
void unref() const;
|
|
|
|
};
|
2012-07-27 13:27:35 +00:00
|
|
|
|
2012-07-30 15:03:59 +00:00
|
|
|
/** If T is const, the type returned from operator-> will also be const. */
|
|
|
|
typedef typename SkTConstType<BlockRef<T>, SkTIsConst<T>::value>::type BlockRefType;
|
2012-07-27 13:27:35 +00:00
|
|
|
|
2012-06-01 19:38:19 +00:00
|
|
|
/**
|
|
|
|
* SkAutoTUnref assumes ownership of the ref. As a result, it is an error
|
|
|
|
* for the user to ref or unref through SkAutoTUnref. Therefore
|
|
|
|
* SkAutoTUnref::operator-> returns BlockRef<T>*. This prevents use of
|
|
|
|
* skAutoTUnrefInstance->ref() and skAutoTUnrefInstance->unref().
|
|
|
|
*/
|
2012-07-30 15:03:59 +00:00
|
|
|
BlockRefType *operator->() const {
|
|
|
|
return static_cast<BlockRefType*>(fObj);
|
2012-06-01 19:38:19 +00:00
|
|
|
}
|
2012-07-30 15:03:59 +00:00
|
|
|
operator T*() { return fObj; }
|
2012-07-27 13:27:35 +00:00
|
|
|
|
2012-07-30 15:03:59 +00:00
|
|
|
private:
|
|
|
|
T* fObj;
|
2011-02-25 18:10:29 +00:00
|
|
|
};
|
2013-11-18 16:03:59 +00:00
|
|
|
// Can't use the #define trick below to guard a bare SkAutoTUnref(...) because it's templated. :(
|
2011-02-25 18:10:29 +00:00
|
|
|
|
|
|
|
// Convenience non-template wrapper: SkAutoTUnref fixed to plain SkRefCnt.
class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
public:
    SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
};
// Guard against the bug of writing 'SkAutoUnref(obj);' without naming the
// variable: such a temporary would unref immediately at end of statement.
// SK_REQUIRE_LOCAL_VAR turns that pattern into a compile-time error.
#define SkAutoUnref(...) SK_REQUIRE_LOCAL_VAR(SkAutoUnref)
|
2008-12-17 15:59:43 +00:00
|
|
|
|
|
|
|
#endif
|