/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkWeakRefCnt_DEFINED
#define SkWeakRefCnt_DEFINED

#include "SkRefCnt.h"

#include <atomic>

/** \class SkWeakRefCnt

    SkWeakRefCnt is the base class for objects that may be shared by multiple
    objects. When an existing strong owner wants to share a reference, it calls
    ref(). When a strong owner wants to release its reference, it calls
    unref(). When the shared object's strong reference count goes to zero as
    the result of an unref() call, its (virtual) weak_dispose method is called.
    It is an error for the destructor to be called explicitly (or via the
    object going out of scope on the stack or calling delete) if
    getRefCnt() > 1.

    In addition to strong ownership, an owner may instead obtain a weak
    reference by calling weak_ref(). A call to weak_ref() must be balanced by a
    call to weak_unref(). To obtain a strong reference from a weak reference,
    call try_ref(). If try_ref() returns true, the owner's pointer is now also
    a strong reference on which unref() must be called. Note that this does not
    affect the original weak reference; weak_unref() must still be called. When
    the weak reference count goes to zero, the object is deleted. While the
    weak reference count is positive and the strong reference count is zero the
    object still exists, but will be in the disposed state. It is up to the
    object to define what this means.

    Note that a strong reference implicitly implies a weak reference. As a
    result, it is allowable for the owner of a strong ref to call try_ref().
    This will have the same effect as calling ref(), but may be more expensive.

    Example:

        SkWeakRefCnt& myRef = strongRef;    // strongRef is an existing strong owner
        myRef.weak_ref();                   // take an additional weak reference
        ... // strongRef.unref() may or may not be called
        if (myRef.try_ref()) {
            ... // use myRef
            myRef.unref();
        } else {
            // myRef is in the disposed state
        }
        myRef.weak_unref();
*/
class SK_API SkWeakRefCnt : public SkRefCnt {
public:
    /** Default construct, initializing the reference counts to 1.
        The strong references collectively hold one weak reference. When the
        strong reference count goes to zero, the collectively held weak
        reference is released.
    */
    SkWeakRefCnt() : SkRefCnt(), fWeakCnt(1) {}

    /** Destruct, asserting that the weak reference count is 1.
    */
    virtual ~SkWeakRefCnt() {
#ifdef SK_DEBUG
        SkASSERT(getWeakCnt() == 1);
        fWeakCnt.store(0, std::memory_order_relaxed);
#endif
    }

#ifdef SK_DEBUG
    /** Return the weak reference count. */
    int32_t getWeakCnt() const {
        return fWeakCnt.load(std::memory_order_relaxed);
    }

    void validate() const {
        this->INHERITED::validate();
        SkASSERT(getWeakCnt() > 0);
    }
#endif

private:
    /** If fRefCnt is 0, returns 0.
     *  Otherwise increments fRefCnt, acquires, and returns the old value.
     */
    int32_t atomic_conditional_acquire_strong_ref() const {
        int32_t prev = fRefCnt.load(std::memory_order_relaxed);
        do {
            if (0 == prev) {
                break;
            }
        } while(!fRefCnt.compare_exchange_weak(prev, prev+1, std::memory_order_acquire,
                                               std::memory_order_relaxed));
        return prev;
    }
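
    // Note: this is the same "increment only while still non-zero" approach that
    // std::weak_ptr<T>::lock() uses, so an object whose strong count has already
    // reached zero can never be revived by a racing try_ref().
    // compare_exchange_weak() may fail spuriously, hence the retry loop.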

public:
    /** Creates a strong reference from a weak reference, if possible. The
        caller must already be an owner. If try_ref() returns true the owner
        is in possession of an additional strong reference. Both the original
        reference and new reference must be properly unreferenced. If try_ref()
        returns false, no strong reference could be created and the owner's
        reference is in the same state as before the call.
    */
    bool SK_WARN_UNUSED_RESULT try_ref() const {
        if (atomic_conditional_acquire_strong_ref() != 0) {
            // Acquire barrier (L/SL), if not provided above.
            // Prevents subsequent code from happening before the increment.
            return true;
        }
        return false;
    }

    /** Increment the weak reference count. Must be balanced by a call to
        weak_unref().
    */
    void weak_ref() const {
        SkASSERT(getRefCnt() > 0);
        SkASSERT(getWeakCnt() > 0);
        // No barrier required.
        (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the weak reference count. If the weak reference count is 1
        before the decrement, then call delete on the object. Note that if this
        is the case, then the object needs to have been allocated via new, and
        not on the stack.
    */
    void weak_unref() const {
        SkASSERT(getWeakCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fWeakCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like try_ref(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
#ifdef SK_DEBUG
            // so our destructor won't complain
            fWeakCnt.store(1, std::memory_order_relaxed);
#endif
            this->INHERITED::internal_dispose();
        }
    }

    /** Returns true if there are no strong references to the object. When this
        is the case all future calls to try_ref() will return false.
    */
    bool weak_expired() const {
        return fRefCnt.load(std::memory_order_relaxed) == 0;
    }

protected:
    /** Called when the strong reference count goes to zero. This allows the
        object to free any resources it may be holding. Weak references may
        still exist and their level of allowed access to the object is defined
        by the object's class.
    */
    virtual void weak_dispose() const {
    }
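
    // A minimal sketch of what an override might look like (hypothetical members,
    // not part of this header): a subclass could release an expensive payload here
    // while weak owners keep the disposed object itself alive.
    //
    //     void weak_dispose() const override {
    //         fPayload.reset();                 // fPayload: hypothetical mutable member
    //         this->INHERITED::weak_dispose();  // preserve the base behavior
    //     }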

private:
    /** Called when the strong reference count goes to zero. Calls weak_dispose
        on the object and releases the implicit weak reference held
        collectively by the strong references.
    */
    void internal_dispose() const override {
        weak_dispose();
        weak_unref();
    }

    /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */
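    // Worked example of the invariant: with two strong owners and one weak owner,
    // fRefCnt == 2 and fWeakCnt == 2 (the weak owner's reference plus the single
    // weak reference held collectively by the strong owners).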
    mutable std::atomic<int32_t> fWeakCnt;

    typedef SkRefCnt INHERITED;
};

#endif