Implement sk_atomic_conditional_inc with sk_atomic_cas.
Now that we have sk_atomic_cas, we can replace all the platform-specific CAS loops with one.

BUG=skia:
R=bungeman@google.com, mtklein@google.com, reed@google.com
Author: mtklein@chromium.org

Review URL: https://codereview.chromium.org/300553003

git-svn-id: http://skia.googlecode.com/svn/trunk@14892 2bbb7eff-a529-9590-31e7-b0007b416f81
parent 29239a0f4b
commit 81da061f72
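For context, sk_atomic_conditional_inc exists for the "increment only if still nonzero" pattern used by try-acquire style reference counting. A minimal sketch of such a caller follows; the helper name is hypothetical and not part of this change.

    // Hypothetical caller: take a reference only while the count is still
    // nonzero. A return of 0 means the object is already gone and must not
    // be revived.
    static inline bool try_acquire_ref(int32_t* refCount) {
        return sk_atomic_conditional_inc(refCount) != 0;
    }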
@@ -27,12 +27,6 @@ static int32_t sk_atomic_add(int32_t* addr, int32_t inc);
  */
 static int32_t sk_atomic_dec(int32_t* addr);
 
-/** Atomically adds one to the int referenced by addr iff the referenced int was not 0
- * and returns the previous value.
- * No additional memory barrier is required; this must act as a compiler barrier.
- */
-static int32_t sk_atomic_conditional_inc(int32_t* addr);
-
 /** Atomic compare and set.
  * If *addr == before, set *addr to after and return true, otherwise return false.
  * This must act as a release (SL/S) memory barrier and as a compiler barrier.
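For readers more used to the standard library, a rough C++11 analogue of the sk_atomic_cas contract documented above is sketched below. This is illustrative only; it is not the implementation any Skia port uses (those rely on platform intrinsics, as the hunks below show).

    #include <atomic>
    #include <cstdint>

    // Sketch: like the documented sk_atomic_cas, if *addr == before, store
    // 'after' and return true, otherwise return false. Release ordering on
    // success matches the "release (SL/S) barrier" requirement above.
    static inline bool cas_release_sketch(std::atomic<int32_t>* addr,
                                          int32_t before, int32_t after) {
        return addr->compare_exchange_strong(before, after,
                                             std::memory_order_release,
                                             std::memory_order_relaxed);
    }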
@@ -51,6 +45,21 @@ static void sk_membar_acquire__after_atomic_conditional_inc();
 
 #include SK_ATOMICS_PLATFORM_H
 
+/** Atomically adds one to the int referenced by addr iff the referenced int was not 0
+ * and returns the previous value.
+ * No additional memory barrier is required; this must act as a compiler barrier.
+ */
+static inline int32_t sk_atomic_conditional_inc(int32_t* addr) {
+    int32_t prev;
+    do {
+        prev = *addr;
+        if (0 == prev) {
+            break;
+        }
+    } while (!sk_atomic_cas(addr, prev, prev+1));
+    return prev;
+}
+
 /** SK_MUTEX_PLATFORM_H must provide the following (or equivalent) declarations.
 
     class SkBaseMutex {
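A small single-threaded sanity check of the behavior the new generic loop provides; the test scaffolding here is illustrative and assumes the header above is included.

    #include <cassert>
    #include <cstdint>

    static void conditional_inc_sanity_check() {
        int32_t n = 0;
        assert(0 == sk_atomic_conditional_inc(&n));  // zero is never incremented,
        assert(0 == n);                              // so n stays 0.

        n = 2;
        assert(2 == sk_atomic_conditional_inc(&n));  // the previous value is returned,
        assert(3 == n);                              // and nonzero counts are bumped.
    }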
@@ -32,18 +32,6 @@ static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomi
     //android_atomic_acquire_store(0, &dummy);
 }
 
-static inline __attribute__((always_inline)) int32_t sk_atomic_conditional_inc(int32_t* addr) {
-    while (true) {
-        int32_t value = *addr;
-        if (value == 0) {
-            return 0;
-        }
-        if (0 == android_atomic_release_cas(value, value + 1, addr)) {
-            return value;
-        }
-    }
-}
-
 static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr,
                                                                 int32_t before,
                                                                 int32_t after) {
@@ -32,12 +32,6 @@ static inline int32_t sk_atomic_dec(int32_t* addr) {
 
 static inline void sk_membar_acquire__after_atomic_dec() { }
 
-static inline int32_t sk_atomic_conditional_inc(int32_t* addr) {
-    int32_t value = *addr;
-    if (value != 0) ++*addr;
-    return value;
-}
-
 static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) {
     if (*addr != before) return false;
     *addr = after;
@@ -26,24 +26,6 @@ static inline __attribute__((always_inline)) int32_t sk_atomic_dec(int32_t* addr
 
 static inline __attribute__((always_inline)) void sk_membar_acquire__after_atomic_dec() { }
 
-static inline __attribute__((always_inline)) int32_t sk_atomic_conditional_inc(int32_t* addr) {
-    int32_t value = *addr;
-
-    while (true) {
-        if (value == 0) {
-            return 0;
-        }
-
-        int32_t before = __sync_val_compare_and_swap(addr, value, value + 1);
-
-        if (before == value) {
-            return value;
-        } else {
-            value = before;
-        }
-    }
-}
-
 static inline __attribute__((always_inline)) bool sk_atomic_cas(int32_t* addr,
                                                                 int32_t before,
                                                                 int32_t after) {
@@ -36,23 +36,6 @@ static inline int32_t sk_atomic_dec(int32_t* addr) {
 
 static inline void sk_membar_acquire__after_atomic_dec() { }
 
-static inline int32_t sk_atomic_conditional_inc(int32_t* addr) {
-    long value = *addr;
-    while (true) {
-        if (value == 0) {
-            return 0;
-        }
-
-        long before = _InterlockedCompareExchange(reinterpret_cast<long*>(addr), value + 1, value);
-
-        if (before == value) {
-            return value;
-        } else {
-            value = before;
-        }
-    }
-}
-
 static inline bool sk_atomic_cas(int32_t* addr, int32_t before, int32_t after) {
     return _InterlockedCompareExchange(reinterpret_cast<long*>(addr), after, before) == before;
 }