diff --git a/src/elements.cc b/src/elements.cc
index ab16169516..3ef1ac880a 100644
--- a/src/elements.cc
+++ b/src/elements.cc
@@ -457,9 +457,11 @@ static void SortIndices(
   // store operations that are safe for concurrent marking.
   AtomicSlot start(indices->GetFirstElementAddress());
   std::sort(start, start + sort_size,
-            [isolate](Address elementA, Address elementB) {
-              const Object* a = reinterpret_cast<Object*>(elementA);
-              const Object* b = reinterpret_cast<Object*>(elementB);
+            [isolate](Tagged_t elementA, Tagged_t elementB) {
+              // TODO(ishell): revisit the code below
+              STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
+              ObjectPtr a(elementA);
+              ObjectPtr b(elementB);
               if (a->IsSmi() || !a->IsUndefined(isolate)) {
                 if (!b->IsSmi() && b->IsUndefined(isolate)) {
                   return true;
diff --git a/src/feedback-vector-inl.h b/src/feedback-vector-inl.h
index 92bed37d7a..2b4b2d8d58 100644
--- a/src/feedback-vector-inl.h
+++ b/src/feedback-vector-inl.h
@@ -166,7 +166,7 @@ void FeedbackVector::set(int index, MaybeObject value, WriteBarrierMode mode) {
   DCHECK_GE(index, 0);
   DCHECK_LT(index, this->length());
   int offset = kFeedbackSlotsOffset + index * kPointerSize;
-  RELAXED_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
   CONDITIONAL_WEAK_WRITE_BARRIER(this, offset, value, mode);
 }
 
diff --git a/src/globals.h b/src/globals.h
index c1d5f0084f..aa392e6f41 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -12,6 +12,7 @@
 #include <ostream>
 
 #include "include/v8-internal.h"
+#include "src/base/atomic-utils.h"
 #include "src/base/build_config.h"
 #include "src/base/flags.h"
 #include "src/base/logging.h"
@@ -207,6 +208,14 @@ constexpr int kTaggedSize = kSystemPointerSize;
 constexpr int kTaggedSizeLog2 = kSystemPointerSizeLog2;
 STATIC_ASSERT(kTaggedSize == (1 << kTaggedSizeLog2));
 
+// These types define raw and atomic storage types for tagged values stored
+// on V8 heap.
+using Tagged_t = Address;
+using AtomicTagged_t = base::AtomicWord;
+using AsAtomicTagged = base::AsAtomicPointerImpl<AtomicTagged_t>;
+STATIC_ASSERT(sizeof(Tagged_t) == kTaggedSize);
+STATIC_ASSERT(sizeof(AtomicTagged_t) == kTaggedSize);
+
 // TODO(ishell): use kTaggedSize or kSystemPointerSize instead.
 constexpr int kPointerSize = kSystemPointerSize;
 constexpr int kPointerSizeLog2 = kSystemPointerSizeLog2;
diff --git a/src/objects-inl.h b/src/objects-inl.h
index b9d80f556b..fc093c33b4 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -14,7 +14,6 @@
 
 #include "src/objects.h"
 
-#include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/base/tsan.h"
 #include "src/builtins/builtins.h"
@@ -889,11 +888,9 @@ ObjectSlot HeapObject::map_slot() {
 }
 
 MapWord HeapObject::map_word() const {
-  return MapWord(
-      reinterpret_cast<Address>(RELAXED_READ_FIELD(this, kMapOffset)));
+  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
 }
 
-
 void HeapObject::set_map_word(MapWord map_word) {
   RELAXED_WRITE_FIELD(this, kMapOffset,
                       reinterpret_cast<Object*>(map_word.value_));
@@ -1408,7 +1405,7 @@ MaybeObject DescriptorArray::get(int index) const {
 
 void DescriptorArray::set(int index, MaybeObject value) {
   DCHECK(index >= 0 && index < this->length());
-  RELAXED_WRITE_FIELD(this, offset(index), value);
+  RELAXED_WRITE_WEAK_FIELD(this, offset(index), value);
   WEAK_WRITE_BARRIER(this, offset(index), value);
 }
 
diff --git a/src/objects.cc b/src/objects.cc
index 383bdeeae4..e88a8a2c2d 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -18041,7 +18041,9 @@ int Dictionary<Derived, Shape>::NumberOfEnumerableProperties() {
 template <typename Dictionary>
 struct EnumIndexComparator {
   explicit EnumIndexComparator(Dictionary dict) : dict(dict) {}
-  bool operator()(Address a, Address b) {
+  bool operator()(Tagged_t a, Tagged_t b) {
+    // TODO(ishell): revisit the code below
+    STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
     PropertyDetails da(dict->DetailsAt(Smi(a).value()));
     PropertyDetails db(dict->DetailsAt(Smi(b).value()));
     return da.dictionary_index() < db.dictionary_index();
diff --git a/src/objects/embedder-data-slot.h b/src/objects/embedder-data-slot.h
index df78367912..821406bf92 100644
--- a/src/objects/embedder-data-slot.h
+++ b/src/objects/embedder-data-slot.h
@@ -30,7 +30,7 @@ class Object;
 // Storing heap object through this slot may require triggering write barriers
 // so this operation must be done via static store_tagged() methods.
 class EmbedderDataSlot
-    : public SlotBase<EmbedderDataSlot, kEmbedderDataSlotSize> {
+    : public SlotBase<EmbedderDataSlot, Address, kEmbedderDataSlotSize> {
  public:
   EmbedderDataSlot() : SlotBase(kNullAddress) {}
   V8_INLINE EmbedderDataSlot(EmbedderDataArray array, int entry_index);
diff --git a/src/objects/fixed-array-inl.h b/src/objects/fixed-array-inl.h
index 39f10855c0..7f8ad2f4ed 100644
--- a/src/objects/fixed-array-inl.h
+++ b/src/objects/fixed-array-inl.h
@@ -277,7 +277,7 @@ void WeakFixedArray::Set(int index, MaybeObject value) {
   DCHECK_GE(index, 0);
   DCHECK_LT(index, length());
   int offset = OffsetOfElementAt(index);
-  RELAXED_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
   WEAK_WRITE_BARRIER(this, offset, value);
 }
 
@@ -285,7 +285,7 @@ void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
   DCHECK_GE(index, 0);
   DCHECK_LT(index, length());
   int offset = OffsetOfElementAt(index);
-  RELAXED_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
   CONDITIONAL_WEAK_WRITE_BARRIER(this, offset, value, mode);
 }
 
@@ -306,7 +306,7 @@ void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
   DCHECK_GE(index, 0);
   DCHECK_LT(index, this->capacity());
   int offset = OffsetOfElementAt(index);
-  RELAXED_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_WEAK_FIELD(this, offset, value);
   CONDITIONAL_WEAK_WRITE_BARRIER(this, offset, value, mode);
 }
 
diff --git a/src/objects/heap-object-inl.h b/src/objects/heap-object-inl.h
index 6ca2ffc19a..bd00a27eb7 100644
--- a/src/objects/heap-object-inl.h
+++ b/src/objects/heap-object-inl.h
@@ -118,8 +118,7 @@ ObjectSlot HeapObjectPtr::map_slot() {
 }
 
 MapWord HeapObjectPtr::map_word() const {
-  return MapWord(
-      reinterpret_cast<Address>(RELAXED_READ_FIELD(this, kMapOffset)));
+  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
 }
 
 void HeapObjectPtr::set_map_word(MapWord map_word) {
diff --git a/src/objects/object-macros.h b/src/objects/object-macros.h
index f013878945..c6c2638177 100644
--- a/src/objects/object-macros.h
+++ b/src/objects/object-macros.h
@@ -296,49 +296,39 @@
 
 #define FIELD_ADDR(p, offset) ((p)->ptr() + offset - kHeapObjectTag)
 
-#define READ_FIELD(p, offset) \
-  (*reinterpret_cast<Object* const*>(FIELD_ADDR(p, offset)))
+#define READ_FIELD(p, offset) (*ObjectSlot(FIELD_ADDR(p, offset)))
 
-#define READ_WEAK_FIELD(p, offset) \
-  MaybeObject(*reinterpret_cast<Address*>(FIELD_ADDR(p, offset)))
+#define READ_WEAK_FIELD(p, offset) (*MaybeObjectSlot(FIELD_ADDR(p, offset)))
 
-#define ACQUIRE_READ_FIELD(p, offset) \
-  reinterpret_cast<Object*>(base::Acquire_Load( \
-      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR(p, offset))))
+#define ACQUIRE_READ_FIELD(p, offset) \
+  ObjectSlot(FIELD_ADDR(p, offset)).Acquire_Load1()
 
-#define RELAXED_READ_FIELD(p, offset) \
-  reinterpret_cast<Object*>(base::Relaxed_Load( \
-      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR(p, offset))))
+#define RELAXED_READ_FIELD(p, offset) \
+  ObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Load()
 
 #define RELAXED_READ_WEAK_FIELD(p, offset) \
-  MaybeObject(base::Relaxed_Load( \
-      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR(p, offset))))
+  MaybeObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Load()
 
 #ifdef V8_CONCURRENT_MARKING
-#define WRITE_FIELD(p, offset, value) \
-  base::Relaxed_Store( \
-      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
-      static_cast<base::AtomicWord>((value)->ptr()));
-#define WRITE_WEAK_FIELD(p, offset, value) \
-  base::Relaxed_Store( \
-      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
-      static_cast<base::AtomicWord>(value.ptr()));
+#define WRITE_FIELD(p, offset, value) \
+  ObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Store1(value)
+#define WRITE_WEAK_FIELD(p, offset, value) \
+  MaybeObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Store(value)
 #else
 #define WRITE_FIELD(p, offset, value) \
-  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
+  ObjectSlot(FIELD_ADDR(p, offset)).store(value)
 #define WRITE_WEAK_FIELD(p, offset, value) \
-  (*reinterpret_cast<Address*>(FIELD_ADDR(p, offset)) = value.ptr())
+  MaybeObjectSlot(FIELD_ADDR(p, offset)).store(value)
 #endif
 
-#define RELEASE_WRITE_FIELD(p, offset, value) \
-  base::Release_Store( \
-      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
-      static_cast<base::AtomicWord>((value)->ptr()));
+#define RELEASE_WRITE_FIELD(p, offset, value) \
+  ObjectSlot(FIELD_ADDR(p, offset)).Release_Store1(value)
 
-#define RELAXED_WRITE_FIELD(p, offset, value) \
-  base::Relaxed_Store( \
-      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
-      static_cast<base::AtomicWord>((value)->ptr()));
+#define RELAXED_WRITE_FIELD(p, offset, value) \
+  ObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Store1(value)
+
+#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
+  MaybeObjectSlot(FIELD_ADDR(p, offset)).Relaxed_Store(value)
 
 #define WRITE_BARRIER(object, offset, value) \
   do { \
diff --git a/src/objects/slots-atomic-inl.h b/src/objects/slots-atomic-inl.h
index 5150705311..67385824c8 100644
--- a/src/objects/slots-atomic-inl.h
+++ b/src/objects/slots-atomic-inl.h
@@ -18,41 +18,40 @@ namespace internal {
 // FixedArray array;
 // AtomicSlot start(array->GetFirstElementAddress());
 // std::sort(start, start + given_length,
-//           [](Address a, Address b) {
+//           [](Tagged_t a, Tagged_t b) {
+//             // Decompress a and b if necessary.
 //             return my_comparison(a, b);
 //           });
 // Note how the comparator operates on Address values, representing the raw
 // data found at the given heap location, so you probably want to construct
 // an Object from it.
-class AtomicSlot : public SlotBase<AtomicSlot, kPointerSize> {
+class AtomicSlot : public SlotBase<AtomicSlot, Tagged_t, kTaggedSize> {
  public:
   // This class is a stand-in for "Address&" that uses custom atomic
   // read/write operations for the actual memory accesses.
   class Reference {
    public:
-    explicit Reference(Address* address) : address_(address) {}
+    explicit Reference(Tagged_t* address) : address_(address) {}
     Reference(const Reference& other) : address_(other.address_) {}
 
     Reference& operator=(const Reference& other) {
-      base::AsAtomicWord::Relaxed_Store(
-          address_, base::AsAtomicWord::Relaxed_Load(other.address_));
+      AsAtomicTagged::Relaxed_Store(
+          address_, AsAtomicTagged::Relaxed_Load(other.address_));
       return *this;
     }
-    Reference& operator=(Address value) {
-      base::AsAtomicWord::Relaxed_Store(address_, value);
+    Reference& operator=(Tagged_t value) {
+      AsAtomicTagged::Relaxed_Store(address_, value);
       return *this;
     }
 
     // Values of type AtomicSlot::reference must be implicitly convertible
     // to AtomicSlot::value_type.
-    operator Address() const {
-      return base::AsAtomicWord::Relaxed_Load(address_);
-    }
+    operator Tagged_t() const { return AsAtomicTagged::Relaxed_Load(address_); }
 
     void swap(Reference& other) {
       Address tmp = value();
-      base::AsAtomicWord::Relaxed_Store(address_, other.value());
-      base::AsAtomicWord::Relaxed_Store(other.address_, tmp);
+      AsAtomicTagged::Relaxed_Store(address_, other.value());
+      AsAtomicTagged::Relaxed_Store(other.address_, tmp);
     }
 
     bool operator<(const Reference& other) const {
@@ -64,15 +63,15 @@ class AtomicSlot : public SlotBase<AtomicSlot, kPointerSize> {
     }
 
    private:
-    Address value() const { return base::AsAtomicWord::Relaxed_Load(address_); }
+    Address value() const { return AsAtomicTagged::Relaxed_Load(address_); }
 
-    Address* address_;
+    Tagged_t* address_;
   };
 
   // The rest of this class follows C++'s "RandomAccessIterator" requirements.
   // Most of the heavy lifting is inherited from SlotBase.
   typedef int difference_type;
-  typedef Address value_type;
+  typedef Tagged_t value_type;
   typedef Reference reference;
   typedef void* pointer;  // Must be present, but should not be used.
   typedef std::random_access_iterator_tag iterator_category;
@@ -82,16 +81,16 @@ class AtomicSlot : public SlotBase<AtomicSlot, kPointerSize> {
   explicit AtomicSlot(ObjectSlot slot) : SlotBase(slot.address()) {}
 
   Reference operator*() const {
-    return Reference(reinterpret_cast<Address*>(address()));
+    return Reference(reinterpret_cast<Tagged_t*>(address()));
   }
   Reference operator[](difference_type i) const {
-    return Reference(reinterpret_cast<Address*>(address() + i * kPointerSize));
+    return Reference(reinterpret_cast<Tagged_t*>(address() + i * kTaggedSize));
  }
 
   friend void swap(Reference lhs, Reference rhs) { lhs.swap(rhs); }
 
   friend difference_type operator-(AtomicSlot a, AtomicSlot b) {
-    return static_cast<int>(a.address() - b.address()) / kPointerSize;
+    return static_cast<int>(a.address() - b.address()) / kTaggedSize;
   }
 };
diff --git a/src/objects/slots-inl.h b/src/objects/slots-inl.h
index 4135a465c8..8ce7354afe 100644
--- a/src/objects/slots-inl.h
+++ b/src/objects/slots-inl.h
@@ -18,49 +18,69 @@ namespace internal {
 ObjectSlot::ObjectSlot(ObjectPtr* object)
     : SlotBase(reinterpret_cast<Address>(&object->ptr_)) {}
 
-void ObjectSlot::store(Object* value) { *location() = value->ptr(); }
+void ObjectSlot::store(Object* value) const { *location() = value->ptr(); }
 
 ObjectPtr ObjectSlot::Acquire_Load() const {
-  return ObjectPtr(base::AsAtomicWord::Acquire_Load(location()));
+  return ObjectPtr(AsAtomicTagged::Acquire_Load(location()));
+}
+
+Object* ObjectSlot::Acquire_Load1() const {
+  return reinterpret_cast<Object*>(AsAtomicTagged::Acquire_Load(location()));
 }
 
 ObjectPtr ObjectSlot::Relaxed_Load() const {
-  return ObjectPtr(base::AsAtomicWord::Relaxed_Load(location()));
+  return ObjectPtr(AsAtomicTagged::Relaxed_Load(location()));
 }
 
 void ObjectSlot::Relaxed_Store(ObjectPtr value) const {
-  base::AsAtomicWord::Relaxed_Store(location(), value->ptr());
+  AsAtomicTagged::Relaxed_Store(location(), value->ptr());
+}
+
+void ObjectSlot::Relaxed_Store1(Object* value) const {
+  AsAtomicTagged::Relaxed_Store(location(), value->ptr());
+}
+
+void ObjectSlot::Release_Store1(Object* value) const {
+  AsAtomicTagged::Release_Store(location(), value->ptr());
 }
 
 void ObjectSlot::Release_Store(ObjectPtr value) const {
-  base::AsAtomicWord::Release_Store(location(), value->ptr());
+  AsAtomicTagged::Release_Store(location(), value->ptr());
 }
 
 ObjectPtr ObjectSlot::Release_CompareAndSwap(ObjectPtr old,
                                              ObjectPtr target) const {
-  Address result = base::AsAtomicWord::Release_CompareAndSwap(
+  Address result = AsAtomicTagged::Release_CompareAndSwap(
       location(), old->ptr(), target->ptr());
   return ObjectPtr(result);
 }
 
-MaybeObject MaybeObjectSlot::operator*() { return MaybeObject(*location()); }
+MaybeObject MaybeObjectSlot::operator*() const {
+  return MaybeObject(*location());
+}
 
-void MaybeObjectSlot::store(MaybeObject value) { *location() = value.ptr(); }
+void MaybeObjectSlot::store(MaybeObject value) const {
+  *location() = value.ptr();
+}
 
 MaybeObject MaybeObjectSlot::Relaxed_Load() const {
-  return MaybeObject(base::AsAtomicWord::Relaxed_Load(location()));
+  return MaybeObject(AsAtomicTagged::Relaxed_Load(location()));
+}
+
+void MaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
+  AsAtomicTagged::Relaxed_Store(location(), value->ptr());
 }
 
 void MaybeObjectSlot::Release_CompareAndSwap(MaybeObject old,
                                              MaybeObject target) const {
-  base::AsAtomicWord::Release_CompareAndSwap(location(), old.ptr(),
-                                             target.ptr());
+  AsAtomicTagged::Release_CompareAndSwap(location(), old.ptr(), target.ptr());
 }
 
-HeapObjectReference HeapObjectSlot::operator*() {
+HeapObjectReference HeapObjectSlot::operator*() const {
   return HeapObjectReference(*location());
 }
-void HeapObjectSlot::store(HeapObjectReference value) {
+
+void HeapObjectSlot::store(HeapObjectReference value) const {
   *location() = value.ptr();
 }
diff --git a/src/objects/slots.h b/src/objects/slots.h
index 2f022cdcc1..5136052c07 100644
--- a/src/objects/slots.h
+++ b/src/objects/slots.h
@@ -12,9 +12,12 @@ namespace internal {
 
 class ObjectPtr;
 
-template <typename Subclass, size_t SlotDataSize>
+template <typename Subclass, typename Data, size_t SlotDataSize>
 class SlotBase {
  public:
+  using TData = Data;
+  // TODO(ishell): This should eventually become just sizeof(TData) once
+  // pointer compression is implemented.
   static constexpr size_t kSlotDataSize = SlotDataSize;
 
   Subclass& operator++() {  // Prefix increment.
@@ -65,7 +68,7 @@ class SlotBase {
 
   Address address() const { return ptr_; }
   // For symmetry with Handle.
-  Address* location() const { return reinterpret_cast<Address*>(ptr_); }
+  TData* location() const { return reinterpret_cast<TData*>(ptr_); }
 
  protected:
   STATIC_ASSERT(IsAligned(kSlotDataSize, kTaggedSize));
@@ -80,11 +83,11 @@ class SlotBase {
   Address ptr_;
 };
 
-// An ObjectSlot instance describes a pointer-sized field ("slot") holding
+// An ObjectSlot instance describes a kTaggedSize-sized field ("slot") holding
 // a tagged pointer (smi or heap object).
 // Its address() is the address of the slot.
 // The slot's contents can be read and written using operator* and store().
-class ObjectSlot : public SlotBase<ObjectSlot, kPointerSize> {
+class ObjectSlot : public SlotBase<ObjectSlot, Tagged_t, kTaggedSize> {
  public:
   ObjectSlot() : SlotBase(kNullAddress) {}
   explicit ObjectSlot(Address ptr) : SlotBase(ptr) {}
@@ -94,11 +97,11 @@ class ObjectSlot : public SlotBase<ObjectSlot, kPointerSize> {
   explicit ObjectSlot(Object const* const* ptr)
       : SlotBase(reinterpret_cast<Address>(ptr)) {}
   template <typename T>
-  explicit ObjectSlot(SlotBase<T, kSlotDataSize> slot)
+  explicit ObjectSlot(SlotBase<T, TData, kSlotDataSize> slot)
       : SlotBase(slot.address()) {}
 
   Object* operator*() const { return *reinterpret_cast<Object**>(address()); }
-  inline void store(Object* value);
+  inline void store(Object* value) const;
 
   inline ObjectPtr Acquire_Load() const;
   inline ObjectPtr Relaxed_Load() const;
@@ -106,51 +109,60 @@ class ObjectSlot : public SlotBase<ObjectSlot, kPointerSize> {
   inline void Release_Store(ObjectPtr value) const;
   inline ObjectPtr Release_CompareAndSwap(ObjectPtr old,
                                           ObjectPtr target) const;
+  // Old-style alternative for the above, temporarily separate to allow
+  // incremental transition.
+  // TODO(3770): Get rid of the duplication when the migration is complete.
+  inline Object* Acquire_Load1() const;
+  inline void Relaxed_Store1(Object* value) const;
+  inline void Release_Store1(Object* value) const;
 };
 
-// A MaybeObjectSlot instance describes a pointer-sized field ("slot") holding
-// a possibly-weak tagged pointer (think: MaybeObject).
+// A MaybeObjectSlot instance describes a kTaggedSize-sized field ("slot")
+// holding a possibly-weak tagged pointer (think: MaybeObject).
 // Its address() is the address of the slot.
 // The slot's contents can be read and written using operator* and store().
-class MaybeObjectSlot : public SlotBase<MaybeObjectSlot, kPointerSize> {
+class MaybeObjectSlot
+    : public SlotBase<MaybeObjectSlot, Tagged_t, kTaggedSize> {
  public:
   explicit MaybeObjectSlot(Address ptr) : SlotBase(ptr) {}
   explicit MaybeObjectSlot(Object** ptr)
       : SlotBase(reinterpret_cast<Address>(ptr)) {}
   template <typename T>
-  explicit MaybeObjectSlot(SlotBase<T, kSlotDataSize> slot)
+  explicit MaybeObjectSlot(SlotBase<T, TData, kSlotDataSize> slot)
       : SlotBase(slot.address()) {}
 
-  inline MaybeObject operator*();
-  inline void store(MaybeObject value);
+  inline MaybeObject operator*() const;
+  inline void store(MaybeObject value) const;
 
   inline MaybeObject Relaxed_Load() const;
+  inline void Relaxed_Store(MaybeObject value) const;
   inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
 };
 
-// A HeapObjectSlot instance describes a pointer-sized field ("slot") holding
-// a weak or strong pointer to a heap object (think: HeapObjectReference).
+// A HeapObjectSlot instance describes a kTaggedSize-sized field ("slot")
+// holding a weak or strong pointer to a heap object (think:
+// HeapObjectReference).
 // Its address() is the address of the slot.
 // The slot's contents can be read and written using operator* and store().
 // In case it is known that that slot contains a strong heap object pointer,
 // ToHeapObject() can be used to retrieve that heap object.
-class HeapObjectSlot : public SlotBase<HeapObjectSlot, kPointerSize> {
+class HeapObjectSlot : public SlotBase<HeapObjectSlot, Tagged_t, kTaggedSize> {
  public:
   HeapObjectSlot() : SlotBase(kNullAddress) {}
   explicit HeapObjectSlot(Address ptr) : SlotBase(ptr) {}
   template <typename T>
-  explicit HeapObjectSlot(SlotBase<T, kSlotDataSize> slot)
+  explicit HeapObjectSlot(SlotBase<T, TData, kSlotDataSize> slot)
      : SlotBase(slot.address()) {}
 
-  inline HeapObjectReference operator*();
-  inline void store(HeapObjectReference value);
+  inline HeapObjectReference operator*() const;
+  inline void store(HeapObjectReference value) const;
 
-  HeapObject* ToHeapObject() {
+  HeapObject* ToHeapObject() const {
     DCHECK((*location() & kHeapObjectTagMask) == kHeapObjectTag);
     return reinterpret_cast<HeapObject*>(*location());
   }
 
-  void StoreHeapObject(HeapObject* value) {
+  void StoreHeapObject(HeapObject* value) const {
     *reinterpret_cast<HeapObject**>(address()) = value;
   }
 };
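
A standalone sketch of the AtomicSlot idea used by the SortIndices() hunk above: a random-access iterator whose operator* returns a proxy Reference that routes every read and write through relaxed atomic operations, so std::sort can permute slot contents while the words remain accessed atomically, and the comparator receives raw word values (the role Tagged_t plays in the patch). This is illustrative only, not part of the patch and not V8 code; TaggedWord, AtomicSlot, and Reference below are names invented for the sketch, which uses only the C++ standard library.

// Minimal analogue of slots-atomic-inl.h; not V8 code.
#include <algorithm>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <iterator>

using TaggedWord = std::uintptr_t;

class AtomicSlot {
 public:
  // Proxy standing in for "TaggedWord&" that funnels all accesses through
  // relaxed atomic loads and stores.
  class Reference {
   public:
    explicit Reference(std::atomic<TaggedWord>* cell) : cell_(cell) {}
    Reference(const Reference& other) = default;
    Reference& operator=(const Reference& other) {
      cell_->store(other.value(), std::memory_order_relaxed);
      return *this;
    }
    Reference& operator=(TaggedWord v) {
      cell_->store(v, std::memory_order_relaxed);
      return *this;
    }
    operator TaggedWord() const { return value(); }
    friend void swap(Reference a, Reference b) {
      TaggedWord tmp = a.value();
      a.cell_->store(b.value(), std::memory_order_relaxed);
      b.cell_->store(tmp, std::memory_order_relaxed);
    }

   private:
    TaggedWord value() const { return cell_->load(std::memory_order_relaxed); }
    std::atomic<TaggedWord>* cell_;
  };

  using difference_type = std::ptrdiff_t;
  using value_type = TaggedWord;
  using reference = Reference;
  using pointer = void*;  // Required by iterator_traits, otherwise unused.
  using iterator_category = std::random_access_iterator_tag;

  explicit AtomicSlot(std::atomic<TaggedWord>* p) : p_(p) {}

  Reference operator*() const { return Reference(p_); }
  Reference operator[](difference_type i) const { return Reference(p_ + i); }

  AtomicSlot& operator++() { ++p_; return *this; }
  AtomicSlot& operator--() { --p_; return *this; }
  AtomicSlot& operator+=(difference_type n) { p_ += n; return *this; }
  AtomicSlot& operator-=(difference_type n) { p_ -= n; return *this; }
  AtomicSlot operator+(difference_type n) const { return AtomicSlot(p_ + n); }
  AtomicSlot operator-(difference_type n) const { return AtomicSlot(p_ - n); }
  friend difference_type operator-(AtomicSlot a, AtomicSlot b) {
    return a.p_ - b.p_;
  }
  friend bool operator==(AtomicSlot a, AtomicSlot b) { return a.p_ == b.p_; }
  friend bool operator!=(AtomicSlot a, AtomicSlot b) { return a.p_ != b.p_; }
  friend bool operator<(AtomicSlot a, AtomicSlot b) { return a.p_ < b.p_; }
  friend bool operator>(AtomicSlot a, AtomicSlot b) { return b < a; }
  friend bool operator<=(AtomicSlot a, AtomicSlot b) { return !(b < a); }
  friend bool operator>=(AtomicSlot a, AtomicSlot b) { return !(a < b); }

 private:
  std::atomic<TaggedWord>* p_;
};

int main() {
  std::atomic<TaggedWord> words[] = {{3}, {1}, {4}, {1}, {5}, {9}, {2}, {6}};
  const std::ptrdiff_t n = sizeof(words) / sizeof(words[0]);
  AtomicSlot start(words);
  // As in the SortIndices() hunk: the comparator receives raw word values.
  std::sort(start, start + n,
            [](TaggedWord a, TaggedWord b) { return a < b; });
  for (std::ptrdiff_t i = 0; i < n; ++i) {
    std::printf("%llu ", static_cast<unsigned long long>(
                             words[i].load(std::memory_order_relaxed)));
  }
  std::printf("\n");
  return 0;
}

The proxy-reference approach mirrors what the patch does with AsAtomicTagged in slots-atomic-inl.h: the iterator itself stays trivial, and all memory accesses funnel through the Reference so their atomicity and ordering can be controlled in one place; it relies on the same std::sort behavior with proxy references that the real AtomicSlot depends on.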