[ubsan] Introduce ObjectPtr and port PropertyArray
This CL gives a first look at the new way to represent tagged object
pointers in C++. It adds infrastructure in Handles and the garbage
collector to deal with the new object type, and ports a first class to
the new world.

Design overview: https://goo.gl/Ph4CGz

Bug: v8:3770
Change-Id: I3e37fbf399612f95540cb386710a595069fb9d55
Reviewed-on: https://chromium-review.googlesource.com/c/1292673
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56964}
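The core idea, as a minimal standalone sketch (simplified names and constants; this is not the actual V8 header): the old representation reinterprets a tagged word as an `Object*` and calls methods through it, which is undefined behavior that UBSan flags whenever the word is not a real C++ object address. The new `ObjectPtr` stores the tagged word as plain data instead.

// Hedged sketch, not V8's real declarations.
#include <cstdint>
#include <iostream>

using Address = uintptr_t;
constexpr Address kHeapObjectTag = 1;

class ObjectPtr {
 public:
  ObjectPtr() : ptr_(0) {}
  explicit ObjectPtr(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
  bool operator==(ObjectPtr other) const { return ptr_ == other.ptr_; }

 private:
  Address ptr_;  // the tagged word is a data member, never dereferenced as `this`
};

int main() {
  ObjectPtr o(0x1000 | kHeapObjectTag);  // wrap a tagged address
  std::cout << std::hex << o.ptr() << "\n";  // prints 1001
}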
This commit is contained in:
parent e76e44f088
commit 76968a2ff3
BUILD.gn (4 changes)
@@ -880,6 +880,8 @@ action("postmortem-metadata") {
       "src/objects/data-handler-inl.h",
       "src/objects/fixed-array-inl.h",
       "src/objects/fixed-array.h",
+      "src/objects/heap-object-inl.h",
+      "src/objects/heap-object.h",
       "src/objects/js-array-inl.h",
       "src/objects/js-array.h",
       "src/objects/js-array-buffer-inl.h",

@@ -2212,6 +2214,8 @@ v8_source_set("v8_base") {
     "src/objects/frame-array.h",
     "src/objects/hash-table-inl.h",
     "src/objects/hash-table.h",
+    "src/objects/heap-object-inl.h",
+    "src/objects/heap-object.h",
     "src/objects/intl-objects.cc",
     "src/objects/intl-objects.h",
     "src/objects/js-array-buffer-inl.h",
@@ -190,6 +190,7 @@ struct MachineRepresentationOf<
 template <class T>
 struct is_valid_type_tag {
   static const bool value = std::is_base_of<Object, T>::value ||
+                            std::is_base_of<ObjectPtr, T>::value ||
                             std::is_base_of<UntaggedT, T>::value ||
                             std::is_base_of<MaybeObject, T>::value ||
                             std::is_same<ExternalReference, T>::value;

@@ -314,9 +315,15 @@ typedef ZoneVector<CodeAssemblerVariable*> CodeAssemblerVariableList;

 typedef std::function<void()> CodeAssemblerCallback;

+// TODO(3770): The HeapObject/HeapObjectPtr dance is temporary (while the
+// incremental transition is in progress, we want to pretend that subclasses
+// of HeapObjectPtr are also subclasses of Object/HeapObject); it can be
+// removed when the migration is complete.
 template <class T, class U>
 struct is_subtype {
-  static const bool value = std::is_base_of<U, T>::value;
+  static const bool value = std::is_base_of<U, T>::value ||
+                            (std::is_base_of<U, HeapObject>::value &&
+                             std::is_base_of<HeapObjectPtr, T>::value);
 };
 template <class T1, class T2, class U>
 struct is_subtype<UnionT<T1, T2>, U> {

@@ -395,6 +402,7 @@ struct types_have_common_values<MaybeObject, T> {
 // TNode<T> is an SSA value with the static type tag T, which is one of the
 // following:
 //   - a subclass of internal::Object represents a tagged type
+//   - a subclass of internal::ObjectPtr represents a tagged type
 //   - a subclass of internal::UntaggedT represents an untagged type
 //   - ExternalReference
 //   - PairT<T1, T2> for an operation returning two values, with types T1

@@ -630,7 +638,8 @@ class V8_EXPORT_PRIVATE CodeAssembler {
     static_assert(types_have_common_values<A, PreviousType>::value,
                   "Incompatible types: this cast can never succeed.");
-    static_assert(std::is_convertible<TNode<A>, TNode<Object>>::value,
+    static_assert(std::is_convertible<TNode<A>, TNode<Object>>::value ||
+                      std::is_convertible<TNode<A>, TNode<ObjectPtr>>::value,
                   "Coercion to untagged values cannot be "
                   "checked.");
     static_assert(
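A standalone sketch of the is_subtype trick above (simplified class hierarchy, not the real CodeAssembler machinery): while the migration is in progress, a class that now derives from HeapObjectPtr must still be treated as a "subtype" of the old Object/HeapObject hierarchy by the type-checking templates.

#include <type_traits>

class Object {};
class HeapObject : public Object {};
class ObjectPtr {};
class HeapObjectPtr : public ObjectPtr {};
class PropertyArray : public HeapObjectPtr {};  // ported class

template <class T, class U>
struct is_subtype {
  static const bool value =
      std::is_base_of<U, T>::value ||
      (std::is_base_of<U, HeapObject>::value &&    // U is Object or HeapObject...
       std::is_base_of<HeapObjectPtr, T>::value);  // ...and T is a ported type
};

// PropertyArray no longer derives from Object, but still counts as one:
static_assert(is_subtype<PropertyArray, Object>::value, "");
static_assert(is_subtype<PropertyArray, HeapObject>::value, "");
static_assert(!is_subtype<Object, PropertyArray>::value, "");

int main() { return 0; }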
@@ -1545,6 +1545,10 @@ V8_INLINE static bool HasWeakHeapObjectTag(const Object* value) {
           kWeakHeapObjectTag);
 }

+V8_INLINE static bool HasWeakHeapObjectTag(const Address value) {
+  return (value & kHeapObjectTagMask) == kWeakHeapObjectTag;
+}
+
 V8_INLINE static bool IsClearedWeakHeapObject(const MaybeObject* value) {
   return reinterpret_cast<intptr_t>(value) == kClearedWeakHeapObject;
 }
@@ -17,10 +17,16 @@ HandleBase::HandleBase(Address object, Isolate* isolate)

 // Allocate a new handle for the object, do not canonicalize.
 template <typename T>
+template <typename T1, typename>
 Handle<T> Handle<T>::New(T* object, Isolate* isolate) {
   return Handle(reinterpret_cast<T**>(
       HandleScope::CreateHandle(isolate, reinterpret_cast<Address>(object))));
 }
+template <typename T>
+template <typename T1, typename>
+Handle<T> Handle<T>::New(T object, Isolate* isolate) {
+  return Handle(HandleScope::CreateHandle(isolate, object.ptr()));
+}

 template <typename T>
 template <typename S>

@@ -38,14 +44,27 @@ HandleScope::HandleScope(Isolate* isolate) {
 }

 template <typename T>
+template <typename T1, typename>
 Handle<T>::Handle(T* object, Isolate* isolate)
     : HandleBase(reinterpret_cast<Address>(object), isolate) {}

+template <typename T>
+template <typename T1, typename>
+Handle<T>::Handle(T object, Isolate* isolate)
+    : HandleBase(object.ptr(), isolate) {}
+
 template <typename T, typename = typename std::enable_if<
                           std::is_base_of<Object, T>::value>::type>
 V8_INLINE Handle<T> handle(T* object, Isolate* isolate) {
   return Handle<T>(object, isolate);
 }

+template <typename T, typename = typename std::enable_if<
+                          std::is_base_of<ObjectPtr, T>::value>::type>
+V8_INLINE Handle<T> handle(T object, Isolate* isolate) {
+  return Handle<T>(object, isolate);
+}
+
 template <typename T>
 inline std::ostream& operator<<(std::ostream& os, Handle<T> handle) {
   return os << Brief(*handle);
@@ -12,6 +12,11 @@
 #include "src/base/macros.h"
 #include "src/checks.h"
 #include "src/globals.h"
+// TODO(3770): The objects.h include is required to make the
+// std::enable_if<std::is_base_of<...>> conditions below work. Once the
+// migration is complete, we should be able to get by with just forward
+// declarations.
+#include "src/objects.h"
 #include "src/zone/zone.h"

 namespace v8 {

@@ -23,7 +28,7 @@ class HandleScopeImplementer;
 class Isolate;
 template <typename T>
 class MaybeHandle;
-class Object;
+class ObjectPtr;

 // ----------------------------------------------------------------------------
 // Base class for Handle instantiations. Don't use directly.
@@ -105,14 +110,29 @@ class Handle final : public HandleBase {
   }
   V8_INLINE explicit Handle(Address* location) : HandleBase(location) {
     // Type check:
-    static_assert(std::is_convertible<T*, Object*>::value,
+    static_assert(std::is_convertible<T*, Object*>::value ||
+                      std::is_convertible<T, ObjectPtr>::value,
                   "static type violation");
   }

+  // Here and below: for object types T that still derive from Object,
+  // enable the overloads that consume/produce a T*; for types already
+  // ported to deriving from ObjectPtr, use non-pointer T values.
+  // TODO(3770): The T* versions should disappear eventually.
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<Object, T1>::value>::type>
   V8_INLINE Handle(T* object, Isolate* isolate);
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE Handle(T object, Isolate* isolate);

   // Allocate a new handle for the object, do not canonicalize.
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<Object, T1>::value>::type>
   V8_INLINE static Handle<T> New(T* object, Isolate* isolate);
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE static Handle<T> New(T object, Isolate* isolate);

   // Constructor for handling automatic up casting.
   // Ex. Handle<JSFunction> can be passed when Handle<Object> is expected.
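A standalone sketch of the enable_if pattern above (hypothetical names OldStyle/NewStyle; not the real Handle): the same class template exposes a T*-based overload for unported types and a by-value overload for ObjectPtr-based types, and SFINAE removes whichever one does not apply.

#include <type_traits>

class Object {};
class ObjectPtr {};
class OldStyle : public Object {};    // unported: used via OldStyle*
class NewStyle : public ObjectPtr {}; // ported: used by value

template <typename T>
class Handle {
 public:
  // Selected only when T still derives from Object: takes a raw pointer.
  template <typename T1 = T, typename = typename std::enable_if<
                                 std::is_base_of<Object, T1>::value>::type>
  explicit Handle(T* object) {}

  // Selected only when T derives from ObjectPtr: takes the value itself.
  template <typename T1 = T, typename = typename std::enable_if<
                                 std::is_base_of<ObjectPtr, T1>::value>::type>
  explicit Handle(T object) {}
};

int main() {
  OldStyle old_obj;
  Handle<OldStyle> h1(&old_obj);    // pointer overload is the only viable one
  Handle<NewStyle> h2(NewStyle{});  // value overload is the only viable one
}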
@@ -120,12 +140,34 @@ class Handle final : public HandleBase {
                             std::is_convertible<S*, T*>::value>::type>
   V8_INLINE Handle(Handle<S> handle) : HandleBase(handle) {}

-  V8_INLINE T* operator->() const { return operator*(); }
+  // The NeverReadOnlySpaceObject special-case is needed for the
+  // ContextFromNeverReadOnlySpaceObject helper function in api.cc.
+  template <typename T1 = T,
+            typename = typename std::enable_if<
+                std::is_base_of<Object, T1>::value ||
+                std::is_base_of<NeverReadOnlySpaceObject, T1>::value>::type>
+  V8_INLINE T* operator->() const {
+    return operator*();
+  }
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE T operator->() const {
+    return operator*();
+  }

   // Provides the C++ dereference operator.
+  template <typename T1 = T,
+            typename = typename std::enable_if<
+                std::is_base_of<Object, T1>::value ||
+                std::is_base_of<NeverReadOnlySpaceObject, T1>::value>::type>
   V8_INLINE T* operator*() const {
     return reinterpret_cast<T*>(HandleBase::operator*());
   }
+  template <typename T1 = T, typename = typename std::enable_if<
+                                 std::is_base_of<ObjectPtr, T1>::value>::type>
+  V8_INLINE T operator*() const {
+    return T::cast(ObjectPtr(HandleBase::operator*()));
+  }

   // Returns the address to where the raw pointer is stored.
   V8_INLINE T** location() const {
@@ -92,11 +92,18 @@ class ConcurrentMarkingVisitor final
         task_id_(task_id),
         embedder_tracing_enabled_(embedder_tracing_enabled) {}

-  template <typename T>
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<Object, T>::value>::type>
   static V8_INLINE T* Cast(HeapObject* object) {
     return T::cast(object);
   }

+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<ObjectPtr, T>::value>::type>
+  static V8_INLINE T Cast(HeapObject* object) {
+    return T::cast(object);
+  }
+
   bool ShouldVisit(HeapObject* object) {
     return marking_state_.GreyToBlack(object);
   }
@@ -1934,7 +1934,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(

   // Update properties if necessary.
   if (source->HasFastProperties()) {
-    PropertyArray* properties = source->property_array();
+    PropertyArray properties = source->property_array();
     if (properties->length() > 0) {
       // TODO(gsathya): Do not copy hash code.
       Handle<PropertyArray> prop = CopyArrayWithMap(

@@ -1952,12 +1952,12 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(

 namespace {
 template <typename T>
-void initialize_length(T* array, int length) {
+void initialize_length(Handle<T> array, int length) {
   array->set_length(length);
 }

 template <>
-void initialize_length<PropertyArray>(PropertyArray* array, int length) {
+void initialize_length<PropertyArray>(Handle<PropertyArray> array, int length) {
   array->initialize_length(length);
 }

@@ -1969,7 +1969,7 @@ Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
   HeapObject* obj = AllocateRawFixedArray(len, NOT_TENURED);
   obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER);

-  T* result = T::cast(obj);
+  Handle<T> result(T::cast(obj), isolate());
   DisallowHeapAllocation no_gc;
   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);

@@ -1983,7 +1983,7 @@ Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
     initialize_length(result, len);
     for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
   }
-  return Handle<T>(result, isolate());
+  return result;
 }

 template <typename T>

@@ -1996,7 +1996,7 @@ Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
   HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
   obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);

-  T* result = T::cast(obj);
+  Handle<T> result(T::cast(obj), isolate());
   initialize_length(result, new_len);

   // Copy the content.

@@ -2004,7 +2004,7 @@ Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
   for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode);
   MemsetPointer(result->data_start() + old_len, *undefined_value(), grow_by);
-  return Handle<T>(result, isolate());
+  return result;
 }

 Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
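A tiny standalone sketch of the template-plus-specialization pattern the factory code above relies on (simplified stand-in types, not the real Factory): a generic helper covers the common case, and a full specialization lets PropertyArray, whose length setter has a different name, share the same copying template.

#include <iostream>

struct FixedArrayLike {
  void set_length(int l) { std::cout << "set_length " << l << "\n"; }
};
struct PropertyArrayLike {
  void initialize_length(int l) { std::cout << "initialize_length " << l << "\n"; }
};

template <typename T>
void initialize_length(T* array, int length) {
  array->set_length(length);  // generic case
}

template <>
void initialize_length<PropertyArrayLike>(PropertyArrayLike* array, int length) {
  array->initialize_length(length);  // PropertyArray-specific spelling
}

int main() {
  FixedArrayLike f;
  PropertyArrayLike p;
  initialize_length(&f, 3);  // generic version
  initialize_length(&p, 3);  // specialization
}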
@@ -373,6 +373,21 @@ bool Heap::InNewSpace(HeapObject* heap_object) {
   return result;
 }

+// static
+bool Heap::InNewSpace(HeapObjectPtr heap_object) {
+  bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
+#ifdef DEBUG
+  // If in NEW_SPACE, then check we're either not in the middle of GC or the
+  // object is in to-space.
+  if (result) {
+    // If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
+    Heap* heap = Heap::FromWritableHeapObject(&heap_object);
+    DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
+  }
+#endif
+  return result;
+}
+
 // static
 bool Heap::InFromSpace(Object* object) {
   DCHECK(!HasWeakHeapObjectTag(object));

@@ -408,6 +423,11 @@ bool Heap::InToSpace(HeapObject* heap_object) {
   return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
 }

+// static
+bool Heap::InToSpace(HeapObjectPtr heap_object) {
+  return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
+}
+
 bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }

 bool Heap::InReadOnlySpace(Object* object) {

@@ -435,6 +455,19 @@ Heap* Heap::FromWritableHeapObject(const HeapObject* obj) {
   return heap;
 }

+// static
+Heap* Heap::FromWritableHeapObject(const HeapObjectPtr* obj) {
+  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*obj);
+  // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
+  // find a heap. The exception is when the ReadOnlySpace is writeable, during
+  // bootstrapping, so explicitly allow this case.
+  SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE ||
+              static_cast<ReadOnlySpace*>(chunk->owner())->writable());
+  Heap* heap = chunk->heap();
+  SLOW_DCHECK(heap != nullptr);
+  return heap;
+}
+
 bool Heap::ShouldBePromoted(Address old_address) {
   Page* page = Page::FromAddress(old_address);
   Address age_mark = new_space_->age_mark();
@@ -12,6 +12,7 @@

 #include "src/globals.h"
 #include "src/objects-inl.h"
+#include "src/objects/heap-object.h"
 #include "src/objects/maybe-object-inl.h"
 #include "src/objects/slots.h"

@@ -102,6 +103,16 @@ inline void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
                                               value_heap_object);
 }

+inline void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot,
+                                Object* value) {
+  DCHECK(!HasWeakHeapObjectTag(*slot));
+  DCHECK(!HasWeakHeapObjectTag(value));
+  if (!value->IsHeapObject()) return;
+  heap_internals::GenerationalBarrierInternal(
+      reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
+      HeapObject::cast(value));
+}
+
 inline void GenerationalBarrierForElements(Heap* heap, FixedArray* array,
                                            int offset, int length) {
   heap_internals::MemoryChunk* array_chunk =

@@ -135,6 +146,16 @@ inline void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
                                        value_heap_object);
 }

+inline void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot,
+                           Object* value) {
+  DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
+  DCHECK(!HasWeakHeapObjectTag(value));
+  if (!value->IsHeapObject()) return;
+  heap_internals::MarkingBarrierInternal(
+      reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
+      HeapObject::cast(value));
+}
+
 inline void MarkingBarrierForElements(Heap* heap, HeapObject* object) {
   heap_internals::MemoryChunk* object_chunk =
       heap_internals::MemoryChunk::FromHeapObject(object);
@@ -5,6 +5,8 @@
 #ifndef V8_HEAP_HEAP_WRITE_BARRIER_H_
 #define V8_HEAP_HEAP_WRITE_BARRIER_H_

+#include "include/v8-internal.h"
+
 namespace v8 {
 namespace internal {

@@ -12,6 +14,7 @@ class Code;
 class FixedArray;
 class Heap;
 class HeapObject;
+class HeapObjectPtr;
 class MaybeObject;
 class MaybeObjectSlot;
 class Object;

@@ -36,6 +39,11 @@ void WriteBarrierForCode(Code* host);
 void GenerationalBarrier(HeapObject* object, ObjectSlot slot, Object* value);
 void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
                          MaybeObject* value);
+// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+// version above.
+// TODO(3770): This should probably take a HeapObjectPtr eventually.
+void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
 void GenerationalBarrierForElements(Heap* heap, FixedArray* array, int offset,
                                     int length);
 void GenerationalBarrierForCode(Code* host, RelocInfo* rinfo,

@@ -45,6 +53,11 @@ void GenerationalBarrierForCode(Code* host, RelocInfo* rinfo,
 void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value);
 void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
                     MaybeObject* value);
+// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+// version above.
+// TODO(3770): This should probably take a HeapObjectPtr eventually.
+void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
 void MarkingBarrierForElements(Heap* heap, HeapObject* object);
 void MarkingBarrierForCode(Code* host, RelocInfo* rinfo, HeapObject* object);
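A standalone sketch of the syntax-compatibility point the comments above make (simplified, hypothetical barrier signatures): inside a ported class's methods `this` is a HeapObjectPtr*, while in an unported class it is a HeapObject*. Overloading the barrier on both pointer types lets one macro body serve both worlds unchanged.

#include <iostream>

class HeapObject {};
class HeapObjectPtr {};

void MarkingBarrier(HeapObject* object) { std::cout << "old-style barrier\n"; }
void MarkingBarrier(HeapObjectPtr* object) { std::cout << "ported barrier\n"; }

// The macro expands to identical text in both cases; overload resolution
// picks the right barrier from the static type of `object`.
#define WRITE_BARRIER(object) MarkingBarrier(object)

struct OldArray : public HeapObject {
  void set() { WRITE_BARRIER(this); }  // this is OldArray* -> HeapObject*
};
struct NewArray : public HeapObjectPtr {
  void set() { WRITE_BARRIER(this); }  // this is NewArray* -> HeapObjectPtr*
};

int main() {
  OldArray a;
  NewArray b;
  a.set();  // prints "old-style barrier"
  b.set();  // prints "ported barrier"
}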
@@ -61,6 +61,7 @@ class GCIdleTimeHeapState;
 class GCTracer;
 class HeapController;
 class HeapObjectAllocationTracker;
+class HeapObjectPtr;
 class HeapObjectsFilter;
 class HeapStats;
 class HistogramTimer;

@@ -904,12 +905,14 @@ class Heap {
   static inline bool InNewSpace(Object* object);
   static inline bool InNewSpace(MaybeObject* object);
   static inline bool InNewSpace(HeapObject* heap_object);
+  static inline bool InNewSpace(HeapObjectPtr heap_object);
   static inline bool InFromSpace(Object* object);
   static inline bool InFromSpace(MaybeObject* object);
   static inline bool InFromSpace(HeapObject* heap_object);
   static inline bool InToSpace(Object* object);
   static inline bool InToSpace(MaybeObject* object);
   static inline bool InToSpace(HeapObject* heap_object);
+  static inline bool InToSpace(HeapObjectPtr heap_object);

   // Returns whether the object resides in old space.
   inline bool InOldSpace(Object* object);

@@ -935,6 +938,11 @@ class Heap {
   // Find the heap which owns this HeapObject. Should never be called for
   // objects in RO space.
   static inline Heap* FromWritableHeapObject(const HeapObject* obj);
+  // This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
+  // to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
+  // version above.
+  // TODO(3770): This should probably take a HeapObjectPtr eventually.
+  static inline Heap* FromWritableHeapObject(const HeapObjectPtr* obj);

   // ===========================================================================
   // Object statistics tracking. ===============================================
@@ -473,7 +473,7 @@ void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
   if (boilerplate->HasFastProperties()) {
     // We'll mis-classify the empty_property_array here. Given that there is a
     // single instance, this is negligible.
-    PropertyArray* properties = boilerplate->property_array();
+    PropertyArray properties = boilerplate->property_array();
     RecordSimpleVirtualObjectStats(
         site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
   } else {

@@ -535,7 +535,7 @@ void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject* object) {

   // Properties.
   if (object->HasFastProperties()) {
-    PropertyArray* properties = object->property_array();
+    PropertyArray properties = object->property_array();
     CHECK_EQ(PROPERTY_ARRAY_TYPE, properties->map()->instance_type());
   } else {
     NameDictionary* properties = object->property_dictionary();
@@ -20,11 +20,17 @@ namespace v8 {
 namespace internal {

 template <typename ResultType, typename ConcreteVisitor>
-template <typename T>
+template <typename T, typename>
 T* HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
   return T::cast(object);
 }

+template <typename ResultType, typename ConcreteVisitor>
+template <typename T, typename>
+T HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
+  return T::cast(object);
+}
+
 template <typename ResultType, typename ConcreteVisitor>
 ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
   return Visit(object->map(), object);

@@ -35,10 +41,10 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map* map,
                                                            HeapObject* object) {
   ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
   switch (map->visitor_id()) {
-#define CASE(type)               \
-  case kVisit##type:             \
-    return visitor->Visit##type( \
-        map, ConcreteVisitor::template Cast<type>(object));
+#define CASE(TypeName, Type)         \
+  case kVisit##TypeName:             \
+    return visitor->Visit##TypeName( \
+        map, ConcreteVisitor::template Cast<TypeName>(object));
     TYPED_VISITOR_ID_LIST(CASE)
 #undef CASE
     case kVisitShortcutCandidate:

@@ -77,10 +83,10 @@ void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(HeapObject* host,
   static_cast<ConcreteVisitor*>(this)->VisitPointer(host, map);
 }

-#define VISIT(type)                                                  \
+#define VISIT(TypeName, Type)                                        \
   template <typename ResultType, typename ConcreteVisitor>           \
-  ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type(  \
-      Map* map, type* object) {                                      \
+  ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##TypeName( \
+      Map* map, Type object) {                                       \
     ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);  \
     if (!visitor->ShouldVisit(object)) return ResultType();          \
     if (!visitor->AllowDefaultJSObjectVisit()) {                     \

@@ -88,10 +94,10 @@ void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(HeapObject* host,
           "Implement custom visitor for new JSObject subclass in "   \
           "concurrent marker");                                      \
     }                                                                \
-    int size = type::BodyDescriptor::SizeOf(map, object);            \
+    int size = TypeName::BodyDescriptor::SizeOf(map, object);        \
     if (visitor->ShouldVisitMapPointer())                            \
       visitor->VisitMapPointer(object, object->map_slot());          \
-    type::BodyDescriptor::IterateBody(map, object, size, visitor);   \
+    TypeName::BodyDescriptor::IterateBody(map, object, size, visitor); \
     return static_cast<ResultType>(size);                            \
   }
 TYPED_VISITOR_ID_LIST(VISIT)
@@ -30,46 +30,46 @@ class UncompiledDataWithoutPreParsedScope;
 class UncompiledDataWithPreParsedScope;
 class WasmInstanceObject;

-#define TYPED_VISITOR_ID_LIST(V) \
-  V(AllocationSite) \
-  V(BigInt) \
-  V(ByteArray) \
-  V(BytecodeArray) \
-  V(Cell) \
-  V(Code) \
-  V(CodeDataContainer) \
-  V(ConsString) \
-  V(DataHandler) \
-  V(EphemeronHashTable) \
-  V(FeedbackCell) \
-  V(FeedbackVector) \
-  V(FixedArray) \
-  V(FixedDoubleArray) \
-  V(FixedFloat64Array) \
-  V(FixedTypedArrayBase) \
-  V(JSArrayBuffer) \
-  V(JSDataView) \
-  V(JSObject) \
-  V(JSTypedArray) \
-  V(JSWeakCollection) \
-  V(Map) \
-  V(Oddball) \
-  V(PreParsedScopeData) \
-  V(PropertyArray) \
-  V(PropertyCell) \
-  V(PrototypeInfo) \
-  V(SeqOneByteString) \
-  V(SeqTwoByteString) \
-  V(SharedFunctionInfo) \
-  V(SlicedString) \
-  V(SmallOrderedHashMap) \
-  V(SmallOrderedHashSet) \
-  V(Symbol) \
-  V(ThinString) \
-  V(TransitionArray) \
-  V(UncompiledDataWithoutPreParsedScope) \
-  V(UncompiledDataWithPreParsedScope) \
-  V(WasmInstanceObject)
+#define TYPED_VISITOR_ID_LIST(V) \
+  V(AllocationSite, AllocationSite*) \
+  V(BigInt, BigInt*) \
+  V(ByteArray, ByteArray*) \
+  V(BytecodeArray, BytecodeArray*) \
+  V(Cell, Cell*) \
+  V(Code, Code*) \
+  V(CodeDataContainer, CodeDataContainer*) \
+  V(ConsString, ConsString*) \
+  V(DataHandler, DataHandler*) \
+  V(EphemeronHashTable, EphemeronHashTable*) \
+  V(FeedbackCell, FeedbackCell*) \
+  V(FeedbackVector, FeedbackVector*) \
+  V(FixedArray, FixedArray*) \
+  V(FixedDoubleArray, FixedDoubleArray*) \
+  V(FixedFloat64Array, FixedFloat64Array*) \
+  V(FixedTypedArrayBase, FixedTypedArrayBase*) \
+  V(JSArrayBuffer, JSArrayBuffer*) \
+  V(JSDataView, JSDataView*) \
+  V(JSObject, JSObject*) \
+  V(JSTypedArray, JSTypedArray*) \
+  V(JSWeakCollection, JSWeakCollection*) \
+  V(Map, Map*) \
+  V(Oddball, Oddball*) \
+  V(PreParsedScopeData, PreParsedScopeData*) \
+  V(PropertyArray, PropertyArray) \
+  V(PropertyCell, PropertyCell*) \
+  V(PrototypeInfo, PrototypeInfo*) \
+  V(SeqOneByteString, SeqOneByteString*) \
+  V(SeqTwoByteString, SeqTwoByteString*) \
+  V(SharedFunctionInfo, SharedFunctionInfo*) \
+  V(SlicedString, SlicedString*) \
+  V(SmallOrderedHashMap, SmallOrderedHashMap*) \
+  V(SmallOrderedHashSet, SmallOrderedHashSet*) \
+  V(Symbol, Symbol*) \
+  V(ThinString, ThinString*) \
+  V(TransitionArray, TransitionArray*) \
+  V(UncompiledDataWithoutPreParsedScope, UncompiledDataWithoutPreParsedScope*) \
+  V(UncompiledDataWithPreParsedScope, UncompiledDataWithPreParsedScope*) \
+  V(WasmInstanceObject, WasmInstanceObject*)

 // The base class for visitors that need to dispatch on object type. The default
 // behavior of all visit functions is to iterate body of the given object using

@@ -101,7 +101,8 @@ class HeapVisitor : public ObjectVisitor {
   // in default Visit implemention for subclasses of JSObject.
   V8_INLINE bool AllowDefaultJSObjectVisit() { return true; }

-#define VISIT(type) V8_INLINE ResultType Visit##type(Map* map, type* object);
+#define VISIT(TypeName, Type) \
+  V8_INLINE ResultType Visit##TypeName(Map* map, Type object);
   TYPED_VISITOR_ID_LIST(VISIT)
 #undef VISIT
   V8_INLINE ResultType VisitShortcutCandidate(Map* map, ConsString* object);

@@ -113,8 +114,13 @@ class HeapVisitor : public ObjectVisitor {
   V8_INLINE ResultType VisitFreeSpace(Map* map, FreeSpace* object);
   V8_INLINE ResultType VisitWeakArray(Map* map, HeapObject* object);

-  template <typename T>
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<Object, T>::value>::type>
   static V8_INLINE T* Cast(HeapObject* object);
+
+  template <typename T, typename = typename std::enable_if<
+                            std::is_base_of<ObjectPtr, T>::value>::type>
+  static V8_INLINE T Cast(HeapObject* object);
 };

 template <typename ConcreteVisitor>
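A standalone sketch of the two-parameter X-macro change above (stand-in types, not the real visitor): the list now carries both the type name (for token pasting) and the parameter type, so ported types can be passed by value while everything else stays a pointer.

#include <iostream>

class FixedArray {};     // unported: handled as FixedArray*
class PropertyArray {};  // ported: handled by value

#define TYPED_VISITOR_ID_LIST(V) \
  V(FixedArray, FixedArray*)     \
  V(PropertyArray, PropertyArray)

class Visitor {
 public:
#define VISIT(TypeName, Type)                \
  void Visit##TypeName(Type object) {        \
    std::cout << "visiting " #TypeName "\n"; \
  }
  TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT
};

int main() {
  Visitor v;
  FixedArray f;
  v.VisitFixedArray(&f);                   // pointer parameter
  v.VisitPropertyArray(PropertyArray());   // value parameter
}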
@@ -3651,7 +3651,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
       }
     }
   } else if (object->IsPropertyArray()) {
-    PropertyArray* array = PropertyArray::cast(object);
+    PropertyArray array = PropertyArray::cast(object);
     for (int j = 0; j < array->length(); j++) {
       Object* property = array->get(j);
       if (property->IsHeapObject()) {
@@ -25,6 +25,7 @@
 #include "src/heap/invalidated-slots.h"
 #include "src/heap/marking.h"
 #include "src/objects.h"
+#include "src/objects/heap-object.h"
 #include "src/objects/map.h"
 #include "src/utils.h"

@@ -409,6 +410,10 @@ class MemoryChunk {
     return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(o) &
                                           ~kAlignmentMask);
   }
+  // Only works if the object is in the first kPageSize of the MemoryChunk.
+  static MemoryChunk* FromHeapObject(const HeapObjectPtr o) {
+    return reinterpret_cast<MemoryChunk*>(o.ptr() & ~kAlignmentMask);
+  }

   void SetOldGenerationPageFlags(bool is_marking);
   void SetYoungGenerationPageFlags(bool is_marking);
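A standalone sketch of the address arithmetic in FromHeapObject above (the alignment constant here is an assumption for illustration, not V8's actual value): chunks are aligned, so masking off the low bits of any tagged address inside a chunk yields the chunk's base address; the heap-object tag bit is cleared along with the rest of the offset.

#include <cassert>
#include <cstdint>

using Address = uintptr_t;
constexpr Address kAlignment = 512 * 1024;  // assumed chunk alignment
constexpr Address kAlignmentMask = kAlignment - 1;

Address ChunkFromTaggedPtr(Address tagged_ptr) {
  return tagged_ptr & ~kAlignmentMask;  // clear the offset-within-chunk bits
}

int main() {
  Address chunk_base = 8 * kAlignment;      // some aligned chunk start
  Address object = chunk_base + 0x340 + 1;  // tagged pointer inside the chunk
  assert(ChunkFromTaggedPtr(object) == chunk_base);
}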
@@ -7,6 +7,7 @@

 #include "src/base/functional.h"
 #include "src/handles.h"
+#include "src/objects/heap-object.h"

 namespace v8 {
 namespace internal {

@@ -103,6 +104,7 @@ class IdentityMap : public IdentityMapBase {
   V* Get(Object* key) {
     return reinterpret_cast<V*>(GetEntry(reinterpret_cast<Address>(key)));
   }
+  V* Get(ObjectPtr key) { return reinterpret_cast<V*>(GetEntry(key.ptr())); }

   // Searches this map for the given key using the object's address
   // as the identity, returning:

@@ -112,12 +114,18 @@ class IdentityMap : public IdentityMapBase {
   V* Find(Object* key) const {
     return reinterpret_cast<V*>(FindEntry(reinterpret_cast<Address>(key)));
   }
+  V* Find(ObjectPtr key) const {
+    return reinterpret_cast<V*>(FindEntry(key.ptr()));
+  }

   // Set the value for the given key.
   void Set(Handle<Object> key, V v) { Set(*key, v); }
   void Set(Object* key, V v) {
     *(reinterpret_cast<V*>(GetEntry(reinterpret_cast<Address>(key)))) = v;
   }
+  void Set(ObjectPtr key, V v) {
+    *(reinterpret_cast<V*>(GetEntry(key.ptr()))) = v;
+  }

   bool Delete(Handle<Object> key, V* deleted_value) {
     return Delete(*key, deleted_value);

@@ -130,6 +138,14 @@ class IdentityMap : public IdentityMapBase {
     }
     return deleted_something;
   }
+  bool Delete(ObjectPtr key, V* deleted_value) {
+    void* v = nullptr;
+    bool deleted_something = DeleteEntry(key.ptr(), &v);
+    if (deleted_value != nullptr && deleted_something) {
+      *deleted_value = *reinterpret_cast<V*>(&v);
+    }
+    return deleted_something;
+  }

   // Removes all elements from the map.
   void Clear() { IdentityMapBase::Clear(); }
@@ -687,14 +687,14 @@ void WeakArrayList::WeakArrayListVerify(Isolate* isolate) {

 void PropertyArray::PropertyArrayVerify(Isolate* isolate) {
   if (length() == 0) {
-    CHECK_EQ(this, ReadOnlyRoots(isolate).empty_property_array());
+    CHECK_EQ(*this, ReadOnlyRoots(isolate).empty_property_array());
     return;
   }
   // There are no empty PropertyArrays.
   CHECK_LT(0, length());
   for (int i = 0; i < length(); i++) {
     Object* e = get(i);
-    VerifyPointer(isolate, e);
+    Object::VerifyPointer(isolate, e);
   }
 }
@@ -724,10 +724,18 @@ MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
   return value;
 }

+ObjectSlot HeapObject::RawField(int byte_offset) const {
+  return ObjectSlot(FIELD_ADDR(this, byte_offset));
+}
+
 ObjectSlot HeapObject::RawField(const HeapObject* obj, int byte_offset) {
   return ObjectSlot(FIELD_ADDR(obj, byte_offset));
 }

+MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
+  return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
+}
+
 MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject* obj,
                                               int byte_offset) {
   return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));

@@ -1551,7 +1559,7 @@ int HeapObject::SizeFromMap(Map* map) const {
   }
   if (instance_type == PROPERTY_ARRAY_TYPE) {
     return PropertyArray::SizeFor(
-        reinterpret_cast<const PropertyArray*>(this)->synchronized_length());
+        PropertyArray::cast(this)->synchronized_length());
   }
   if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
     return SmallOrderedHashMap::SizeFor(
@@ -89,6 +89,19 @@ void HeapObject::PrintHeader(std::ostream& os, const char* id) {  // NOLINT
   if (!IsMap()) os << "\n - map: " << Brief(map());
 }

+void HeapObjectPtr::PrintHeader(std::ostream& os, const char* id) {  // NOLINT
+  os << reinterpret_cast<void*>(ptr()) << ": [";
+  if (id != nullptr) {
+    os << id;
+  } else {
+    os << map()->instance_type();
+  }
+  os << "]";
+  MemoryChunk* chunk = MemoryChunk::FromAddress(ptr());
+  if (chunk->owner()->identity() == OLD_SPACE) os << " in OldSpace";
+  if (!IsMap()) os << "\n - map: " << Brief(map());
+}
+
 void HeapObject::HeapObjectPrint(std::ostream& os) {  // NOLINT
   InstanceType instance_type = map()->instance_type();

@@ -979,7 +992,7 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint(
 }

 void PropertyArray::PropertyArrayPrint(std::ostream& os) {  // NOLINT
-  HeapObject::PrintHeader(os, "PropertyArray");
+  PrintHeader(os, "PropertyArray");
   os << "\n - length: " << length();
   os << "\n - hash: " << Hash();
   PrintFixedArrayElements(os, this);
@@ -1066,6 +1066,10 @@ class Object {
   // Type testing.
   bool IsObject() const { return true; }

+  // Syntax compatibility with ObjectPtr, so the same macros can consume
+  // arguments of either type.
+  Address ptr() const { return reinterpret_cast<Address>(this); }
+
 #define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
   OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
   HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)

@@ -1711,7 +1715,9 @@ class HeapObject: public Object {
   // Does no checking, and is safe to use during GC, while maps are invalid.
   // Does not invoke write barrier, so should only be assigned to
   // during marking GC.
+  inline ObjectSlot RawField(int byte_offset) const;
   static inline ObjectSlot RawField(const HeapObject* obj, int offset);
+  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
   static inline MaybeObjectSlot RawMaybeWeakField(HeapObject* obj, int offset);

   DECL_CAST(HeapObject)
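A standalone sketch of the Object::ptr() compatibility shim above (simplified, not the real classes): old-style objects are their own tagged pointers (the identity is the `this` value), so giving Object a ptr() accessor lets one macro spelling, (p)->ptr(), work on either representation.

#include <cstdint>
#include <iostream>

using Address = uintptr_t;

class Object {  // old world: the object's identity is its address
 public:
  Address ptr() const { return reinterpret_cast<Address>(this); }
};

class ObjectPtr {  // new world: the tagged word is an explicit field
 public:
  explicit ObjectPtr(Address p) : ptr_(p) {}
  Address ptr() const { return ptr_; }

 private:
  Address ptr_;
};

// One macro text serves both representations via ptr().
#define TAGGED_WORD(p) ((p)->ptr())

int main() {
  Object o;             // identity is its own address
  ObjectPtr n(0x1001);  // identity is the stored word
  std::cout << std::hex << TAGGED_WORD(&o) << " " << TAGGED_WORD(&n) << "\n";
}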
src/objects/heap-object-inl.h (new file, 52 lines)
@@ -0,0 +1,52 @@
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_HEAP_OBJECT_INL_H_
+#define V8_OBJECTS_HEAP_OBJECT_INL_H_
+
+#include "src/objects/heap-object.h"
+
+#include "src/heap/heap-write-barrier-inl.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+OBJECT_CONSTRUCTORS_IMPL(HeapObjectPtr, ObjectPtr)
+
+#define TYPE_CHECK_FORWARDER(Type)                           \
+  bool HeapObjectPtr::Is##Type() const {                     \
+    return reinterpret_cast<HeapObject*>(ptr())->Is##Type(); \
+  }
+HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
+#undef TYPE_CHECK_FORWARDER
+
+Map* HeapObjectPtr::map() const {
+  return Map::cast(READ_FIELD(this, kMapOffset));
+}
+
+ObjectSlot HeapObjectPtr::map_slot() {
+  return ObjectSlot(FIELD_ADDR(this, kMapOffset));
+}
+
+WriteBarrierMode HeapObjectPtr::GetWriteBarrierMode(
+    const DisallowHeapAllocation& promise) {
+  Heap* heap = Heap::FromWritableHeapObject(this);
+  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
+  if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
+  return UPDATE_WRITE_BARRIER;
+}
+
+ObjectSlot HeapObjectPtr::RawField(int byte_offset) const {
+  return ObjectSlot(FIELD_ADDR(this, byte_offset));
+}
+
+}  // namespace internal
+}  // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif  // V8_OBJECTS_HEAP_OBJECT_INL_H_
src/objects/heap-object.h (new file, 84 lines)
@@ -0,0 +1,84 @@
+// Copyright 2018 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_HEAP_OBJECT_H_
+#define V8_OBJECTS_HEAP_OBJECT_H_
+
+#include "src/globals.h"
+
+#include "src/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+// This is the new way to represent the Object class. It is temporarily
+// separate to allow an incremental transition.
+// For a design overview, see https://goo.gl/Ph4CGz.
+class ObjectPtr {
+ public:
+  ObjectPtr() : ptr_(kNullAddress) {}
+  explicit ObjectPtr(Address ptr) : ptr_(ptr) {}
+
+  // Enable incremental transition.
+  operator Object*() const { return reinterpret_cast<Object*>(ptr()); }
+
+  bool operator==(const ObjectPtr other) const {
+    return this->ptr() == other.ptr();
+  }
+  bool operator!=(const ObjectPtr other) const {
+    return this->ptr() != other.ptr();
+  }
+
+  // Returns the tagged "(heap) object pointer" representation of this object.
+  Address ptr() const { return ptr_; }
+
+ private:
+  Address ptr_;
+};
+
+// Replacement for HeapObject; temporarily separate for incremental transition:
+class HeapObjectPtr : public ObjectPtr {
+ public:
+  inline Map* map() const;
+
+  inline ObjectSlot map_slot();
+
+  inline WriteBarrierMode GetWriteBarrierMode(
+      const DisallowHeapAllocation& promise);
+
+  // Enable incremental transition.
+  operator HeapObject*() { return reinterpret_cast<HeapObject*>(ptr()); }
+  operator const HeapObject*() const {
+    return reinterpret_cast<const HeapObject*>(ptr());
+  }
+
+  bool IsHeapObjectPtr() const { return true; }
+
+#define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
+  HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
+#undef IS_TYPE_FUNCTION_DECL
+
+  // Untagged aligned address.
+  inline Address address() const { return ptr() - kHeapObjectTag; }
+
+  inline ObjectSlot RawField(int byte_offset) const;
+
+#ifdef OBJECT_PRINT
+  void PrintHeader(std::ostream& os, const char* id);  // NOLINT
+#endif
+
+  static const int kMapOffset = HeapObject::kMapOffset;
+
+  OBJECT_CONSTRUCTORS(HeapObjectPtr)
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif  // V8_OBJECTS_HEAP_OBJECT_H_
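A standalone sketch of the "enable incremental transition" conversion operators in ObjectPtr/HeapObjectPtr above (simplified classes, hypothetical LegacySink function): a ported value can still be handed to code that expects the old pointer types, because the wrapper converts itself back into a reinterpreted pointer.

#include <cstdint>
#include <iostream>

using Address = uintptr_t;

class Object {};

class ObjectPtr {
 public:
  explicit ObjectPtr(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
  // Enable incremental transition: implicitly convert to the old type.
  operator Object*() const { return reinterpret_cast<Object*>(ptr_); }

 private:
  Address ptr_;
};

// Unported API that still traffics in Object*.
void LegacySink(Object* o) { std::cout << "got " << o << "\n"; }

int main() {
  ObjectPtr p(0x1001);
  LegacySink(p);  // implicit conversion keeps old call sites compiling
}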
@@ -803,7 +803,7 @@ NameDictionary* JSReceiver::property_dictionary() const {

 // TODO(gsathya): Pass isolate directly to this function and access
 // the heap from this.
-PropertyArray* JSReceiver::property_array() const {
+PropertyArray JSReceiver::property_array() const {
   DCHECK(HasFastProperties());

   Object* prop = raw_properties_or_hash();

@@ -28,7 +28,7 @@ class JSReceiver : public HeapObject, public NeverReadOnlySpaceObject {
   // exists. Otherwise, returns an empty_property_array when there's a
   // Smi (hash code) or an empty_fixed_array for a fast properties
   // map.
-  inline PropertyArray* property_array() const;
+  inline PropertyArray property_array() const;

   // Gets slow properties for non-global objects.
   inline NameDictionary* property_dictionary() const;
@@ -4,12 +4,15 @@

 // PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD

+#undef OBJECT_CONSTRUCTORS
 #undef DECL_PRIMITIVE_ACCESSORS
 #undef DECL_BOOLEAN_ACCESSORS
 #undef DECL_INT_ACCESSORS
 #undef DECL_ACCESSORS
 #undef DECL_CAST
+#undef DECL_CAST2
 #undef CAST_ACCESSOR
+#undef CAST_ACCESSOR2
 #undef INT_ACCESSORS
 #undef ACCESSORS_CHECKED2
 #undef ACCESSORS_CHECKED

@@ -36,6 +39,7 @@
 #undef RELEASE_WRITE_FIELD
 #undef RELAXED_WRITE_FIELD
 #undef WRITE_BARRIER
 #undef WEAK_WRITE_BARRIER
 #undef CONDITIONAL_WRITE_BARRIER
+#undef CONDITIONAL_WEAK_WRITE_BARRIER
 #undef READ_DOUBLE_FIELD
@@ -16,6 +16,21 @@

 #include <src/v8memory.h>

+// Since this changes visibility, it should always be last in a class
+// definition.
+#define OBJECT_CONSTRUCTORS(Type)             \
+ public:                                      \
+  Type();                                     \
+  Type* operator->() { return this; }         \
+  const Type* operator->() const { return this; } \
+                                              \
+ protected:                                   \
+  explicit Type(Address ptr);
+
+#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
+  inline Type::Type() : Super() {}            \
+  inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
+
 #define DECL_PRIMITIVE_ACCESSORS(name, type) \
   inline type name() const;                  \
   inline void set_##name(type value);

@@ -43,6 +58,13 @@
   V8_INLINE static type* cast(Object* object); \
   V8_INLINE static const type* cast(const Object* object);

+// TODO(3770): Replacement for the above, temporarily separate for
+// incremental transition.
+#define DECL_CAST2(Type)                                   \
+  V8_INLINE static Type cast(Object* object);              \
+  V8_INLINE static const Type cast(const Object* object);  \
+  V8_INLINE static Type cast(ObjectPtr object);
+
 #define CAST_ACCESSOR(type)          \
   type* type::cast(Object* object) { \
     SLOW_DCHECK(object->Is##type()); \

@@ -53,6 +75,13 @@
     return reinterpret_cast<const type*>(object); \
   }

+// TODO(3770): Replacement for the above, temporarily separate for
+// incremental transition.
+#define CAST_ACCESSOR2(Type)                                                   \
+  Type Type::cast(Object* object) { return Type(object->ptr()); }             \
+  const Type Type::cast(const Object* object) { return Type(object->ptr()); } \
+  Type Type::cast(ObjectPtr object) { return Type(object.ptr()); }
+
 #define INT_ACCESSORS(holder, name, offset)                        \
   int holder::name() const { return READ_INT_FIELD(this, offset); } \
   void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }

@@ -196,8 +225,7 @@
     return InstanceTypeChecker::Is##type(map()->instance_type()); \
   }

-#define FIELD_ADDR(p, offset) \
-  (reinterpret_cast<Address>(p) + offset - kHeapObjectTag)
+#define FIELD_ADDR(p, offset) ((p)->ptr() + offset - kHeapObjectTag)

 #define READ_FIELD(p, offset) \
   (*reinterpret_cast<Object* const*>(FIELD_ADDR(p, offset)))

@@ -243,45 +271,40 @@
       reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
       reinterpret_cast<base::AtomicWord>(value));

-#define WRITE_BARRIER(object, offset, value)                                  \
-  do {                                                                        \
-    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                    \
-    MarkingBarrier(object, HeapObject::RawField(object, offset), value);      \
-    GenerationalBarrier(object, HeapObject::RawField(object, offset), value); \
-  } while (false)
+#define WRITE_BARRIER(object, offset, value)                        \
+  do {                                                              \
+    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));          \
+    MarkingBarrier(object, (object)->RawField(offset), value);      \
+    GenerationalBarrier(object, (object)->RawField(offset), value); \
+  } while (false)

-#define WEAK_WRITE_BARRIER(object, offset, value)                              \
-  do {                                                                         \
-    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                     \
-    MarkingBarrier(object, HeapObject::RawMaybeWeakField(object, offset),      \
-                   value);                                                     \
-    GenerationalBarrier(object, HeapObject::RawMaybeWeakField(object, offset), \
-                        value);                                                \
-  } while (false)
+#define WEAK_WRITE_BARRIER(object, offset, value)                            \
+  do {                                                                       \
+    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                   \
+    MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value);      \
+    GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
+  } while (false)

-#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)                \
-  do {                                                                        \
-    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                    \
-    if (mode != SKIP_WRITE_BARRIER) {                                         \
-      if (mode == UPDATE_WRITE_BARRIER) {                                     \
-        MarkingBarrier(object, HeapObject::RawField(object, offset), value);  \
-      }                                                                       \
-      GenerationalBarrier(object, HeapObject::RawField(object, offset),       \
-                          value);                                             \
-    }                                                                         \
-  } while (false)
+#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)        \
+  do {                                                                \
+    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));            \
+    if (mode != SKIP_WRITE_BARRIER) {                                 \
+      if (mode == UPDATE_WRITE_BARRIER) {                             \
+        MarkingBarrier(object, (object)->RawField(offset), value);    \
+      }                                                               \
+      GenerationalBarrier(object, (object)->RawField(offset), value); \
+    }                                                                 \
+  } while (false)

-#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)           \
-  do {                                                                        \
-    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                    \
-    if (mode != SKIP_WRITE_BARRIER) {                                         \
-      if (mode == UPDATE_WRITE_BARRIER) {                                     \
-        MarkingBarrier(object, HeapObject::RawMaybeWeakField(object, offset), \
-                       value);                                                \
-      }                                                                       \
-      GenerationalBarrier(                                                    \
-          object, HeapObject::RawMaybeWeakField(object, offset), value);      \
-    }                                                                         \
-  } while (false)
+#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode)            \
+  do {                                                                         \
+    DCHECK_NOT_NULL(Heap::FromWritableHeapObject(object));                     \
+    if (mode != SKIP_WRITE_BARRIER) {                                          \
+      if (mode == UPDATE_WRITE_BARRIER) {                                      \
+        MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value);    \
+      }                                                                        \
+      GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
+    }                                                                          \
+  } while (false)

 #define READ_DOUBLE_FIELD(p, offset) ReadDoubleValue(FIELD_ADDR(p, offset))
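A standalone sketch of the operator-> trick in the OBJECT_CONSTRUCTORS macro above (a stand-in class, not the real PropertyArray): returning `this` from operator-> lets existing call sites keep writing obj->method() even though obj is now a value, not a pointer, which is what keeps this whole migration incremental.

#include <iostream>

class PropertyArrayLike {
 public:
  int length() const { return 3; }
  // From OBJECT_CONSTRUCTORS: the value behaves like a pointer at call sites.
  PropertyArrayLike* operator->() { return this; }
  const PropertyArrayLike* operator->() const { return this; }
};

int main() {
  PropertyArrayLike arr;               // a value, not a pointer
  std::cout << arr->length() << "\n";  // still reads like pointer code
}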
@@ -8,6 +8,7 @@
 #include "src/objects/property-array.h"

 #include "src/heap/heap-write-barrier-inl.h"
+#include "src/objects/heap-object-inl.h"

 // Has to be the last include (doesn't have include guards):
 #include "src/objects/object-macros.h"

@@ -15,7 +16,8 @@
 namespace v8 {
 namespace internal {

-CAST_ACCESSOR(PropertyArray)
+OBJECT_CONSTRUCTORS_IMPL(PropertyArray, HeapObjectPtr)
+CAST_ACCESSOR2(PropertyArray)

 Object* PropertyArray::get(int index) const {
   DCHECK_GE(index, 0);

@@ -40,9 +42,7 @@ void PropertyArray::set(int index, Object* value, WriteBarrierMode mode) {
   CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);
 }

-ObjectSlot PropertyArray::data_start() {
-  return HeapObject::RawField(this, kHeaderSize);
-}
+ObjectSlot PropertyArray::data_start() { return RawField(kHeaderSize); }

 int PropertyArray::length() const {
   Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
@@ -5,7 +5,7 @@
 #ifndef V8_OBJECTS_PROPERTY_ARRAY_H_
 #define V8_OBJECTS_PROPERTY_ARRAY_H_

-#include "src/objects.h"
+#include "src/objects/heap-object.h"

 // Has to be the last include (doesn't have include guards):
 #include "src/objects/object-macros.h"

@@ -13,7 +13,7 @@
 namespace v8 {
 namespace internal {

-class PropertyArray : public HeapObject {
+class PropertyArray : public HeapObjectPtr {
  public:
   // [length]: length of the array.
   inline int length() const;

@@ -42,7 +42,7 @@ class PropertyArray : public HeapObjectPtr {
     return kHeaderSize + length * kPointerSize;
   }

-  DECL_CAST(PropertyArray)
+  DECL_CAST2(PropertyArray)
   DECL_PRINTER(PropertyArray)
   DECL_VERIFIER(PropertyArray)

@@ -61,8 +61,7 @@ class PropertyArray : public HeapObjectPtr {
   static const int kNoHashSentinel = 0;

- private:
-  DISALLOW_IMPLICIT_CONSTRUCTORS(PropertyArray);
+  OBJECT_CONSTRUCTORS(PropertyArray);
 };

 }  // namespace internal
@@ -174,7 +174,7 @@ class RootVisitor;
   V(Map*, self_reference_marker_map, SelfReferenceMarkerMap) \
   /* Canonical empty values */ \
   V(EnumCache*, empty_enum_cache, EmptyEnumCache) \
-  V(PropertyArray*, empty_property_array, EmptyPropertyArray) \
+  V(PropertyArray, empty_property_array, EmptyPropertyArray) \
   V(ByteArray*, empty_byte_array, EmptyByteArray) \
   V(ObjectBoilerplateDescription*, empty_object_boilerplate_description, \
     EmptyObjectBoilerplateDescription) \