[objects.h splitting] Move HeapObject to heap-object.h

This does not include moving function definitions from objects-inl.h
to heap-object-inl.h, because that would be messy for no immediate
benefit.
This is in preparation for merging HeapObjectPtr into HeapObject.

Bug: v8:5402
Change-Id: Id170d9cac8ebabb2876b85bad29ce90fe02d842d
Reviewed-on: https://chromium-review.googlesource.com/c/1386491
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58401}
Author: Jakob Kummerow
Date: 2018-12-20 13:41:50 +01:00 (committed by Commit Bot)
Parent: 3b02afc538
Commit: 0604031eb1
3 changed files with 183 additions and 176 deletions
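The split the commit message describes keeps the class declaration in its own small header while leaving the inline member definitions in the existing shared -inl.h header; this is a common C++ layout. A minimal standalone sketch of that layout, using hypothetical Widget / widget.h / widgets-inl.h names rather than the actual V8 files:

// Sketch only: hypothetical Widget example, not the actual V8 headers.
#include <iostream>

// --- what would be widget.h: the declaration, cheap to include --------------
class Widget {
 public:
  inline int size() const;  // declared here, defined in widgets-inl.h below
 private:
  int size_ = 1;
};

// --- what would stay in widgets-inl.h: the inline definitions ---------------
inline int Widget::size() const { return size_; }

// --- a client translation unit that needs the definition --------------------
int main() {
  Widget w;
  std::cout << w.size() << "\n";  // prints 1
  return 0;
}

Creating a matching heap-object-inl.h and moving the definitions into it would be the second half of that pattern; the commit message explicitly defers it.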


@@ -35,6 +35,7 @@
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/free-space-inl.h"
#include "src/objects/heap-number-inl.h"
#include "src/objects/heap-object.h" // TODO(jkummerow): See below [1].
#include "src/objects/js-proxy-inl.h"
#include "src/objects/literal-objects.h"
#include "src/objects/maybe-object-inl.h"
@@ -53,6 +54,13 @@
#include "src/transitions-inl.h"
#include "src/v8memory.h"
// [1] This file currently contains the definitions of many
// HeapObject::IsFoo() predicates, which in turn require #including
// many other -inl.h files. Find a way to avoid this. Idea:
// Since e.g. HeapObject::IsSeqString requires things from string-inl.h,
// and presumably is mostly used from places that require/include string-inl.h
// anyway, maybe that's where it should be defined?
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

src/objects.h

@@ -1036,182 +1036,6 @@ class MapWord {
Address value_;
};
// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject: public Object {
public:
// [map]: Contains a map which contains the object's reflective
// information.
inline Map map() const;
inline void set_map(Map value);
inline MapWordSlot map_slot() const;
// The no-write-barrier version. This is OK if the object is white and in
// new space, or if the value is an immortal immutable object, like the maps
// of primitive (non-JS) objects like strings, heap numbers etc.
inline void set_map_no_write_barrier(Map value);
// Get the map using acquire load.
inline Map synchronized_map() const;
inline MapWord synchronized_map_word() const;
// Set the map using release store
inline void synchronized_set_map(Map value);
inline void synchronized_set_map_word(MapWord map_word);
// Initialize the map immediately after the object is allocated.
// Do not use this outside Heap.
inline void set_map_after_allocation(
Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// During garbage collection, the map word of a heap object does not
// necessarily contain a map pointer.
inline MapWord map_word() const;
inline void set_map_word(MapWord map_word);
// TODO(v8:7464): Once RO_SPACE is shared between isolates, this method can be
// removed as ReadOnlyRoots will be accessible from a global variable. For now
// this method exists to help remove GetIsolate/GetHeap from HeapObject, in a
// way that doesn't require passing Isolate/Heap down huge call chains or to
// places where it might not be safe to access it.
inline ReadOnlyRoots GetReadOnlyRoots() const;
#define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
#undef IS_TYPE_FUNCTION_DECL
V8_INLINE bool IsExternal(Isolate* isolate) const;
// Oddball checks are faster when they are raw pointer comparisons, so the
// isolate/read-only roots overloads should be preferred where possible.
#define IS_TYPE_FUNCTION_DECL(Type, Value) \
V8_INLINE bool Is##Type(Isolate* isolate) const; \
V8_INLINE bool Is##Type(ReadOnlyRoots roots) const; \
V8_INLINE bool Is##Type() const;
ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
#undef IS_TYPE_FUNCTION_DECL
V8_INLINE bool IsNullOrUndefined(Isolate* isolate) const;
V8_INLINE bool IsNullOrUndefined(ReadOnlyRoots roots) const;
V8_INLINE bool IsNullOrUndefined() const;
#define DECL_STRUCT_PREDICATE(NAME, Name, name) V8_INLINE bool Is##Name() const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE
// Converts an address to a HeapObject pointer.
static inline HeapObject* FromAddress(Address address) {
DCHECK_TAG_ALIGNED(address);
return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}
// Returns the address of this HeapObject.
inline Address address() const {
return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
// Iterates over pointers contained in the object (including the Map).
// If it's not performance critical iteration use the non-templatized
// version.
void Iterate(ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateFast(ObjectVisitor* v);
// Iterates over all pointers contained in the object except the
// first map pointer. The object type is given in the first
// parameter. This function does not access the map pointer in the
// object, and so is safe to call while the map pointer is modified.
// If it's not performance critical iteration use the non-templatized
// version.
void IterateBody(ObjectVisitor* v);
void IterateBody(Map map, int object_size, ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v);
// Returns true if the object contains a tagged value at given offset.
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
bool IsValidSlot(Map map, int offset);
// Returns the heap object's size in bytes
inline int Size() const;
// Given a heap object's map pointer, returns the heap size in bytes
// Useful when the map pointer field is used for other purposes.
// GC internal.
inline int SizeFromMap(Map map) const;
// Returns the field at offset in obj, as a read/write Object* reference.
// Does no checking, and is safe to use during GC, while maps are invalid.
// Does not invoke write barrier, so should only be assigned to
// during marking GC.
inline ObjectSlot RawField(int byte_offset) const;
static inline ObjectSlot RawField(const HeapObject* obj, int offset);
inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
static inline MaybeObjectSlot RawMaybeWeakField(HeapObject* obj, int offset);
DECL_CAST(HeapObject)
// Return the write barrier mode for this. Callers of this function
// must be able to present a reference to a DisallowHeapAllocation
// object as a sign that they are not going to use this function
// from code that allocates and thus invalidates the returned write
// barrier mode.
inline WriteBarrierMode GetWriteBarrierMode(
const DisallowHeapAllocation& promise);
// Dispatched behavior.
void HeapObjectShortPrint(std::ostream& os); // NOLINT
#ifdef OBJECT_PRINT
void PrintHeader(std::ostream& os, const char* id); // NOLINT
#endif
DECL_PRINTER(HeapObject)
DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
inline void VerifyObjectField(Isolate* isolate, int offset);
inline void VerifySmiField(int offset);
inline void VerifyMaybeObjectField(Isolate* isolate, int offset);
// Verify a pointer is a valid HeapObject pointer that points to object
// areas in the heap.
static void VerifyHeapPointer(Isolate* isolate, Object* p);
#endif
static inline AllocationAlignment RequiredAlignment(Map map);
// Whether the object needs rehashing. That is the case if the object's
// content depends on FLAG_hash_seed. When the object is deserialized into
// a heap with a different hash seed, these objects need to adapt.
inline bool NeedsRehashing() const;
// Rehashing support is not implemented for all objects that need rehashing.
// With objects that need rehashing but cannot be rehashed, rehashing has to
// be disabled.
bool CanBeRehashed() const;
// Rehash the object based on the layout inferred from its map.
void RehashBasedOnMap(Isolate* isolate);
// Layout description.
// First field in a heap object is map.
static const int kMapOffset = Object::kHeaderSize;
static const int kHeaderSize = kMapOffset + kPointerSize;
STATIC_ASSERT(kMapOffset == Internals::kHeapObjectMapOffset);
inline Address GetFieldAddress(int field_offset) const;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(HeapObject);
};
// Mixin class for objects that can never be in RO space.
// TODO(leszeks): Add checks in the factory that we never allocate these objects
// in RO space.
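The FromAddress()/address() pair in the removed block (re-added unchanged in heap-object.h below) implements V8's pointer tagging: a HeapObject reference is the object's real start address plus kHeapObjectTag (1), which distinguishes it from a Smi, whose low bit is 0. A standalone sketch of just that arithmetic, with invented Tag/Untag helper names and without the weak-reference encoding that real V8 also layers on top:

// Sketch only: the FromAddress()/address() tagging arithmetic in isolation.
#include <cassert>
#include <cstdint>

using Address = uintptr_t;
constexpr Address kHeapObjectTag = 1;  // low bit set marks a heap pointer
constexpr Address kSmiTag = 0;         // low bit clear marks a small integer

inline Address Tag(Address real_address) {      // what FromAddress() does
  assert((real_address & 1) == 0);              // object starts are aligned
  return real_address + kHeapObjectTag;
}

inline Address Untag(Address tagged) {          // what address() does
  return tagged - kHeapObjectTag;
}

inline bool LooksLikeHeapObject(Address value) {
  return (value & 1) == kHeapObjectTag;
}

int main() {
  alignas(8) static char fake_object[8];
  Address real = reinterpret_cast<Address>(fake_object);
  Address tagged = Tag(real);
  assert(LooksLikeHeapObject(tagged));
  assert(!LooksLikeHeapObject(kSmiTag));  // a Smi-tagged value fails the test
  assert(Untag(tagged) == real);
  return 0;
}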

src/objects/heap-object.h

@@ -157,6 +157,181 @@ bool ObjectPtr::IsHeapObject() const {
V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
const ObjectPtr& obj);
// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public Object {
public:
// [map]: Contains a map which contains the object's reflective
// information.
inline Map map() const;
inline void set_map(Map value);
inline MapWordSlot map_slot() const;
// The no-write-barrier version. This is OK if the object is white and in
// new space, or if the value is an immortal immutable object, like the maps
// of primitive (non-JS) objects like strings, heap numbers etc.
inline void set_map_no_write_barrier(Map value);
// Get the map using acquire load.
inline Map synchronized_map() const;
inline MapWord synchronized_map_word() const;
// Set the map using release store
inline void synchronized_set_map(Map value);
inline void synchronized_set_map_word(MapWord map_word);
// Initialize the map immediately after the object is allocated.
// Do not use this outside Heap.
inline void set_map_after_allocation(
Map value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// During garbage collection, the map word of a heap object does not
// necessarily contain a map pointer.
inline MapWord map_word() const;
inline void set_map_word(MapWord map_word);
// TODO(v8:7464): Once RO_SPACE is shared between isolates, this method can be
// removed as ReadOnlyRoots will be accessible from a global variable. For now
// this method exists to help remove GetIsolate/GetHeap from HeapObject, in a
// way that doesn't require passing Isolate/Heap down huge call chains or to
// places where it might not be safe to access it.
inline ReadOnlyRoots GetReadOnlyRoots() const;
#define IS_TYPE_FUNCTION_DECL(Type) V8_INLINE bool Is##Type() const;
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
#undef IS_TYPE_FUNCTION_DECL
V8_INLINE bool IsExternal(Isolate* isolate) const;
// Oddball checks are faster when they are raw pointer comparisons, so the
// isolate/read-only roots overloads should be preferred where possible.
#define IS_TYPE_FUNCTION_DECL(Type, Value) \
V8_INLINE bool Is##Type(Isolate* isolate) const; \
V8_INLINE bool Is##Type(ReadOnlyRoots roots) const; \
V8_INLINE bool Is##Type() const;
ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
#undef IS_TYPE_FUNCTION_DECL
V8_INLINE bool IsNullOrUndefined(Isolate* isolate) const;
V8_INLINE bool IsNullOrUndefined(ReadOnlyRoots roots) const;
V8_INLINE bool IsNullOrUndefined() const;
#define DECL_STRUCT_PREDICATE(NAME, Name, name) V8_INLINE bool Is##Name() const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE
// Converts an address to a HeapObject pointer.
static inline HeapObject* FromAddress(Address address) {
DCHECK_TAG_ALIGNED(address);
return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}
// Returns the address of this HeapObject.
inline Address address() const {
return reinterpret_cast<Address>(this) - kHeapObjectTag;
}
// Iterates over pointers contained in the object (including the Map).
// If it's not performance critical iteration use the non-templatized
// version.
void Iterate(ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateFast(ObjectVisitor* v);
// Iterates over all pointers contained in the object except the
// first map pointer. The object type is given in the first
// parameter. This function does not access the map pointer in the
// object, and so is safe to call while the map pointer is modified.
// If it's not performance critical iteration use the non-templatized
// version.
void IterateBody(ObjectVisitor* v);
void IterateBody(Map map, int object_size, ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(ObjectVisitor* v);
template <typename ObjectVisitor>
inline void IterateBodyFast(Map map, int object_size, ObjectVisitor* v);
// Returns true if the object contains a tagged value at given offset.
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
bool IsValidSlot(Map map, int offset);
// Returns the heap object's size in bytes
inline int Size() const;
// Given a heap object's map pointer, returns the heap size in bytes
// Useful when the map pointer field is used for other purposes.
// GC internal.
inline int SizeFromMap(Map map) const;
// Returns the field at offset in obj, as a read/write Object* reference.
// Does no checking, and is safe to use during GC, while maps are invalid.
// Does not invoke write barrier, so should only be assigned to
// during marking GC.
inline ObjectSlot RawField(int byte_offset) const;
static inline ObjectSlot RawField(const HeapObject* obj, int offset);
inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
static inline MaybeObjectSlot RawMaybeWeakField(HeapObject* obj, int offset);
DECL_CAST(HeapObject)
// Return the write barrier mode for this. Callers of this function
// must be able to present a reference to a DisallowHeapAllocation
// object as a sign that they are not going to use this function
// from code that allocates and thus invalidates the returned write
// barrier mode.
inline WriteBarrierMode GetWriteBarrierMode(
const DisallowHeapAllocation& promise);
// Dispatched behavior.
void HeapObjectShortPrint(std::ostream& os); // NOLINT
#ifdef OBJECT_PRINT
void PrintHeader(std::ostream& os, const char* id); // NOLINT
#endif
DECL_PRINTER(HeapObject)
DECL_VERIFIER(HeapObject)
#ifdef VERIFY_HEAP
inline void VerifyObjectField(Isolate* isolate, int offset);
inline void VerifySmiField(int offset);
inline void VerifyMaybeObjectField(Isolate* isolate, int offset);
// Verify a pointer is a valid HeapObject pointer that points to object
// areas in the heap.
static void VerifyHeapPointer(Isolate* isolate, Object* p);
#endif
static inline AllocationAlignment RequiredAlignment(Map map);
// Whether the object needs rehashing. That is the case if the object's
// content depends on FLAG_hash_seed. When the object is deserialized into
// a heap with a different hash seed, these objects need to adapt.
inline bool NeedsRehashing() const;
// Rehashing support is not implemented for all objects that need rehashing.
// With objects that need rehashing but cannot be rehashed, rehashing has to
// be disabled.
bool CanBeRehashed() const;
// Rehash the object based on the layout inferred from its map.
void RehashBasedOnMap(Isolate* isolate);
// Layout description.
// First field in a heap object is map.
static const int kMapOffset = Object::kHeaderSize;
static const int kHeaderSize = kMapOffset + kPointerSize;
STATIC_ASSERT(kMapOffset == Internals::kHeapObjectMapOffset);
inline Address GetFieldAddress(int field_offset) const;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(HeapObject);
};
// Replacement for HeapObject; temporarily separate for incremental transition:
class HeapObjectPtr : public ObjectPtr {
public: