[ubsan] Port HeapObject to the new design

Merging the temporary HeapObjectPtr back into HeapObject.

Bug: v8:3770
Change-Id: I5bcd23ca2f5ba862cf5b52955dca143e531c637b
Reviewed-on: https://chromium-review.googlesource.com/c/1386492
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58410}

Parent: 42b4180d20
Commit: 9302db480e
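The pattern behind every change below: HeapObject is no longer addressed through a raw C++ pointer but passed by value as a small wrapper around a tagged address. A minimal sketch of that shape, with simplified stand-in types rather than the real V8 declarations:

    #include <cstddef>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // Minimal sketch with stand-in types, not the real V8 declarations.
    using Address = uintptr_t;  // stand-in for v8::internal::Address

    class HeapObject {
     public:
      constexpr HeapObject() : ptr_(0) {}                // the null handle
      explicit constexpr HeapObject(Address ptr) : ptr_(ptr) {}
      constexpr Address ptr() const { return ptr_; }     // tagged address
      constexpr bool is_null() const { return ptr_ == 0; }

     private:
      Address ptr_;  // tagged pointer stored by value, not a C++ pointer
    };

    // Toy iterator that returns a null handle when exhausted, mirroring the
    // HeapIterator contract assumed by the loops rewritten in this diff.
    class HeapIterator {
     public:
      explicit HeapIterator(std::vector<Address> objects)
          : objects_(std::move(objects)) {}
      HeapObject next() {
        if (index_ >= objects_.size()) return HeapObject();
        return HeapObject(objects_[index_++]);
      }

     private:
      std::vector<Address> objects_;
      std::size_t index_ = 0;
    };

    int CountObjects(HeapIterator iterator) {
      int count = 0;
      // The old `while (HeapObject* obj = iterator.next())` relied on the
      // pointer-to-bool conversion; a value type needs an explicit null check.
      for (HeapObject obj = iterator.next(); !obj.is_null();
           obj = iterator.next()) {
        ++count;
      }
      return count;
    }

This is why, throughout the diff, `while (HeapObject* obj = it.next())` becomes a `for` loop guarded by `!obj.is_null()`, `nullptr` defaults become `HeapObject()`, and `reinterpret_cast<Address>(target)` becomes `target->ptr()`.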
@@ -881,7 +881,7 @@ class V8_EXPORT HandleScope {
 template<class F> friend class Local;

 // Object::GetInternalField and Context::GetEmbedderData use CreateHandle with
-// a HeapObject* in their shortcuts.
+// a HeapObject in their shortcuts.
 friend class Object;
 friend class Context;
 };

@@ -6,6 +6,7 @@
 #include "src/heap/heap.h"
 #include "src/isolate.h"
 #include "src/objects-inl.h"
+#include "src/objects/heap-object-inl.h"

 namespace v8 {
 namespace internal {

@@ -23,7 +24,7 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
 // Since we map the raw address of an root item to its root list index, the
 // raw address must be constant, i.e. the object must be immovable.
 if (RootsTable::IsImmortalImmovable(root_index)) {
-HeapObject* heap_object = HeapObject::cast(root);
+HeapObject heap_object = HeapObject::cast(root);
 Maybe<uint32_t> maybe_index = map_->Get(heap_object);
 uint32_t index = static_cast<uint32_t>(root_index);
 if (maybe_index.IsJust()) {

@@ -37,5 +38,9 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
 isolate->set_root_index_map(map_);
 }

+bool RootIndexMap::Lookup(Address obj, RootIndex* out_root_list) const {
+return Lookup(HeapObject::cast(ObjectPtr(obj)), out_root_list);
+}
+
 }  // namespace internal
 }  // namespace v8
@@ -8,7 +8,7 @@
 #include "include/v8.h"
 #include "src/assert-scope.h"
 #include "src/base/hashmap.h"
-#include "src/objects.h"
+#include "src/objects/heap-object.h"

 namespace v8 {
 namespace internal {

@@ -44,20 +44,20 @@ inline uintptr_t PointerToIndexHashMap<Address>::Key(Address value) {
 return static_cast<uintptr_t>(value);
 }

-template <typename Type>
-inline uintptr_t PointerToIndexHashMap<Type>::Key(Type value) {
-return reinterpret_cast<uintptr_t>(value);
+template <>
+inline uintptr_t PointerToIndexHashMap<HeapObject>::Key(HeapObject value) {
+return value.ptr();
 }

 class AddressToIndexHashMap : public PointerToIndexHashMap<Address> {};
-class HeapObjectToIndexHashMap : public PointerToIndexHashMap<HeapObject*> {};
+class HeapObjectToIndexHashMap : public PointerToIndexHashMap<HeapObject> {};

 class RootIndexMap {
  public:
 explicit RootIndexMap(Isolate* isolate);

 // Returns true on successful lookup and sets *|out_root_list|.
-bool Lookup(HeapObject* obj, RootIndex* out_root_list) const {
+bool Lookup(HeapObject obj, RootIndex* out_root_list) const {
 Maybe<uint32_t> maybe_index = map_->Get(obj);
 if (maybe_index.IsJust()) {
 *out_root_list = static_cast<RootIndex>(maybe_index.FromJust());

@@ -65,9 +65,7 @@ class RootIndexMap {
 }
 return false;
 }
-bool Lookup(Address obj, RootIndex* out_root_list) {
-return Lookup(reinterpret_cast<HeapObject*>(obj), out_root_list);
-}
+bool Lookup(Address obj, RootIndex* out_root_list) const;

  private:
 HeapObjectToIndexHashMap* map_;
@@ -23,7 +23,7 @@ PropertyCallbackArguments::PropertyCallbackArguments(Isolate* isolate,

 // Here the hole is set as default value.
 // It cannot escape into js as it's removed in Call below.
-HeapObject* the_hole = ReadOnlyRoots(isolate).the_hole_value();
+HeapObject the_hole = ReadOnlyRoots(isolate).the_hole_value();
 slot_at(T::kReturnValueDefaultValueIndex).store(the_hole);
 slot_at(T::kReturnValueIndex).store(the_hole);
 DCHECK((*slot_at(T::kHolderIndex))->IsHeapObject());

@@ -32,8 +32,8 @@ PropertyCallbackArguments::PropertyCallbackArguments(Isolate* isolate,

 FunctionCallbackArguments::FunctionCallbackArguments(
 internal::Isolate* isolate, internal::Object* data,
-internal::HeapObject* callee, internal::Object* holder,
-internal::HeapObject* new_target, internal::Address* argv, int argc)
+internal::HeapObject callee, internal::Object* holder,
+internal::HeapObject new_target, internal::Address* argv, int argc)
 : Super(isolate), argv_(argv), argc_(argc) {
 slot_at(T::kDataIndex).store(data);
 slot_at(T::kHolderIndex).store(holder);

@@ -41,7 +41,7 @@ FunctionCallbackArguments::FunctionCallbackArguments(
 slot_at(T::kIsolateIndex).store(reinterpret_cast<internal::Object*>(isolate));
 // Here the hole is set as default value.
 // It cannot escape into js as it's remove in Call below.
-HeapObject* the_hole = ReadOnlyRoots(isolate).the_hole_value();
+HeapObject the_hole = ReadOnlyRoots(isolate).the_hole_value();
 slot_at(T::kReturnValueDefaultValueIndex).store(the_hole);
 slot_at(T::kReturnValueIndex).store(the_hole);
 DCHECK((*slot_at(T::kHolderIndex))->IsHeapObject());

@@ -161,9 +161,9 @@ class FunctionCallbackArguments
 static const int kNewTargetIndex = T::kNewTargetIndex;

 FunctionCallbackArguments(internal::Isolate* isolate, internal::Object* data,
-internal::HeapObject* callee,
+internal::HeapObject callee,
 internal::Object* holder,
-internal::HeapObject* new_target,
+internal::HeapObject new_target,
 internal::Address* argv, int argc);

 /*
src/api.cc

@@ -762,7 +762,8 @@ StartupData SnapshotCreator::CreateBlob(

 { // Heap allocation is disallowed within this scope.
 i::HeapIterator heap_iterator(isolate->heap());
-while (i::HeapObject* current_obj = heap_iterator.next()) {
+for (i::HeapObject current_obj = heap_iterator.next();
+     !current_obj.is_null(); current_obj = heap_iterator.next()) {
 if (current_obj->IsSharedFunctionInfo()) {
 i::SharedFunctionInfo shared =
 i::SharedFunctionInfo::cast(current_obj);

@@ -807,7 +808,8 @@ StartupData SnapshotCreator::CreateBlob(
 CHECK(handle_checker.CheckGlobalAndEternalHandles());

 i::HeapIterator heap_iterator(isolate->heap());
-while (i::HeapObject* current_obj = heap_iterator.next()) {
+for (i::HeapObject current_obj = heap_iterator.next(); !current_obj.is_null();
+     current_obj = heap_iterator.next()) {
 if (current_obj->IsJSFunction()) {
 i::JSFunction fun = i::JSFunction::cast(current_obj);

@@ -1108,7 +1110,8 @@ i::Address* HandleScope::CreateHandle(i::Isolate* isolate, i::Address value) {

 i::Address* HandleScope::CreateHandle(
 i::NeverReadOnlySpaceObject* writable_object, i::Address value) {
-DCHECK(reinterpret_cast<i::HeapObject*>(writable_object)->IsHeapObject());
+DCHECK(i::ObjectPtr(reinterpret_cast<i::Address>(writable_object))
+           .IsHeapObject());
 return i::HandleScope::CreateHandle(writable_object->GetIsolate(), value);
 }

@@ -8630,8 +8633,8 @@ void Isolate::LowMemoryNotification() {
 }
 {
 i::HeapIterator iterator(isolate->heap());
-i::HeapObject* obj;
-while ((obj = iterator.next()) != nullptr) {
+for (i::HeapObject obj = iterator.next(); !obj.is_null();
+     obj = iterator.next()) {
 if (obj->IsAbstractCode()) {
 i::AbstractCode::cast(obj)->DropStackFrameCache();
 }
@@ -96,10 +96,10 @@ int RelocInfo::target_address_size() {
 return kPointerSize;
 }

-HeapObject* RelocInfo::target_object() {
+HeapObject RelocInfo::target_object() {
 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-return HeapObject::cast(reinterpret_cast<Object*>(
-    Assembler::target_address_at(pc_, constant_pool_)));
+return HeapObject::cast(
+    ObjectPtr(Assembler::target_address_at(pc_, constant_pool_)));
 }

 Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {

@@ -111,19 +111,17 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
 return origin->relative_code_target_object_handle_at(pc_);
 }

-void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
+void RelocInfo::set_target_object(Heap* heap, HeapObject target,
 WriteBarrierMode write_barrier_mode,
 ICacheFlushMode icache_flush_mode) {
 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-Assembler::set_target_address_at(pc_, constant_pool_,
-    reinterpret_cast<Address>(target),
+Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
 icache_flush_mode);
-if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
+if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
 WriteBarrierForCode(host(), this, target);
 }
 }

 Address RelocInfo::target_external_reference() {
 DCHECK(rmode_ == EXTERNAL_REFERENCE);
 return Assembler::target_address_at(pc_, constant_pool_);

@@ -328,13 +328,12 @@ void ArmDebugger::Debug() {
 while (cur < end) {
 PrintF(" 0x%08" V8PRIxPTR ": 0x%08x %10d",
 reinterpret_cast<intptr_t>(cur), *cur, *cur);
-HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
-int value = *cur;
+ObjectPtr obj(*cur);
 Heap* current_heap = sim_->isolate_->heap();
-if (((value & 1) == 0) || current_heap->Contains(obj)) {
+if (obj.IsSmi() || current_heap->Contains(HeapObject::cast(obj))) {
 PrintF(" (");
-if ((value & 1) == 0) {
-PrintF("smi %d", value / 2);
+if (obj.IsSmi()) {
+PrintF("smi %d", Smi::ToInt(obj));
 } else {
 obj->ShortPrint();
 }
@@ -685,10 +685,10 @@ Address RelocInfo::constant_pool_entry_address() {
 return Assembler::target_pointer_address_at(pc_);
 }

-HeapObject* RelocInfo::target_object() {
+HeapObject RelocInfo::target_object() {
 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-return HeapObject::cast(reinterpret_cast<Object*>(
-    Assembler::target_address_at(pc_, constant_pool_)));
+return HeapObject::cast(
+    ObjectPtr(Assembler::target_address_at(pc_, constant_pool_)));
 }

 Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {

@@ -701,19 +701,17 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
 }
 }

-void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
+void RelocInfo::set_target_object(Heap* heap, HeapObject target,
 WriteBarrierMode write_barrier_mode,
 ICacheFlushMode icache_flush_mode) {
 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
-Assembler::set_target_address_at(pc_, constant_pool_,
-    reinterpret_cast<Address>(target),
+Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
 icache_flush_mode);
-if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
+if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
 WriteBarrierForCode(host(), this, target);
 }
 }

 Address RelocInfo::target_external_reference() {
 DCHECK(rmode_ == EXTERNAL_REFERENCE);
 return Assembler::target_address_at(pc_, constant_pool_);

@@ -3291,15 +3291,12 @@ void Simulator::Debug() {
 while (cur < end) {
 PrintF(" 0x%016" PRIx64 ": 0x%016" PRIx64 " %10" PRId64,
 reinterpret_cast<uint64_t>(cur), *cur, *cur);
-HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
-int64_t value = *cur;
+ObjectPtr obj(*cur);
 Heap* current_heap = isolate_->heap();
-if (((value & 1) == 0) || current_heap->Contains(obj)) {
+if (obj.IsSmi() || current_heap->Contains(HeapObject::cast(obj))) {
 PrintF(" (");
-if ((value & kSmiTagMask) == 0) {
-DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
-int32_t untagged = (value >> kSmiShift) & 0xFFFFFFFF;
-PrintF("smi %" PRId32, untagged);
+if (obj.IsSmi()) {
+PrintF("smi %" PRId32, Smi::ToInt(obj));
 } else {
 obj->ShortPrint();
 }
@@ -218,7 +218,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {

 // Overwrite a host NaN with a quiet target NaN. Used by mksnapshot for
 // cross-snapshotting.
-static void QuietNaN(HeapObject* nan) { }
+static void QuietNaN(HeapObject nan) {}

 int pc_offset() const { return static_cast<int>(pc_ - buffer_); }

@@ -4073,7 +4073,7 @@ void Genesis::ConfigureUtilsObject() {
 }

 // The utils object can be removed for cases that reach this point.
-HeapObject* undefined = ReadOnlyRoots(heap()).undefined_value();
+HeapObject undefined = ReadOnlyRoots(heap()).undefined_value();
 native_context()->set_natives_utils_object(undefined);
 native_context()->set_extras_utils_object(undefined);
 }

@@ -255,7 +255,7 @@ V8_WARN_UNUSED_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
 JSObject obj = JSObject::cast(*receiver);

 // Set the new target.
-HeapObject* new_target;
+HeapObject new_target;
 if (is_construct_call) {
 // TODO(adamk): This should be passed through in args instead of
 // being patched in here. We need to set a non-undefined value
@@ -237,7 +237,8 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
 RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);
 HeapIterator iterator(isolate->heap());
-while (HeapObject* obj = iterator.next()) {
+for (HeapObject obj = iterator.next(); !obj.is_null();
+     obj = iterator.next()) {
 if (!obj->IsCode()) continue;
 Code code = Code::cast(obj);
 bool flush_icache = false;

@@ -362,7 +362,7 @@ void InstallUnoptimizedCode(UnoptimizedCompilationInfo* compilation_info,
 // Clear the feedback metadata field. In lite mode we don't need feedback
 // metadata since we never allocate feedback vectors.
 shared_info->set_raw_outer_scope_info_or_feedback_metadata(
-    HeapObjectPtr::cast(ReadOnlyRoots(isolate).undefined_value()));
+    ReadOnlyRoots(isolate).undefined_value());
 } else {
 Handle<FeedbackMetadata> feedback_metadata = FeedbackMetadata::New(
 isolate, compilation_info->feedback_vector_spec());
@@ -169,20 +169,16 @@ struct MachineTypeOf<Smi> {
 static constexpr MachineType value = MachineType::TaggedSigned();
 };
 template <class HeapObjectSubtype>
-struct MachineTypeOf<
-    HeapObjectSubtype,
-    typename std::enable_if<
-        std::is_base_of<HeapObject, HeapObjectSubtype>::value ||
-        std::is_base_of<HeapObjectPtr, HeapObjectSubtype>::value>::type> {
+struct MachineTypeOf<HeapObjectSubtype,
+                     typename std::enable_if<std::is_base_of<
+                         HeapObject, HeapObjectSubtype>::value>::type> {
 static constexpr MachineType value = MachineType::TaggedPointer();
 };

 template <class HeapObjectSubtype>
 constexpr MachineType MachineTypeOf<
-    HeapObjectSubtype,
-    typename std::enable_if<
-        std::is_base_of<HeapObject, HeapObjectSubtype>::value ||
-        std::is_base_of<HeapObjectPtr, HeapObjectSubtype>::value>::type>::value;
+    HeapObjectSubtype, typename std::enable_if<std::is_base_of<
+                           HeapObject, HeapObjectSubtype>::value>::type>::value;

 template <class Type, class Enable = void>
 struct MachineRepresentationOf {

@@ -363,15 +359,15 @@ typedef ZoneVector<CodeAssemblerVariable*> CodeAssemblerVariableList;

 typedef std::function<void()> CodeAssemblerCallback;

-// TODO(3770): The HeapObject/HeapObjectPtr dance is temporary (while the
+// TODO(3770): The Object/HeapObject dance is temporary (while the
 // incremental transition is in progress, we want to pretend that subclasses
-// of HeapObjectPtr are also subclasses of Object/HeapObject); it can be
+// of HeapObject are also subclasses of Object); it can be
 // removed when the migration is complete.
 template <class T, class U>
 struct is_subtype {
-static const bool value = std::is_base_of<U, T>::value ||
-    (std::is_base_of<U, HeapObject>::value &&
-     std::is_base_of<HeapObjectPtr, T>::value);
+static const bool value =
+    std::is_base_of<U, T>::value ||
+    (std::is_same<U, Object>::value && std::is_base_of<HeapObject, T>::value);
 };
 // TODO(3770): Temporary; remove after migration.
 template <>
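The rewritten is_subtype trait above keeps templated CodeAssembler code compiling while HeapObject subclasses are no longer C++ subclasses of Object. A small self-contained illustration, using hypothetical stand-in classes rather than the real V8 hierarchy:

    #include <type_traits>

    // Stand-in classes; only the inheritance shape matters here.
    class Object {};                   // old-style hierarchy root
    class HeapObject {};               // new value-type hierarchy root
    class Map : public HeapObject {};  // a migrated subclass

    template <class T, class U>
    struct is_subtype {
      static const bool value =
          std::is_base_of<U, T>::value ||
          (std::is_same<U, Object>::value &&
           std::is_base_of<HeapObject, T>::value);
    };

    // Map is not a C++ subclass of Object, but the trait still reports it as
    // a subtype of Object, so templated code expecting an Object subtype
    // keeps compiling during the migration.
    static_assert(is_subtype<Map, Object>::value, "treated as subtype of Object");
    static_assert(is_subtype<Map, HeapObject>::value, "real C++ base class");
    static_assert(!is_subtype<Object, Map>::value, "not symmetric");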
@@ -3207,7 +3207,7 @@ bool ShouldUseCallICFeedback(Node* node) {

 base::Optional<HeapObjectRef> GetHeapObjectFeedback(
 JSHeapBroker* broker, const FeedbackNexus& nexus) {
-HeapObject* object;
+HeapObject object;
 if (!nexus.GetFeedback()->GetHeapObject(&object)) return base::nullopt;
 return HeapObjectRef(broker, handle(object, broker->isolate()));
 }

@@ -898,9 +898,7 @@ class FixedArrayBaseData : public HeapObjectData {
  public:
 FixedArrayBaseData(JSHeapBroker* broker, ObjectData** storage,
 Handle<FixedArrayBase> object)
-// TODO(3770): Drop explicit cast after migrating HeapObject*.
-: HeapObjectData(broker, storage, Handle<HeapObject>(object.location())),
-  length_(object->length()) {}
+: HeapObjectData(broker, storage, object), length_(object->length()) {}

 int length() const { return length_; }
@@ -40,7 +40,7 @@ Handle<Context> ScriptContextTable::GetContext(Isolate* isolate,
 FixedArray::get(*table, i + kFirstContextSlotIndex, isolate));
 }

-OBJECT_CONSTRUCTORS_IMPL(Context, HeapObjectPtr)
+OBJECT_CONSTRUCTORS_IMPL(Context, HeapObject)
 NEVER_READ_ONLY_SPACE_IMPL(Context)
 CAST_ACCESSOR2(Context)
 SMI_ACCESSORS(Context, length, kLengthOffset)

@@ -83,12 +83,10 @@ void Context::set_previous(Context context) { set(PREVIOUS_INDEX, context); }
 Object* Context::next_context_link() { return get(Context::NEXT_CONTEXT_LINK); }

 bool Context::has_extension() { return !extension()->IsTheHole(); }
-HeapObject* Context::extension() {
+HeapObject Context::extension() {
 return HeapObject::cast(get(EXTENSION_INDEX));
 }
-void Context::set_extension(HeapObject* object) {
-set(EXTENSION_INDEX, object);
-}
+void Context::set_extension(HeapObject object) { set(EXTENSION_INDEX, object); }

 NativeContext Context::native_context() const {
 Object* result = get(NATIVE_CONTEXT_INDEX);

@@ -91,7 +91,7 @@ Context Context::closure_context() {
 JSObject Context::extension_object() {
 DCHECK(IsNativeContext() || IsFunctionContext() || IsBlockContext() ||
        IsEvalContext() || IsCatchContext());
-HeapObject* object = extension();
+HeapObject object = extension();
 if (object->IsTheHole()) return JSObject();
 DCHECK(object->IsJSContextExtensionObject() ||
        (IsNativeContext() && object->IsJSGlobalObject()));

@@ -442,7 +442,7 @@ class ScriptContextTable : public FixedArray {
 // Script contexts from all top-level scripts are gathered in
 // ScriptContextTable.

-class Context : public HeapObjectPtr {
+class Context : public HeapObject {
  public:
 NEVER_READ_ONLY_SPACE

@@ -553,8 +553,8 @@ class Context : public HeapObjectPtr {
 inline Object* next_context_link();

 inline bool has_extension();
-inline HeapObject* extension();
-inline void set_extension(HeapObject* object);
+inline HeapObject extension();
+inline void set_extension(HeapObject object);
 JSObject extension_object();
 JSReceiver extension_receiver();
 ScopeInfo scope_info();

@@ -675,7 +675,7 @@ class Context : public HeapObjectPtr {
 static bool IsBootstrappingOrValidParentContext(Object* object, Context kid);
 #endif

-OBJECT_CONSTRUCTORS(Context, HeapObjectPtr)
+OBJECT_CONSTRUCTORS(Context, HeapObject)
 };

 class NativeContext : public Context {
@@ -58,7 +58,7 @@ void RuntimeCallTimer::CommitTimeToCounter() {
 bool RuntimeCallTimer::IsStarted() { return start_ticks_ != base::TimeTicks(); }

 RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
-                                             HeapObject* heap_object,
+                                             HeapObject heap_object,
 RuntimeCallCounterId counter_id)
 : RuntimeCallTimerScope(isolate, counter_id) {}

@@ -1151,7 +1151,7 @@ class RuntimeCallTimerScope {
 RuntimeCallCounterId counter_id);
 // This constructor is here just to avoid calling GetIsolate() when the
 // stats are disabled and the isolate is not directly available.
-inline RuntimeCallTimerScope(Isolate* isolate, HeapObject* heap_object,
+inline RuntimeCallTimerScope(Isolate* isolate, HeapObject heap_object,
 RuntimeCallCounterId counter_id);
 inline RuntimeCallTimerScope(RuntimeCallStats* stats,
 RuntimeCallCounterId counter_id) {
@@ -514,7 +514,8 @@ std::unique_ptr<Coverage> Coverage::Collect(
 ->IsArrayList());
 DCHECK_EQ(v8::debug::Coverage::kBestEffort, collectionMode);
 HeapIterator heap_iterator(isolate->heap());
-while (HeapObject* current_obj = heap_iterator.next()) {
+for (HeapObject current_obj = heap_iterator.next();
+     !current_obj.is_null(); current_obj = heap_iterator.next()) {
 if (!current_obj->IsFeedbackVector()) continue;
 FeedbackVector vector = FeedbackVector::cast(current_obj);
 SharedFunctionInfo shared = vector->shared_function_info();

@@ -631,7 +632,8 @@ void Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) {
 isolate->MaybeInitializeVectorListFromHeap();

 HeapIterator heap_iterator(isolate->heap());
-while (HeapObject* o = heap_iterator.next()) {
+for (HeapObject o = heap_iterator.next(); !o.is_null();
+     o = heap_iterator.next()) {
 if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
 // If collecting binary coverage, reset
 // SFI::has_reported_binary_coverage to avoid optimizing / inlining

@@ -1251,7 +1251,8 @@ void Debug::InstallDebugBreakTrampoline() {
 std::vector<Handle<JSFunction>> needs_compile;
 {
 HeapIterator iterator(isolate_->heap());
-while (HeapObject* obj = iterator.next()) {
+for (HeapObject obj = iterator.next(); !obj.is_null();
+     obj = iterator.next()) {
 if (needs_to_clear_ic && obj->IsFeedbackVector()) {
 FeedbackVector::cast(obj)->ClearSlots(isolate_);
 continue;

@@ -825,7 +825,8 @@ class FunctionDataMap : public ThreadVisitor {
 void Fill(Isolate* isolate, Address* restart_frame_fp) {
 {
 HeapIterator iterator(isolate->heap(), HeapIterator::kFilterUnreachable);
-while (HeapObject* obj = iterator.next()) {
+for (HeapObject obj = iterator.next(); !obj.is_null();
+     obj = iterator.next()) {
 if (obj->IsSharedFunctionInfo()) {
 SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
 FunctionData* data = nullptr;
@@ -2246,7 +2246,7 @@ Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
 for (int i = 0; i < array->length(); i++) {
 new_array->set(i, array->get(i));
 }
-HeapObject* undefined_value = ReadOnlyRoots(isolate()).undefined_value();
+HeapObject undefined_value = ReadOnlyRoots(isolate()).undefined_value();
 for (int i = array->length(); i < length; i++) {
 new_array->set(i, undefined_value);
 }

@@ -124,10 +124,10 @@ void StatisticsExtension::GetCounters(
 args.GetReturnValue().Set(result);

 HeapIterator iterator(reinterpret_cast<Isolate*>(args.GetIsolate())->heap());
-HeapObject* obj;
 int reloc_info_total = 0;
 int source_position_table_total = 0;
-while ((obj = iterator.next()) != nullptr) {
+for (HeapObject obj = iterator.next(); !obj.is_null();
+     obj = iterator.next()) {
 if (obj->IsCode()) {
 Code code = Code::cast(obj);
 reloc_info_total += code->relocation_info()->Size();
@@ -20,8 +20,8 @@
 namespace v8 {
 namespace internal {

-OBJECT_CONSTRUCTORS_IMPL(FeedbackVector, HeapObjectPtr)
-OBJECT_CONSTRUCTORS_IMPL(FeedbackMetadata, HeapObjectPtr)
+OBJECT_CONSTRUCTORS_IMPL(FeedbackVector, HeapObject)
+OBJECT_CONSTRUCTORS_IMPL(FeedbackMetadata, HeapObject)

 NEVER_READ_ONLY_SPACE_IMPL(FeedbackVector)

@@ -115,7 +115,7 @@ void FeedbackVector::increment_deopt_count() {
 Code FeedbackVector::optimized_code() const {
 MaybeObject slot = optimized_code_weak_or_smi();
 DCHECK(slot->IsSmi() || slot->IsWeakOrCleared());
-HeapObject* heap_object;
+HeapObject heap_object;
 return slot->GetHeapObject(&heap_object) ? Code::cast(heap_object) : Code();
 }

@@ -276,7 +276,7 @@ void FeedbackVector::ComputeCounts(int* with_type_info, int* generic,
 case FeedbackSlotKind::kStoreInArrayLiteral:
 case FeedbackSlotKind::kStoreDataPropertyInLiteral:
 case FeedbackSlotKind::kTypeProfile: {
-HeapObject* heap_object;
+HeapObject heap_object;
 if (obj->IsWeakOrCleared() ||
     (obj->GetHeapObjectIfStrong(&heap_object) &&
      (heap_object->IsWeakFixedArray() || heap_object->IsString()))) {
@@ -41,7 +41,7 @@ bool FeedbackVectorSpec::HasTypeProfileSlot() const {
 }

 static bool IsPropertyNameFeedback(MaybeObject feedback) {
-HeapObject* heap_object;
+HeapObject heap_object;
 if (!feedback->GetHeapObjectIfStrong(&heap_object)) return false;
 if (heap_object->IsString()) return true;
 if (!heap_object->IsSymbol()) return false;

@@ -372,7 +372,7 @@ bool FeedbackVector::ClearSlots(Isolate* isolate) {

 void FeedbackVector::AssertNoLegacyTypes(MaybeObject object) {
 #ifdef DEBUG
-HeapObject* heap_object;
+HeapObject heap_object;
 if (object->GetHeapObject(&heap_object)) {
 // Instead of FixedArray, the Feedback and the Extra should contain
 // WeakFixedArrays. The only allowed FixedArray subtype is HashTable.

@@ -383,7 +383,7 @@ void FeedbackVector::AssertNoLegacyTypes(MaybeObject object) {

 Handle<WeakFixedArray> FeedbackNexus::EnsureArrayOfSize(int length) {
 Isolate* isolate = GetIsolate();
-HeapObject* heap_object;
+HeapObject heap_object;
 if (GetFeedback()->GetHeapObjectIfStrong(&heap_object) &&
     heap_object->IsWeakFixedArray() &&
     WeakFixedArray::cast(heap_object)->length() == length) {

@@ -396,7 +396,7 @@ Handle<WeakFixedArray> FeedbackNexus::EnsureArrayOfSize(int length) {

 Handle<WeakFixedArray> FeedbackNexus::EnsureExtraArrayOfSize(int length) {
 Isolate* isolate = GetIsolate();
-HeapObject* heap_object;
+HeapObject heap_object;
 if (GetFeedbackExtra()->GetHeapObjectIfStrong(&heap_object) &&
     heap_object->IsWeakFixedArray() &&
     WeakFixedArray::cast(heap_object)->length() == length) {

@@ -600,7 +600,7 @@ InlineCacheState FeedbackNexus::StateFromFeedback() const {
 // Don't check if the map is cleared.
 return MONOMORPHIC;
 }
-HeapObject* heap_object;
+HeapObject heap_object;
 if (feedback->GetHeapObjectIfStrong(&heap_object)) {
 if (heap_object->IsWeakFixedArray()) {
 // Determine state purely by our structure, don't check if the maps

@@ -617,7 +617,7 @@ InlineCacheState FeedbackNexus::StateFromFeedback() const {
 UNREACHABLE();
 }
 case FeedbackSlotKind::kCall: {
-HeapObject* heap_object;
+HeapObject heap_object;
 if (feedback == MaybeObject::FromObject(
         *FeedbackVector::MegamorphicSentinel(isolate))) {
 return GENERIC;
@@ -762,7 +762,7 @@ void FeedbackNexus::ConfigureCloneObject(Handle<Map> source_map,
 MaybeObject maybe_feedback = GetFeedback();
 Handle<HeapObject> feedback(maybe_feedback->IsStrongOrWeak()
                                 ? maybe_feedback->GetHeapObject()
-                                : nullptr,
+                                : HeapObject(),
                             isolate);
 switch (ic_state()) {
 case UNINITIALIZED:

@@ -924,7 +924,7 @@ int FeedbackNexus::ExtractMaps(MapHandles* maps) const {
 Isolate* isolate = GetIsolate();
 MaybeObject feedback = GetFeedback();
 bool is_named_feedback = IsPropertyNameFeedback(feedback);
-HeapObject* heap_object;
+HeapObject heap_object;
 if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
      heap_object->IsWeakFixedArray()) ||
     is_named_feedback) {

@@ -937,7 +937,7 @@ int FeedbackNexus::ExtractMaps(MapHandles* maps) const {
 array = WeakFixedArray::cast(heap_object);
 }
 const int increment = 2;
-HeapObject* heap_object;
+HeapObject heap_object;
 for (int i = 0; i < array->length(); i += increment) {
 DCHECK(array->Get(i)->IsWeakOrCleared());
 if (array->Get(i)->GetHeapObjectIfWeak(&heap_object)) {

@@ -972,7 +972,7 @@ MaybeObjectHandle FeedbackNexus::FindHandlerForMap(Handle<Map> map) const {
 MaybeObject feedback = GetFeedback();
 Isolate* isolate = GetIsolate();
 bool is_named_feedback = IsPropertyNameFeedback(feedback);
-HeapObject* heap_object;
+HeapObject heap_object;
 if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
      heap_object->IsWeakFixedArray()) ||
     is_named_feedback) {

@@ -984,7 +984,7 @@ MaybeObjectHandle FeedbackNexus::FindHandlerForMap(Handle<Map> map) const {
 array = WeakFixedArray::cast(heap_object);
 }
 const int increment = 2;
-HeapObject* heap_object;
+HeapObject heap_object;
 for (int i = 0; i < array->length(); i += increment) {
 DCHECK(array->Get(i)->IsWeakOrCleared());
 if (array->Get(i)->GetHeapObjectIfWeak(&heap_object)) {

@@ -1019,7 +1019,7 @@ bool FeedbackNexus::FindHandlers(MaybeObjectHandles* code_list,
 Isolate* isolate = GetIsolate();
 int count = 0;
 bool is_named_feedback = IsPropertyNameFeedback(feedback);
-HeapObject* heap_object;
+HeapObject heap_object;
 if ((feedback->GetHeapObjectIfStrong(&heap_object) &&
      heap_object->IsWeakFixedArray()) ||
     is_named_feedback) {

@@ -1031,7 +1031,7 @@ bool FeedbackNexus::FindHandlers(MaybeObjectHandles* code_list,
 array = WeakFixedArray::cast(heap_object);
 }
 const int increment = 2;
-HeapObject* heap_object;
+HeapObject heap_object;
 for (int i = 0; i < array->length(); i += increment) {
 // Be sure to skip handlers whose maps have been cleared.
 DCHECK(array->Get(i)->IsWeakOrCleared());

@@ -1227,7 +1227,7 @@ MaybeHandle<JSObject> FeedbackNexus::GetConstructorFeedback() const {
 DCHECK_EQ(kind(), FeedbackSlotKind::kInstanceOf);
 Isolate* isolate = GetIsolate();
 MaybeObject feedback = GetFeedback();
-HeapObject* heap_object;
+HeapObject heap_object;
 if (feedback->GetHeapObjectIfWeak(&heap_object)) {
 return handle(JSObject::cast(heap_object), isolate);
 }
@@ -147,7 +147,7 @@ class FeedbackMetadata;
 // - optimized code cell (weak cell or Smi marker)
 // followed by an array of feedback slots, of length determined by the feedback
 // metadata.
-class FeedbackVector : public HeapObjectPtr {
+class FeedbackVector : public HeapObject {
  public:
 NEVER_READ_ONLY_SPACE

@@ -306,7 +306,7 @@ class FeedbackVector : public HeapObjectPtr {
 static void AddToVectorsForProfilingTools(Isolate* isolate,
                                           Handle<FeedbackVector> vector);

-OBJECT_CONSTRUCTORS(FeedbackVector, HeapObjectPtr);
+OBJECT_CONSTRUCTORS(FeedbackVector, HeapObject);
 };

 class V8_EXPORT_PRIVATE FeedbackVectorSpec {

@@ -451,7 +451,7 @@ class SharedFeedbackSlot {
 // of int32 data. The length is never stored - it is always calculated from
 // slot_count. All instances are created through the static New function, and
 // the number of slots is static once an instance is created.
-class FeedbackMetadata : public HeapObjectPtr {
+class FeedbackMetadata : public HeapObject {
  public:
 DECL_CAST2(FeedbackMetadata)

@@ -517,7 +517,7 @@ class FeedbackMetadata : public HeapObjectPtr {
 kInt32Size * kBitsPerByte, uint32_t>
 VectorICComputer;

-OBJECT_CONSTRUCTORS(FeedbackMetadata, HeapObjectPtr);
+OBJECT_CONSTRUCTORS(FeedbackMetadata, HeapObject);
 };

 // Verify that an empty hash field looks like a tagged object, but can't
@@ -632,7 +632,7 @@ struct SlotTraits<SlotLocation::kOnHeap> {
 using ObjectSlot = SlotTraits<SlotLocation::kOnHeap>::TObjectSlot;

 // An MapWordSlot instance describes a kTaggedSize-sized on-heap field ("slot")
-// holding HeapObjectPtr (strong heap object) value or a forwarding pointer.
+// holding HeapObject (strong heap object) value or a forwarding pointer.
 using MapWordSlot = SlotTraits<SlotLocation::kOnHeap>::TMapWordSlot;

 // A MaybeObjectSlot instance describes a kTaggedSize-sized on-heap field

@@ -27,7 +27,7 @@ bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
 DCHECK_NOT_NULL(location_);
 Object* object = reinterpret_cast<Object*>(*location_);
 if (object->IsSmi()) return true;
-HeapObject* heap_object = HeapObject::cast(object);
+HeapObject heap_object = HeapObject::cast(object);
 Isolate* isolate;
 if (!Isolate::FromWritableHeapObject(heap_object, &isolate)) return true;
 RootIndex root_index;

@@ -76,7 +76,7 @@ class LocalArrayBufferTracker {
 // Frees up array buffers.
 //
 // Sample usage:
-// Free([](HeapObject* array_buffer) {
+// Free([](HeapObject array_buffer) {
 //   if (should_free_internal(array_buffer)) return true;
 //   return false;
 // });
@@ -12,7 +12,7 @@ namespace v8 {
 namespace internal {

 // Record code statisitcs.
-void CodeStatistics::RecordCodeAndMetadataStatistics(HeapObject* object,
+void CodeStatistics::RecordCodeAndMetadataStatistics(HeapObject object,
 Isolate* isolate) {
 if (object->IsScript()) {
 Script script = Script::cast(object);

@@ -61,7 +61,7 @@ void CodeStatistics::ResetCodeAndMetadataStatistics(Isolate* isolate) {
 void CodeStatistics::CollectCodeStatistics(PagedSpace* space,
 Isolate* isolate) {
 HeapObjectIterator obj_it(space);
-for (HeapObject* obj = obj_it.Next(); obj != nullptr; obj = obj_it.Next()) {
+for (HeapObject obj = obj_it.Next(); !obj.is_null(); obj = obj_it.Next()) {
 RecordCodeAndMetadataStatistics(obj, isolate);
 }
 }

@@ -73,7 +73,7 @@ void CodeStatistics::CollectCodeStatistics(PagedSpace* space,
 void CodeStatistics::CollectCodeStatistics(LargeObjectSpace* space,
 Isolate* isolate) {
 LargeObjectIterator obj_it(space);
-for (HeapObject* obj = obj_it.Next(); obj != nullptr; obj = obj_it.Next()) {
+for (HeapObject obj = obj_it.Next(); !obj.is_null(); obj = obj_it.Next()) {
 RecordCodeAndMetadataStatistics(obj, isolate);
 }
 }

@@ -192,7 +192,7 @@ void CodeStatistics::CollectCommentStatistics(Isolate* isolate,
 }

 // Collects code comment statistics
-void CodeStatistics::CollectCodeCommentStatistics(HeapObject* obj,
+void CodeStatistics::CollectCodeCommentStatistics(HeapObject obj,
 Isolate* isolate) {
 // Bytecode objects do not contain RelocInfo. Only process code objects
 // for code comment statistics.

@@ -31,13 +31,13 @@ class CodeStatistics {
 #endif

  private:
-static void RecordCodeAndMetadataStatistics(HeapObject* object,
+static void RecordCodeAndMetadataStatistics(HeapObject object,
                                             Isolate* isolate);

 #ifdef DEBUG
 static void CollectCommentStatistics(Isolate* isolate,
                                      CodeCommentsIterator* it);
-static void CollectCodeCommentStatistics(HeapObject* obj, Isolate* isolate);
+static void CollectCodeCommentStatistics(HeapObject obj, Isolate* isolate);
 static void EnterComment(Isolate* isolate, const char* comment, int delta);
 static void ResetCodeStatistics(Isolate* isolate);
 #endif
@@ -92,34 +92,27 @@ class ConcurrentMarkingVisitor final
 embedder_tracing_enabled_(embedder_tracing_enabled),
 mark_compact_epoch_(mark_compact_epoch) {}

-template <typename T, typename = typename std::enable_if<
-                          std::is_base_of<Object, T>::value>::type>
-static V8_INLINE T* Cast(HeapObject* object) {
+template <typename T>
+static V8_INLINE T Cast(HeapObject object) {
 return T::cast(object);
 }

-template <typename T, typename = typename std::enable_if<
-                          std::is_base_of<ObjectPtr, T>::value>::type>
-static V8_INLINE T Cast(HeapObject* object) {
-return T::cast(object);
-}
-
-bool ShouldVisit(HeapObject* object) {
+bool ShouldVisit(HeapObject object) {
 return marking_state_.GreyToBlack(object);
 }

 bool AllowDefaultJSObjectVisit() { return false; }

 template <typename THeapObjectSlot>
-void ProcessStrongHeapObject(HeapObject* host, THeapObjectSlot slot,
-                             HeapObject* heap_object) {
+void ProcessStrongHeapObject(HeapObject host, THeapObjectSlot slot,
+                             HeapObject heap_object) {
 MarkObject(heap_object);
 MarkCompactCollector::RecordSlot(host, slot, heap_object);
 }

 template <typename THeapObjectSlot>
-void ProcessWeakHeapObject(HeapObject* host, THeapObjectSlot slot,
-                           HeapObject* heap_object) {
+void ProcessWeakHeapObject(HeapObject host, THeapObjectSlot slot,
+                           HeapObject heap_object) {
 #ifdef THREAD_SANITIZER
 // Perform a dummy acquire load to tell TSAN that there is no data race
 // in mark-bit initialization. See MemoryChunk::Initialize for the
@@ -140,22 +133,22 @@ class ConcurrentMarkingVisitor final
 }
 }

-void VisitPointers(HeapObject* host, ObjectSlot start,
+void VisitPointers(HeapObject host, ObjectSlot start,
                    ObjectSlot end) override {
 VisitPointersImpl(host, start, end);
 }

-void VisitPointers(HeapObject* host, MaybeObjectSlot start,
+void VisitPointers(HeapObject host, MaybeObjectSlot start,
                    MaybeObjectSlot end) override {
 VisitPointersImpl(host, start, end);
 }

 template <typename TSlot>
-V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end) {
+V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
 using THeapObjectSlot = typename TSlot::THeapObjectSlot;
 for (TSlot slot = start; slot < end; ++slot) {
 typename TSlot::TObject object = slot.Relaxed_Load();
-HeapObject* heap_object;
+HeapObject heap_object;
 if (object.GetHeapObjectIfStrong(&heap_object)) {
 // If the reference changes concurrently from strong to weak, the write
 // barrier will treat the weak reference as strong, so we won't miss the

@@ -170,12 +163,12 @@ class ConcurrentMarkingVisitor final

 // Weak list pointers should be ignored during marking. The lists are
 // reconstructed after GC.
-void VisitCustomWeakPointers(HeapObject* host, ObjectSlot start,
+void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
                              ObjectSlot end) final {}

 void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
-HeapObject* object = HeapObject::cast(rinfo->target_object());
+HeapObject object = rinfo->target_object();
 RecordRelocSlot(host, rinfo, object);
 if (!marking_state_.IsBlackOrGrey(object)) {
 if (host->IsWeakObject(object)) {
@@ -194,13 +187,13 @@ class ConcurrentMarkingVisitor final
 MarkObject(target);
 }

-void VisitPointersInSnapshot(HeapObject* host, const SlotSnapshot& snapshot) {
+void VisitPointersInSnapshot(HeapObject host, const SlotSnapshot& snapshot) {
 for (int i = 0; i < snapshot.number_of_slots(); i++) {
 ObjectSlot slot = snapshot.slot(i);
 Object* object = snapshot.value(i);
 DCHECK(!HasWeakHeapObjectTag(object));
 if (!object->IsHeapObject()) continue;
-HeapObject* heap_object = HeapObject::cast(object);
+HeapObject heap_object = HeapObject::cast(object);
 MarkObject(heap_object);
 MarkCompactCollector::RecordSlot(host, slot, heap_object);
 }

@@ -228,7 +221,7 @@ class ConcurrentMarkingVisitor final
 return 0;
 }
 if (weak_ref->target()->IsHeapObject()) {
-HeapObject* target = HeapObject::cast(weak_ref->target());
+HeapObject target = HeapObject::cast(weak_ref->target());
 if (marking_state_.IsBlackOrGrey(target)) {
 // Record the slot inside the JSWeakRef, since the
 // VisitJSObjectSubclass above didn't visit it.

@@ -251,7 +244,7 @@ class ConcurrentMarkingVisitor final
 }

 if (weak_cell->target()->IsHeapObject()) {
-HeapObject* target = HeapObject::cast(weak_cell->target());
+HeapObject target = HeapObject::cast(weak_cell->target());
 if (marking_state_.IsBlackOrGrey(target)) {
 // Record the slot inside the JSWeakCell, since the
 // VisitJSObjectSubclass above didn't visit it.
@@ -424,7 +417,7 @@ class ConcurrentMarkingVisitor final
 for (int i = 0; i < table->Capacity(); i++) {
 ObjectSlot key_slot =
     table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
-HeapObject* key = HeapObject::cast(table->KeyAt(i));
+HeapObject key = HeapObject::cast(table->KeyAt(i));
 MarkCompactCollector::RecordSlot(table, key_slot, key);

 ObjectSlot value_slot =

@@ -437,7 +430,7 @@ class ConcurrentMarkingVisitor final
 Object* value_obj = table->ValueAt(i);

 if (value_obj->IsHeapObject()) {
-HeapObject* value = HeapObject::cast(value_obj);
+HeapObject value = HeapObject::cast(value_obj);
 MarkCompactCollector::RecordSlot(table, value_slot, value);

 // Revisit ephemerons with both key and value unreachable at end

@@ -455,7 +448,7 @@ class ConcurrentMarkingVisitor final

 // Implements ephemeron semantics: Marks value if key is already reachable.
 // Returns true if value was actually marked.
-bool VisitEphemeron(HeapObject* key, HeapObject* value) {
+bool VisitEphemeron(HeapObject key, HeapObject value) {
 if (marking_state_.IsBlackOrGrey(key)) {
 if (marking_state_.WhiteToGrey(value)) {
 shared_.Push(value);

@@ -469,7 +462,7 @@ class ConcurrentMarkingVisitor final
 return false;
 }

-void MarkObject(HeapObject* object) {
+void MarkObject(HeapObject object) {
 #ifdef THREAD_SANITIZER
 // Perform a dummy acquire load to tell TSAN that there is no data race
 // in mark-bit initialization. See MemoryChunk::Initialize for the
@@ -491,7 +484,7 @@ class ConcurrentMarkingVisitor final
 slot_snapshot_->clear();
 }

-void VisitPointers(HeapObject* host, ObjectSlot start,
+void VisitPointers(HeapObject host, ObjectSlot start,
                    ObjectSlot end) override {
 for (ObjectSlot p = start; p < end; ++p) {
 Object* object = p.Relaxed_Load();

@@ -499,7 +492,7 @@ class ConcurrentMarkingVisitor final
 }
 }

-void VisitPointers(HeapObject* host, MaybeObjectSlot start,
+void VisitPointers(HeapObject host, MaybeObjectSlot start,
                    MaybeObjectSlot end) override {
 // This should never happen, because we don't use snapshotting for objects
 // which contain weak references.

@@ -518,7 +511,7 @@ class ConcurrentMarkingVisitor final
 UNREACHABLE();
 }

-void VisitCustomWeakPointers(HeapObject* host, ObjectSlot start,
+void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
                              ObjectSlot end) override {
 DCHECK(host->IsJSWeakCell() || host->IsJSWeakRef());
 }

@@ -596,7 +589,7 @@ class ConcurrentMarkingVisitor final
 return slot_snapshot_;
 }

-void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject* target) {
+void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target) {
 MarkCompactCollector::RecordRelocSlotInfo info =
     MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
 if (info.should_record) {
@@ -623,33 +616,33 @@ class ConcurrentMarkingVisitor final
 // Strings can change maps due to conversion to thin string or external strings.
 // Use unchecked cast to avoid data race in slow dchecks.
 template <>
-ConsString ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+ConsString ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return ConsString::unchecked_cast(object);
 }

 template <>
-SlicedString ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+SlicedString ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return SlicedString::unchecked_cast(object);
 }

 template <>
-ThinString ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+ThinString ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return ThinString::unchecked_cast(object);
 }

 template <>
-SeqOneByteString ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+SeqOneByteString ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return SeqOneByteString::unchecked_cast(object);
 }

 template <>
-SeqTwoByteString ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+SeqTwoByteString ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return SeqTwoByteString::unchecked_cast(object);
 }

 // Fixed array can become a free space during left trimming.
 template <>
-FixedArray ConcurrentMarkingVisitor::Cast(HeapObject* object) {
+FixedArray ConcurrentMarkingVisitor::Cast(HeapObject object) {
 return FixedArray::unchecked_cast(object);
 }
@@ -729,7 +722,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
 int objects_processed = 0;
 while (current_marked_bytes < kBytesUntilInterruptCheck &&
        objects_processed < kObjectsUntilInterrupCheck) {
-HeapObject* object;
+HeapObject object;
 if (!shared_->Pop(task_id, &object)) {
 done = true;
 break;

@@ -63,8 +63,8 @@ class ConcurrentMarking {
 // Worklist::kMaxNumTasks being maxed at 8 (concurrent marking doesn't use
 // task 0, reserved for the main thread).
 static constexpr int kMaxTasks = 7;
-using MarkingWorklist = Worklist<HeapObject*, 64 /* segment size */>;
-using EmbedderTracingWorklist = Worklist<HeapObject*, 16 /* segment size */>;
+using MarkingWorklist = Worklist<HeapObject, 64 /* segment size */>;
+using EmbedderTracingWorklist = Worklist<HeapObject, 16 /* segment size */>;

 ConcurrentMarking(Heap* heap, MarkingWorklist* shared,
                   MarkingWorklist* bailout, MarkingWorklist* on_hold,
@@ -122,31 +122,30 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,

 } // namespace

-HeapObject* Factory::AllocateRawWithImmortalMap(int size,
-                                                PretenureFlag pretenure,
-                                                Map map,
-                                                AllocationAlignment alignment) {
-HeapObject* result = isolate()->heap()->AllocateRawWithRetryOrFail(
+HeapObject Factory::AllocateRawWithImmortalMap(int size,
+                                               PretenureFlag pretenure, Map map,
+                                               AllocationAlignment alignment) {
+HeapObject result = isolate()->heap()->AllocateRawWithRetryOrFail(
     size, Heap::SelectSpace(pretenure), alignment);
 result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
 return result;
 }

-HeapObject* Factory::AllocateRawWithAllocationSite(
+HeapObject Factory::AllocateRawWithAllocationSite(
     Handle<Map> map, PretenureFlag pretenure,
     Handle<AllocationSite> allocation_site) {
 DCHECK(map->instance_type() != MAP_TYPE);
 int size = map->instance_size();
 if (!allocation_site.is_null()) size += AllocationMemento::kSize;
 AllocationSpace space = Heap::SelectSpace(pretenure);
-HeapObject* result =
+HeapObject result =
     isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
 WriteBarrierMode write_barrier_mode =
     space == NEW_SPACE ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
 result->set_map_after_allocation(*map, write_barrier_mode);
 if (!allocation_site.is_null()) {
 AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
-    ObjectPtr(reinterpret_cast<Address>(result) + map->instance_size()));
+    ObjectPtr(result->ptr() + map->instance_size()));
 InitializeAllocationMemento(alloc_memento, *allocation_site);
 }
 return result;
@@ -162,9 +161,9 @@ void Factory::InitializeAllocationMemento(AllocationMemento memento,
 }
 }

-HeapObject* Factory::AllocateRawArray(int size, PretenureFlag pretenure) {
+HeapObject Factory::AllocateRawArray(int size, PretenureFlag pretenure) {
 AllocationSpace space = Heap::SelectSpace(pretenure);
-HeapObject* result =
+HeapObject result =
     isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
 if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
 MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());

@@ -173,27 +172,26 @@ HeapObject Factory::AllocateRawArray(int size, PretenureFlag pretenure) {
 return result;
 }

-HeapObject* Factory::AllocateRawFixedArray(int length,
-                                           PretenureFlag pretenure) {
+HeapObject Factory::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
 if (length < 0 || length > FixedArray::kMaxLength) {
 isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
 }
 return AllocateRawArray(FixedArray::SizeFor(length), pretenure);
 }

-HeapObject* Factory::AllocateRawWeakArrayList(int capacity,
-                                              PretenureFlag pretenure) {
+HeapObject Factory::AllocateRawWeakArrayList(int capacity,
+                                             PretenureFlag pretenure) {
 if (capacity < 0 || capacity > WeakArrayList::kMaxCapacity) {
 isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
 }
 return AllocateRawArray(WeakArrayList::SizeForCapacity(capacity), pretenure);
 }

-HeapObject* Factory::New(Handle<Map> map, PretenureFlag pretenure) {
+HeapObject Factory::New(Handle<Map> map, PretenureFlag pretenure) {
 DCHECK(map->instance_type() != MAP_TYPE);
 int size = map->instance_size();
 AllocationSpace space = Heap::SelectSpace(pretenure);
-HeapObject* result =
+HeapObject result =
     isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
 // New space objects are allocated white.
 WriteBarrierMode write_barrier_mode =
@@ -206,7 +204,7 @@ Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
 AllocationSpace space) {
 AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
 Heap* heap = isolate()->heap();
-HeapObject* result = heap->AllocateRawWithRetryOrFail(size, space, alignment);
+HeapObject result = heap->AllocateRawWithRetryOrFail(size, space, alignment);
 #ifdef DEBUG
 MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
 DCHECK(chunk->owner()->identity() == space);

@@ -290,7 +288,7 @@ Handle<PropertyArray> Factory::NewPropertyArray(int length,
 PretenureFlag pretenure) {
 DCHECK_LE(0, length);
 if (length == 0) return empty_property_array();
-HeapObject* result = AllocateRawFixedArray(length, pretenure);
+HeapObject result = AllocateRawFixedArray(length, pretenure);
 result->set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
 Handle<PropertyArray> array(PropertyArray::cast(result), isolate());
 array->initialize_length(length);

@@ -301,7 +299,7 @@ Handle<PropertyArray> Factory::NewPropertyArray(int length,
 Handle<FixedArray> Factory::NewFixedArrayWithFiller(RootIndex map_root_index,
                                                     int length, Object* filler,
                                                     PretenureFlag pretenure) {
-HeapObject* result = AllocateRawFixedArray(length, pretenure);
+HeapObject result = AllocateRawFixedArray(length, pretenure);
 DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
 Map map = Map::cast(isolate()->root(map_root_index));
 result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);

@@ -333,7 +331,7 @@ Handle<T> Factory::NewWeakFixedArrayWithMap(RootIndex map_root_index,
 // Zero-length case must be handled outside.
 DCHECK_LT(0, length);

-HeapObject* result =
+HeapObject result =
     AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
 Map map = Map::cast(isolate()->root(map_root_index));
 result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);

@@ -359,7 +357,7 @@ Handle<WeakFixedArray> Factory::NewWeakFixedArray(int length,
 PretenureFlag pretenure) {
 DCHECK_LE(0, length);
 if (length == 0) return empty_weak_fixed_array();
-HeapObject* result =
+HeapObject result =
     AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kWeakFixedArrayMap));
 result->set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);

@@ -378,7 +376,7 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(int length,
 AllocationSpace space = Heap::SelectSpace(pretenure);
 Heap* heap = isolate()->heap();
 AllocationResult allocation = heap->AllocateRaw(size, space);
-HeapObject* result = nullptr;
+HeapObject result;
 if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
 if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
 MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());

@@ -418,7 +416,7 @@ Handle<FeedbackVector> Factory::NewFeedbackVector(
 DCHECK_LE(0, length);
 int size = FeedbackVector::SizeFor(length);

-HeapObject* result =
+HeapObject result =
     AllocateRawWithImmortalMap(size, pretenure, *feedback_vector_map());
 Handle<FeedbackVector> vector(FeedbackVector::cast(result), isolate());
 vector->set_shared_function_info(*shared);

@@ -439,7 +437,7 @@ Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(
 DCHECK_LE(0, length);
 int size = EmbedderDataArray::SizeFor(length);

-HeapObject* result =
+HeapObject result =
     AllocateRawWithImmortalMap(size, pretenure, *embedder_data_array_map());
 Handle<EmbedderDataArray> array(EmbedderDataArray::cast(result), isolate());
 array->set_length(length);
@ -497,7 +495,7 @@ Handle<FixedArrayBase> Factory::NewFixedDoubleArray(int length,
|
||||
}
|
||||
int size = FixedDoubleArray::SizeFor(length);
|
||||
Map map = *fixed_double_array_map();
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, pretenure, map, kDoubleAligned);
|
||||
Handle<FixedDoubleArray> array(FixedDoubleArray::cast(result), isolate());
|
||||
array->set_length(length);
|
||||
@ -518,7 +516,7 @@ Handle<FeedbackMetadata> Factory::NewFeedbackMetadata(int slot_count,
|
||||
PretenureFlag tenure) {
|
||||
DCHECK_LE(0, slot_count);
|
||||
int size = FeedbackMetadata::SizeFor(slot_count);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, tenure, *feedback_metadata_map());
|
||||
Handle<FeedbackMetadata> data(FeedbackMetadata::cast(result), isolate());
|
||||
data->set_slot_count(slot_count);
|
||||
@ -549,7 +547,7 @@ Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
|
||||
DCHECK_EQ(0, capacity % T::kLoadFactor);
|
||||
|
||||
int size = T::SizeFor(capacity);
|
||||
HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, *map);
|
||||
HeapObject result = AllocateRawWithImmortalMap(size, pretenure, *map);
|
||||
Handle<T> table(T::cast(result), isolate());
|
||||
table->Initialize(isolate(), capacity);
|
||||
return table;
|
||||
@ -825,7 +823,7 @@ Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
|
||||
|
||||
Map map = *one_byte_internalized_string_map();
|
||||
int size = SeqOneByteString::SizeFor(length);
|
||||
HeapObject* result = AllocateRawWithImmortalMap(
|
||||
HeapObject result = AllocateRawWithImmortalMap(
|
||||
size,
|
||||
isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
|
||||
: TENURED,
|
||||
@ -844,7 +842,7 @@ Handle<String> Factory::AllocateTwoByteInternalizedString(
|
||||
|
||||
Map map = *internalized_string_map();
|
||||
int size = SeqTwoByteString::SizeFor(str.length());
|
||||
HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
|
||||
HeapObject result = AllocateRawWithImmortalMap(size, TENURED, map);
|
||||
Handle<SeqTwoByteString> answer(SeqTwoByteString::cast(result), isolate());
|
||||
answer->set_length(str.length());
|
||||
answer->set_hash_field(hash_field);
|
||||
@ -874,7 +872,7 @@ Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
|
||||
size = SeqTwoByteString::SizeFor(chars);
|
||||
}
|
||||
|
||||
HeapObject* result = AllocateRawWithImmortalMap(
|
||||
HeapObject result = AllocateRawWithImmortalMap(
|
||||
size,
|
||||
isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
|
||||
: TENURED,
|
||||
@ -1004,7 +1002,7 @@ MaybeHandle<SeqOneByteString> Factory::NewRawOneByteString(
|
||||
int size = SeqOneByteString::SizeFor(length);
|
||||
DCHECK_GE(SeqOneByteString::kMaxSize, size);
|
||||
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, pretenure, *one_byte_string_map());
|
||||
Handle<SeqOneByteString> string(SeqOneByteString::cast(result), isolate());
|
||||
string->set_length(length);
|
||||
@ -1022,7 +1020,7 @@ MaybeHandle<SeqTwoByteString> Factory::NewRawTwoByteString(
|
||||
int size = SeqTwoByteString::SizeFor(length);
|
||||
DCHECK_GE(SeqTwoByteString::kMaxSize, size);
|
||||
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, pretenure, *string_map());
|
||||
Handle<SeqTwoByteString> string(SeqTwoByteString::cast(result), isolate());
|
||||
string->set_length(length);
|
||||
@ -1382,7 +1380,7 @@ Handle<Symbol> Factory::NewSymbol(PretenureFlag flag) {
|
||||
// Statically ensure that it is safe to allocate symbols in paged spaces.
|
||||
STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);
|
||||
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(Symbol::kSize, flag, *symbol_map());
|
||||
|
||||
// Generate a random hash value.
|
||||
@ -1420,7 +1418,7 @@ Handle<Context> Factory::NewContext(RootIndex map_root_index, int size,
|
||||
DCHECK_LE(Context::SizeFor(variadic_part_length), size);
|
||||
|
||||
Map map = Map::cast(isolate()->root(map_root_index));
|
||||
HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
|
||||
HeapObject result = AllocateRawWithImmortalMap(size, pretenure, map);
|
||||
Handle<Context> context(Context::cast(result), isolate());
|
||||
context->set_length(variadic_part_length);
|
||||
DCHECK_EQ(context->SizeFromMap(map), size);
|
||||
@ -1609,7 +1607,7 @@ Handle<Struct> Factory::NewStruct(InstanceType type, PretenureFlag pretenure) {
|
||||
UNREACHABLE();
|
||||
}
|
||||
int size = map->instance_size();
|
||||
HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
|
||||
HeapObject result = AllocateRawWithImmortalMap(size, pretenure, map);
|
||||
Handle<Struct> str(Struct::cast(result), isolate());
|
||||
str->InitializeBody(size);
|
||||
return str;
|
||||
@ -1740,7 +1738,7 @@ Handle<Foreign> Factory::NewForeign(Address addr, PretenureFlag pretenure) {
|
||||
// Statically ensure that it is safe to allocate foreigns in paged spaces.
|
||||
STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
|
||||
Map map = *foreign_map();
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(map->instance_size(), pretenure, map);
|
||||
Handle<Foreign> foreign(Foreign::cast(result), isolate());
|
||||
foreign->set_foreign_address(addr);
|
||||
@ -1753,7 +1751,7 @@ Handle<ByteArray> Factory::NewByteArray(int length, PretenureFlag pretenure) {
|
||||
isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
|
||||
}
|
||||
int size = ByteArray::SizeFor(length);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, pretenure, *byte_array_map());
|
||||
Handle<ByteArray> array(ByteArray::cast(result), isolate());
|
||||
array->set_length(length);
|
||||
@ -1772,7 +1770,7 @@ Handle<BytecodeArray> Factory::NewBytecodeArray(
|
||||
DCHECK(!Heap::InNewSpace(*constant_pool));
|
||||
|
||||
int size = BytecodeArray::SizeFor(length);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map());
|
||||
Handle<BytecodeArray> instance(BytecodeArray::cast(result), isolate());
|
||||
instance->set_length(length);
|
||||
@ -1799,7 +1797,7 @@ Handle<FixedTypedArrayBase> Factory::NewFixedTypedArrayWithExternalPointer(
|
||||
// TODO(7881): Smi length check
|
||||
DCHECK(0 <= length && length <= Smi::kMaxValue);
|
||||
int size = FixedTypedArrayBase::kHeaderSize;
|
||||
HeapObject* result = AllocateRawWithImmortalMap(
|
||||
HeapObject result = AllocateRawWithImmortalMap(
|
||||
size, pretenure,
|
||||
ReadOnlyRoots(isolate()).MapForFixedTypedArray(array_type));
|
||||
Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(result),
|
||||
@ -1821,8 +1819,8 @@ Handle<FixedTypedArrayBase> Factory::NewFixedTypedArray(
|
||||
Map map = ReadOnlyRoots(isolate()).MapForFixedTypedArray(array_type);
|
||||
AllocationAlignment alignment =
|
||||
array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned;
|
||||
HeapObject* object = AllocateRawWithImmortalMap(static_cast<int>(size),
|
||||
pretenure, map, alignment);
|
||||
HeapObject object = AllocateRawWithImmortalMap(static_cast<int>(size),
|
||||
pretenure, map, alignment);
|
||||
|
||||
Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(object),
|
||||
isolate());
|
||||
@ -1839,7 +1837,7 @@ Handle<FixedTypedArrayBase> Factory::NewFixedTypedArray(
|
||||
Handle<Cell> Factory::NewCell(Handle<Object> value) {
|
||||
AllowDeferredHandleDereference convert_to_cell;
|
||||
STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(Cell::kSize, TENURED, *cell_map());
|
||||
Handle<Cell> cell(Cell::cast(result), isolate());
|
||||
cell->set_value(*value);
|
||||
@ -1848,8 +1846,8 @@ Handle<Cell> Factory::NewCell(Handle<Object> value) {
|
||||
|
||||
Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
|
||||
AllowDeferredHandleDereference convert_to_cell;
|
||||
HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*no_closures_cell_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*no_closures_cell_map());
|
||||
Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
|
||||
cell->set_value(*value);
|
||||
return cell;
|
||||
@ -1857,8 +1855,8 @@ Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
|
||||
|
||||
Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
|
||||
AllowDeferredHandleDereference convert_to_cell;
|
||||
HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*one_closure_cell_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*one_closure_cell_map());
|
||||
Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
|
||||
cell->set_value(*value);
|
||||
return cell;
|
||||
@ -1866,8 +1864,8 @@ Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
|
||||
|
||||
Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
|
||||
AllowDeferredHandleDereference convert_to_cell;
|
||||
HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*many_closures_cell_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*many_closures_cell_map());
|
||||
Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
|
||||
cell->set_value(*value);
|
||||
return cell;
|
||||
@ -1875,8 +1873,8 @@ Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
|
||||
|
||||
Handle<FeedbackCell> Factory::NewNoFeedbackCell() {
|
||||
AllowDeferredHandleDereference convert_to_cell;
|
||||
HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*no_feedback_cell_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
|
||||
*no_feedback_cell_map());
|
||||
Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
|
||||
// Set the value to undefined. We wouldn't allocate feedback vectors with
|
||||
// NoFeedbackCell map type.
|
||||
@ -1888,8 +1886,8 @@ Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name,
|
||||
PretenureFlag pretenure) {
|
||||
DCHECK(name->IsUniqueName());
|
||||
STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
|
||||
HeapObject* result = AllocateRawWithImmortalMap(
|
||||
PropertyCell::kSize, pretenure, *global_property_cell_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(PropertyCell::kSize, pretenure,
|
||||
*global_property_cell_map());
|
||||
Handle<PropertyCell> cell(PropertyCell::cast(result), isolate());
|
||||
cell->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
|
||||
SKIP_WRITE_BARRIER);
|
||||
@ -1908,7 +1906,7 @@ Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors,
|
||||
int size = DescriptorArray::SizeFor(number_of_all_descriptors);
|
||||
DCHECK_LT(size, kMaxRegularHeapObjectSize);
|
||||
AllocationSpace space = Heap::SelectSpace(pretenure);
|
||||
HeapObject* obj = isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
|
||||
HeapObject obj = isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
|
||||
obj->set_map_after_allocation(*descriptor_array_map(), SKIP_WRITE_BARRIER);
|
||||
DescriptorArray array = DescriptorArray::cast(obj);
|
||||
array->Initialize(*empty_enum_cache(), *undefined_value(),
|
||||
@ -1958,7 +1956,7 @@ Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
|
||||
!Map::CanHaveFastTransitionableElementsKind(type),
|
||||
IsDictionaryElementsKind(elements_kind) ||
|
||||
IsTerminalElementsKind(elements_kind));
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
isolate()->heap()->AllocateRawWithRetryOrFail(Map::kSize, MAP_SPACE);
|
||||
result->set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
|
||||
return handle(InitializeMap(Map::cast(result), type, instance_size,
|
||||
@ -2035,7 +2033,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
|
||||
int object_size = map->instance_size();
|
||||
int adjusted_object_size =
|
||||
site.is_null() ? object_size : object_size + AllocationMemento::kSize;
|
||||
HeapObject* raw_clone = isolate()->heap()->AllocateRawWithRetryOrFail(
|
||||
HeapObject raw_clone = isolate()->heap()->AllocateRawWithRetryOrFail(
|
||||
adjusted_object_size, NEW_SPACE);
|
||||
|
||||
SLOW_DCHECK(Heap::InNewSpace(raw_clone));
|
||||
@ -2046,7 +2044,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
|
||||
|
||||
if (!site.is_null()) {
|
||||
AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
|
||||
ObjectPtr(reinterpret_cast<Address>(raw_clone) + object_size));
|
||||
ObjectPtr(raw_clone->ptr() + object_size));
|
||||
InitializeAllocationMemento(alloc_memento, *site);
|
||||
}
|
||||
|
||||
@ -2100,7 +2098,7 @@ void initialize_length<PropertyArray>(Handle<PropertyArray> array, int length) {
|
||||
template <typename T>
|
||||
Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
|
||||
int len = src->length();
|
||||
HeapObject* obj = AllocateRawFixedArray(len, NOT_TENURED);
|
||||
HeapObject obj = AllocateRawFixedArray(len, NOT_TENURED);
|
||||
obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
|
||||
|
||||
Handle<T> result(T::cast(obj), isolate());
|
||||
@ -2126,7 +2124,7 @@ Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
|
||||
DCHECK_LE(grow_by, kMaxInt - src->length());
|
||||
int old_len = src->length();
|
||||
int new_len = old_len + grow_by;
|
||||
HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
HeapObject obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
|
||||
|
||||
Handle<T> result(T::cast(obj), isolate());
|
||||
@ -2158,7 +2156,7 @@ Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
|
||||
int old_len = src->length();
|
||||
int new_len = old_len + grow_by;
|
||||
DCHECK_GE(new_len, old_len);
|
||||
HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
HeapObject obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
DCHECK_EQ(old_len, src->length());
|
||||
obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
|
||||
|
||||
@ -2179,7 +2177,7 @@ Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
|
||||
int old_capacity = src->capacity();
|
||||
int new_capacity = old_capacity + grow_by;
|
||||
DCHECK_GE(new_capacity, old_capacity);
|
||||
HeapObject* obj = AllocateRawWeakArrayList(new_capacity, pretenure);
|
||||
HeapObject obj = AllocateRawWeakArrayList(new_capacity, pretenure);
|
||||
obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
|
||||
|
||||
WeakArrayList result = WeakArrayList::cast(obj);
|
||||
@ -2207,7 +2205,7 @@ Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array,
|
||||
DCHECK_LE(new_len, array->length());
|
||||
if (new_len == 0) return empty_fixed_array();
|
||||
|
||||
HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
HeapObject obj = AllocateRawFixedArray(new_len, pretenure);
|
||||
obj->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
|
||||
Handle<FixedArray> result(FixedArray::cast(obj), isolate());
|
||||
result->set_length(new_len);
|
||||
@ -2253,7 +2251,7 @@ Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
|
||||
Handle<FeedbackVector> Factory::CopyFeedbackVector(
|
||||
Handle<FeedbackVector> array) {
|
||||
int len = array->length();
|
||||
HeapObject* obj = AllocateRawWithImmortalMap(
|
||||
HeapObject obj = AllocateRawWithImmortalMap(
|
||||
FeedbackVector::SizeFor(len), NOT_TENURED, *feedback_vector_map());
|
||||
Handle<FeedbackVector> result(FeedbackVector::cast(obj), isolate());
|
||||
|
||||
@ -2305,8 +2303,8 @@ Handle<Object> Factory::NewNumberFromUint(uint32_t value,
|
||||
Handle<HeapNumber> Factory::NewHeapNumber(PretenureFlag pretenure) {
|
||||
STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
|
||||
Map map = *heap_number_map();
|
||||
HeapObject* result = AllocateRawWithImmortalMap(HeapNumber::kSize, pretenure,
|
||||
map, kDoubleUnaligned);
|
||||
HeapObject result = AllocateRawWithImmortalMap(HeapNumber::kSize, pretenure,
|
||||
map, kDoubleUnaligned);
|
||||
return handle(HeapNumber::cast(result), isolate());
|
||||
}
|
||||
|
||||
@ -2314,7 +2312,7 @@ Handle<MutableHeapNumber> Factory::NewMutableHeapNumber(
|
||||
PretenureFlag pretenure) {
|
||||
STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
|
||||
Map map = *mutable_heap_number_map();
|
||||
HeapObject* result = AllocateRawWithImmortalMap(
|
||||
HeapObject result = AllocateRawWithImmortalMap(
|
||||
MutableHeapNumber::kSize, pretenure, map, kDoubleUnaligned);
|
||||
return handle(MutableHeapNumber::cast(result), isolate());
|
||||
}
|
||||
@ -2324,8 +2322,8 @@ Handle<FreshlyAllocatedBigInt> Factory::NewBigInt(int length,
|
||||
if (length < 0 || length > BigInt::kMaxLength) {
|
||||
isolate()->heap()->FatalProcessOutOfMemory("invalid BigInt length");
|
||||
}
|
||||
HeapObject* result = AllocateRawWithImmortalMap(BigInt::SizeFor(length),
|
||||
pretenure, *bigint_map());
|
||||
HeapObject result = AllocateRawWithImmortalMap(BigInt::SizeFor(length),
|
||||
pretenure, *bigint_map());
|
||||
FreshlyAllocatedBigInt bigint = FreshlyAllocatedBigInt::cast(result);
|
||||
bigint->clear_padding();
|
||||
return handle(bigint, isolate());
|
||||
@ -2717,11 +2715,11 @@ MaybeHandle<Code> Factory::TryNewCode(
|
||||
|
||||
Heap* heap = isolate()->heap();
|
||||
CodePageCollectionMemoryModificationScope code_allocation(heap);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
heap->AllocateRawWithLightRetry(object_size, CODE_SPACE);
|
||||
|
||||
// Return an empty handle if we cannot allocate the code object.
|
||||
if (!result) return MaybeHandle<Code>();
|
||||
if (result.is_null()) return MaybeHandle<Code>();
|
||||
|
||||
if (movability == kImmovable) {
|
||||
result = heap->EnsureImmovableCode(result, object_size);
|
||||
@ -2775,7 +2773,7 @@ Handle<Code> Factory::NewCode(
|
||||
|
||||
Heap* heap = isolate()->heap();
|
||||
CodePageCollectionMemoryModificationScope code_allocation(heap);
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
heap->AllocateRawWithRetryOrFail(object_size, CODE_SPACE);
|
||||
if (movability == kImmovable) {
|
||||
result = heap->EnsureImmovableCode(result, object_size);
|
||||
@ -2864,7 +2862,7 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
|
||||
{
|
||||
int obj_size = code->Size();
|
||||
CodePageCollectionMemoryModificationScope code_allocation(heap);
|
||||
HeapObject* result = heap->AllocateRawWithRetryOrFail(obj_size, CODE_SPACE);
|
||||
HeapObject result = heap->AllocateRawWithRetryOrFail(obj_size, CODE_SPACE);
|
||||
|
||||
// Copy code object.
|
||||
Address old_addr = code->address();
|
||||
@ -2896,7 +2894,7 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
|
||||
Handle<BytecodeArray> Factory::CopyBytecodeArray(
|
||||
Handle<BytecodeArray> bytecode_array) {
|
||||
int size = BytecodeArray::SizeFor(bytecode_array->length());
|
||||
HeapObject* result =
|
||||
HeapObject result =
|
||||
AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map());
|
||||
|
||||
Handle<BytecodeArray> copy(BytecodeArray::cast(result), isolate());
|
||||
@ -3047,7 +3045,7 @@ Handle<JSObject> Factory::NewJSObjectFromMap(
|
||||
// AllocateGlobalObject to be properly initialized.
|
||||
DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
|
||||
|
||||
HeapObject* obj =
|
||||
HeapObject obj =
|
||||
AllocateRawWithAllocationSite(map, pretenure, allocation_site);
|
||||
Handle<JSObject> js_obj(JSObject::cast(obj), isolate());
|
||||
|
||||
@ -3906,7 +3904,7 @@ Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context,
|
||||
// Check to see whether there is a matching element in the cache.
|
||||
Handle<WeakFixedArray> cache = Handle<WeakFixedArray>::cast(maybe_cache);
|
||||
MaybeObject result = cache->Get(cache_index);
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (result->GetHeapObjectIfWeak(&heap_object)) {
|
||||
Map map = Map::cast(heap_object);
|
||||
DCHECK(!map->is_dictionary_map());
|
||||
|
@ -934,7 +934,7 @@ class V8_EXPORT_PRIVATE Factory {
|
||||
|
||||
Handle<CallHandlerInfo> NewCallHandlerInfo(bool has_no_side_effect = false);
|
||||
|
||||
HeapObject* NewForTest(Handle<Map> map, PretenureFlag pretenure) {
|
||||
HeapObject NewForTest(Handle<Map> map, PretenureFlag pretenure) {
|
||||
return New(map, pretenure);
|
||||
}
|
||||
|
||||
@ -947,17 +947,17 @@ class V8_EXPORT_PRIVATE Factory {
|
||||
return (Isolate*)this; // NOLINT(readability/casting)
|
||||
}
|
||||
|
||||
HeapObject* AllocateRawWithImmortalMap(
|
||||
HeapObject AllocateRawWithImmortalMap(
|
||||
int size, PretenureFlag pretenure, Map map,
|
||||
AllocationAlignment alignment = kWordAligned);
|
||||
HeapObject* AllocateRawWithAllocationSite(
|
||||
HeapObject AllocateRawWithAllocationSite(
|
||||
Handle<Map> map, PretenureFlag pretenure,
|
||||
Handle<AllocationSite> allocation_site);
|
||||
|
||||
// Allocate memory for an uninitialized array (e.g., a FixedArray or similar).
|
||||
HeapObject* AllocateRawArray(int size, PretenureFlag pretenure);
|
||||
HeapObject* AllocateRawFixedArray(int length, PretenureFlag pretenure);
|
||||
HeapObject* AllocateRawWeakArrayList(int length, PretenureFlag pretenure);
|
||||
HeapObject AllocateRawArray(int size, PretenureFlag pretenure);
|
||||
HeapObject AllocateRawFixedArray(int length, PretenureFlag pretenure);
|
||||
HeapObject AllocateRawWeakArrayList(int length, PretenureFlag pretenure);
|
||||
Handle<FixedArray> NewFixedArrayWithFiller(RootIndex map_root_index,
|
||||
int length, Object* filler,
|
||||
PretenureFlag pretenure);
|
||||
@ -974,7 +974,7 @@ class V8_EXPORT_PRIVATE Factory {
|
||||
|
||||
// Creates a heap object based on the map. The fields of the heap object are
|
||||
// not initialized, it's the responsibility of the caller to do that.
|
||||
HeapObject* New(Handle<Map> map, PretenureFlag pretenure);
|
||||
HeapObject New(Handle<Map> map, PretenureFlag pretenure);
|
||||
|
||||
template <typename T>
|
||||
Handle<T> CopyArrayWithMap(Handle<T> src, Handle<Map> map);
|
||||
|
@ -54,7 +54,7 @@ AllocationSpace AllocationResult::RetrySpace() {
return static_cast<AllocationSpace>(Smi::ToInt(object_));
}

HeapObject* AllocationResult::ToObjectChecked() {
HeapObject AllocationResult::ToObjectChecked() {
CHECK(!IsRetry());
return HeapObject::cast(object_);
}
@ -179,7 +179,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,

bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;

HeapObject* object = nullptr;
HeapObject object;
AllocationResult allocation;
if (NEW_SPACE == space) {
if (large_object) {
@ -243,7 +243,7 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
|
||||
return allocation;
|
||||
}
|
||||
|
||||
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
|
||||
void Heap::OnAllocationEvent(HeapObject object, int size_in_bytes) {
|
||||
for (auto& tracker : allocation_trackers_) {
|
||||
tracker->AllocationEvent(object->address(), size_in_bytes);
|
||||
}
|
||||
@ -269,8 +269,7 @@ void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
|
||||
void Heap::OnMoveEvent(HeapObject target, HeapObject source,
|
||||
int size_in_bytes) {
|
||||
HeapProfiler* heap_profiler = isolate_->heap_profiler();
|
||||
if (heap_profiler->is_tracking_object_moves()) {
|
||||
@ -308,7 +307,7 @@ bool Heap::CanAllocateInReadOnlySpace() {
|
||||
!isolate()->initialized_from_snapshot());
|
||||
}
|
||||
|
||||
void Heap::UpdateAllocationsHash(HeapObject* object) {
|
||||
void Heap::UpdateAllocationsHash(HeapObject object) {
|
||||
Address object_address = object->address();
|
||||
MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
|
||||
AllocationSpace allocation_space = memory_chunk->owner()->identity();
|
||||
@ -321,7 +320,6 @@ void Heap::UpdateAllocationsHash(HeapObject* object) {
|
||||
UpdateAllocationsHash(value);
|
||||
}
|
||||
|
||||
|
||||
void Heap::UpdateAllocationsHash(uint32_t value) {
|
||||
uint16_t c1 = static_cast<uint16_t>(value);
|
||||
uint16_t c2 = static_cast<uint16_t>(value >> 16);
|
||||
@ -380,29 +378,14 @@ bool Heap::InNewSpace(Object* object) {
|
||||
|
||||
// static
|
||||
bool Heap::InNewSpace(MaybeObject object) {
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
return object->GetHeapObject(&heap_object) && InNewSpace(heap_object);
|
||||
}
|
||||
|
||||
// static
|
||||
bool Heap::InNewSpace(HeapObject* heap_object) {
|
||||
bool Heap::InNewSpace(HeapObject heap_object) {
|
||||
// Inlined check from NewSpace::Contains.
|
||||
bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
|
||||
#ifdef DEBUG
|
||||
// If in NEW_SPACE, then check we're either not in the middle of GC or the
|
||||
// object is in to-space.
|
||||
if (result) {
|
||||
// If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
|
||||
Heap* heap = Heap::FromWritableHeapObject(heap_object);
|
||||
DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
|
||||
}
|
||||
#endif
|
||||
return result;
|
||||
}
|
||||
|
||||
// static
|
||||
bool Heap::InNewSpace(HeapObjectPtr heap_object) {
|
||||
bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
|
||||
#ifdef DEBUG
|
||||
// If in NEW_SPACE, then check we're either not in the middle of GC or the
|
||||
// object is in to-space.
|
||||
@ -423,12 +406,12 @@ bool Heap::InFromSpace(Object* object) {
|
||||
|
||||
// static
|
||||
bool Heap::InFromSpace(MaybeObject object) {
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
return object->GetHeapObject(&heap_object) && InFromSpace(heap_object);
|
||||
}
|
||||
|
||||
// static
|
||||
bool Heap::InFromSpace(HeapObject* heap_object) {
|
||||
bool Heap::InFromSpace(HeapObject heap_object) {
|
||||
return MemoryChunk::FromHeapObject(heap_object)
|
||||
->IsFlagSet(Page::IN_FROM_SPACE);
|
||||
}
|
||||
@ -441,17 +424,12 @@ bool Heap::InToSpace(Object* object) {
|
||||
|
||||
// static
|
||||
bool Heap::InToSpace(MaybeObject object) {
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
return object->GetHeapObject(&heap_object) && InToSpace(heap_object);
|
||||
}
|
||||
|
||||
// static
|
||||
bool Heap::InToSpace(HeapObject* heap_object) {
|
||||
return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
|
||||
}
|
||||
|
||||
// static
|
||||
bool Heap::InToSpace(HeapObjectPtr heap_object) {
|
||||
bool Heap::InToSpace(HeapObject heap_object) {
|
||||
return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
|
||||
}
|
||||
|
||||
@ -462,7 +440,7 @@ bool Heap::InReadOnlySpace(Object* object) {
|
||||
}
|
||||
|
||||
// static
|
||||
Heap* Heap::FromWritableHeapObject(const HeapObject* obj) {
|
||||
Heap* Heap::FromWritableHeapObject(const HeapObject obj) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
|
||||
// RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
|
||||
// find a heap. The exception is when the ReadOnlySpace is writeable, during
|
||||
@ -475,16 +453,8 @@ Heap* Heap::FromWritableHeapObject(const HeapObject* obj) {
|
||||
}
|
||||
|
||||
// static
|
||||
Heap* Heap::FromWritableHeapObject(const HeapObjectPtr* obj) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*obj);
|
||||
// RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
|
||||
// find a heap. The exception is when the ReadOnlySpace is writeable, during
|
||||
// bootstrapping, so explicitly allow this case.
|
||||
SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE ||
|
||||
static_cast<ReadOnlySpace*>(chunk->owner())->writable());
|
||||
Heap* heap = chunk->heap();
|
||||
SLOW_DCHECK(heap != nullptr);
|
||||
return heap;
|
||||
Heap* Heap::FromWritableHeapObject(const HeapObject* obj) {
|
||||
return FromWritableHeapObject(*obj);
|
||||
}
|
||||
|
||||
bool Heap::ShouldBePromoted(Address old_address) {
|
||||
@ -501,7 +471,7 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
|
||||
}
|
||||
|
||||
template <Heap::FindMementoMode mode>
|
||||
AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject* object) {
|
||||
AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
|
||||
Address object_address = object->address();
|
||||
Address memento_address = object_address + object->SizeFromMap(map);
|
||||
Address last_memento_word_address = memento_address + kTaggedSize;
|
||||
@ -509,7 +479,7 @@ AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject* object) {
|
||||
if (!Page::OnSamePage(object_address, last_memento_word_address)) {
|
||||
return AllocationMemento();
|
||||
}
|
||||
HeapObject* candidate = HeapObject::FromAddress(memento_address);
|
||||
HeapObject candidate = HeapObject::FromAddress(memento_address);
|
||||
MapWordSlot candidate_map_slot = candidate->map_slot();
|
||||
// This fast check may peek at an uninitialized word. However, the slow check
|
||||
// below (memento_address == top) ensures that this is safe. Mark the word as
|
||||
@ -562,7 +532,7 @@ AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject* object) {
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
void Heap::UpdateAllocationSite(Map map, HeapObject* object,
|
||||
void Heap::UpdateAllocationSite(Map map, HeapObject object,
|
||||
PretenuringFeedbackMap* pretenuring_feedback) {
|
||||
DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
|
||||
DCHECK(
|
||||
|
@ -26,14 +26,15 @@ namespace heap_internals {
|
||||
|
||||
struct MemoryChunk {
|
||||
static constexpr uintptr_t kFlagsOffset = sizeof(size_t);
|
||||
static constexpr uintptr_t kHeapOffset =
|
||||
kFlagsOffset + kUIntptrSize + 4 * kSystemPointerSize;
|
||||
static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 18;
|
||||
static constexpr uintptr_t kFromSpaceBit = uintptr_t{1} << 3;
|
||||
static constexpr uintptr_t kToSpaceBit = uintptr_t{1} << 4;
|
||||
|
||||
V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
|
||||
HeapObject* object) {
|
||||
return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(object) &
|
||||
~kPageAlignmentMask);
|
||||
HeapObject object) {
|
||||
return reinterpret_cast<MemoryChunk*>(object->ptr() & ~kPageAlignmentMask);
|
||||
}
|
||||
|
||||
V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }
|
||||
@ -44,13 +45,20 @@ struct MemoryChunk {
|
||||
}
|
||||
|
||||
V8_INLINE uintptr_t GetFlags() const {
|
||||
return *reinterpret_cast<const uintptr_t*>(
|
||||
reinterpret_cast<const uint8_t*>(this) + kFlagsOffset);
|
||||
return *reinterpret_cast<const uintptr_t*>(reinterpret_cast<Address>(this) +
|
||||
kFlagsOffset);
|
||||
}
|
||||
|
||||
V8_INLINE Heap* GetHeap() {
|
||||
Heap* heap = *reinterpret_cast<Heap**>(reinterpret_cast<Address>(this) +
|
||||
kHeapOffset);
|
||||
SLOW_DCHECK(heap != nullptr);
|
||||
return heap;
|
||||
}
|
||||
};
|
||||
|
||||
inline void GenerationalBarrierInternal(HeapObject* object, Address slot,
|
||||
HeapObject* value) {
|
||||
inline void GenerationalBarrierInternal(HeapObject object, Address slot,
|
||||
HeapObject value) {
|
||||
DCHECK(Heap::PageFlagsAreConsistent(object));
|
||||
heap_internals::MemoryChunk* value_chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(value);
|
||||
@ -62,8 +70,8 @@ inline void GenerationalBarrierInternal(HeapObject* object, Address slot,
|
||||
Heap::GenerationalBarrierSlow(object, slot, value);
|
||||
}
|
||||
|
||||
inline void MarkingBarrierInternal(HeapObject* object, Address slot,
|
||||
HeapObject* value) {
|
||||
inline void MarkingBarrierInternal(HeapObject object, Address slot,
|
||||
HeapObject value) {
|
||||
DCHECK(Heap::PageFlagsAreConsistent(object));
|
||||
heap_internals::MemoryChunk* value_chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(value);
|
||||
@ -78,7 +86,7 @@ inline void MarkingBarrierInternal(HeapObject* object, Address slot,
|
||||
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object* value) {
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
if (!value->IsHeapObject()) return;
|
||||
HeapObject* object = HeapObject::cast(value);
|
||||
HeapObject object = HeapObject::cast(value);
|
||||
GenerationalBarrierForCode(host, rinfo, object);
|
||||
MarkingBarrierForCode(host, rinfo, object);
|
||||
}
|
||||
@ -87,7 +95,7 @@ inline void WriteBarrierForCode(Code host) {
|
||||
Heap::WriteBarrierForCodeSlow(host);
|
||||
}
|
||||
|
||||
inline void GenerationalBarrier(HeapObject* object, ObjectSlot slot,
|
||||
inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
|
||||
Object* value) {
|
||||
DCHECK(!HasWeakHeapObjectTag(*slot));
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
@ -96,33 +104,19 @@ inline void GenerationalBarrier(HeapObject* object, ObjectSlot slot,
|
||||
HeapObject::cast(value));
|
||||
}
|
||||
|
||||
inline void GenerationalBarrier(HeapObject* object, ObjectSlot slot,
|
||||
Object* value) {
|
||||
GenerationalBarrier(*object, slot, value);
|
||||
}
|
||||
|
||||
inline void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
|
||||
MaybeObject value) {
|
||||
HeapObject* value_heap_object;
|
||||
HeapObject value_heap_object;
|
||||
if (!value->GetHeapObject(&value_heap_object)) return;
|
||||
heap_internals::GenerationalBarrierInternal(object, slot.address(),
|
||||
heap_internals::GenerationalBarrierInternal(*object, slot.address(),
|
||||
value_heap_object);
|
||||
}
|
||||
|
||||
inline void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot,
|
||||
Object* value) {
|
||||
DCHECK(!HasWeakHeapObjectTag(*slot));
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
if (!value->IsHeapObject()) return;
|
||||
heap_internals::GenerationalBarrierInternal(
|
||||
reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
|
||||
HeapObject::cast(value));
|
||||
}
|
||||
|
||||
inline void GenerationalBarrier(HeapObjectPtr* object, MaybeObjectSlot slot,
|
||||
MaybeObject value) {
|
||||
HeapObject* value_heap_object;
|
||||
if (!value->GetHeapObject(&value_heap_object)) return;
|
||||
heap_internals::GenerationalBarrierInternal(
|
||||
reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
|
||||
value_heap_object);
|
||||
}
|
||||
|
||||
inline void GenerationalBarrierForElements(Heap* heap, FixedArray array,
|
||||
int offset, int length) {
|
||||
heap_internals::MemoryChunk* array_chunk =
|
||||
@ -133,14 +127,14 @@ inline void GenerationalBarrierForElements(Heap* heap, FixedArray array,
|
||||
}
|
||||
|
||||
inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
|
||||
HeapObject* object) {
|
||||
HeapObject object) {
|
||||
heap_internals::MemoryChunk* object_chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(object);
|
||||
if (!object_chunk->InNewSpace()) return;
|
||||
Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
|
||||
}
|
||||
|
||||
inline void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value) {
|
||||
inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object* value) {
|
||||
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
if (!value->IsHeapObject()) return;
|
||||
@ -148,34 +142,19 @@ inline void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value) {
|
||||
HeapObject::cast(value));
|
||||
}
|
||||
|
||||
inline void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value) {
|
||||
MarkingBarrier(*object, slot, value);
|
||||
}
|
||||
|
||||
inline void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
|
||||
MaybeObject value) {
|
||||
HeapObject* value_heap_object;
|
||||
HeapObject value_heap_object;
|
||||
if (!value->GetHeapObject(&value_heap_object)) return;
|
||||
heap_internals::MarkingBarrierInternal(object, slot.address(),
|
||||
heap_internals::MarkingBarrierInternal(*object, slot.address(),
|
||||
value_heap_object);
|
||||
}
|
||||
|
||||
inline void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot,
|
||||
Object* value) {
|
||||
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
if (!value->IsHeapObject()) return;
|
||||
heap_internals::MarkingBarrierInternal(
|
||||
reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
|
||||
HeapObject::cast(value));
|
||||
}
|
||||
|
||||
inline void MarkingBarrier(HeapObjectPtr* object, MaybeObjectSlot slot,
|
||||
MaybeObject value) {
|
||||
HeapObject* value_heap_object;
|
||||
if (!value->GetHeapObject(&value_heap_object)) return;
|
||||
heap_internals::MarkingBarrierInternal(
|
||||
reinterpret_cast<HeapObject*>(object->ptr()), slot.address(),
|
||||
value_heap_object);
|
||||
}
|
||||
|
||||
inline void MarkingBarrierForElements(Heap* heap, HeapObject* object) {
|
||||
inline void MarkingBarrierForElements(Heap* heap, HeapObject object) {
|
||||
heap_internals::MemoryChunk* object_chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(object);
|
||||
if (!object_chunk->IsMarking()) return;
|
||||
@ -184,8 +163,8 @@ inline void MarkingBarrierForElements(Heap* heap, HeapObject* object) {
|
||||
}
|
||||
|
||||
inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
|
||||
HeapObject* object) {
|
||||
DCHECK(!HasWeakHeapObjectTag(object));
|
||||
HeapObject object) {
|
||||
DCHECK(!HasWeakHeapObjectTag(object.ptr()));
|
||||
heap_internals::MemoryChunk* object_chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(object);
|
||||
if (!object_chunk->IsMarking()) return;
|
||||
@ -193,7 +172,7 @@ inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
|
||||
}
|
||||
|
||||
inline void MarkingBarrierForDescriptorArray(Heap* heap,
|
||||
HeapObject* descriptor_array,
|
||||
HeapObject descriptor_array,
|
||||
int number_of_own_descriptors) {
|
||||
heap_internals::MemoryChunk* chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(descriptor_array);
|
||||
@ -203,6 +182,12 @@ inline void MarkingBarrierForDescriptorArray(Heap* heap,
|
||||
number_of_own_descriptors);
|
||||
}
|
||||
|
||||
inline Heap* GetHeapFromWritableObject(const HeapObject object) {
|
||||
heap_internals::MemoryChunk* chunk =
|
||||
heap_internals::MemoryChunk::FromHeapObject(object);
|
||||
return chunk->GetHeap();
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
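Illustrative sketch only, not part of this CL (the real class is declared in src/objects/heap-object.h, which this excerpt does not show): the hunks above consistently replace raw HeapObject* pointers and nullptr checks with a value-type HeapObject, so null tests become is_null() and address arithmetic goes through ptr(). With hypothetical stand-in names, the pattern reduces to roughly the following:

#include <cstdint>

using Address = uintptr_t;           // assumption: tagged addresses are pointer-sized
constexpr Address kNullAddress = 0;

// Hypothetical stand-in for the value-type HeapObject used throughout this CL.
class HeapObjectLike {
 public:
  HeapObjectLike() : ptr_(kNullAddress) {}             // default-constructed "null" object
  explicit HeapObjectLike(Address ptr) : ptr_(ptr) {}

  Address ptr() const { return ptr_; }                  // raw tagged address
  bool is_null() const { return ptr_ == kNullAddress; } // replaces '== nullptr'

 private:
  Address ptr_;  // single word; the object is passed and stored by value
};

Under this pattern a check like 'if (!result) return ...;' on the old HeapObject* becomes 'if (result.is_null()) return ...;', as in the TryNewCode hunk earlier in this diff.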
@ -15,7 +15,6 @@ class Code;
class FixedArray;
class Heap;
class HeapObject;
class HeapObjectPtr;
class MaybeObject;
class Object;
class RelocInfo;
@ -35,36 +34,35 @@ void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object* value);
void WriteBarrierForCode(Code host);

// Generational write barrier.
// This takes a HeapObject* (as opposed to a plain HeapObject)
// to keep the WRITE_BARRIER macro syntax-compatible to the old HeapObject*
// version.
// TODO(3770): This should probably take a HeapObject eventually.
void GenerationalBarrier(HeapObject* object, ObjectSlot slot, Object* value);
void GenerationalBarrier(HeapObject object, ObjectSlot slot, Object* value);
void GenerationalBarrier(HeapObject* object, MaybeObjectSlot slot,
MaybeObject value);
// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
// version above.
// TODO(3770): This should probably take a HeapObjectPtr eventually.
void GenerationalBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
void GenerationalBarrier(HeapObjectPtr* object, MaybeObjectSlot slot,
MaybeObject value);
void GenerationalBarrierForElements(Heap* heap, FixedArray array, int offset,
int length);
void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
HeapObject* object);
void GenerationalBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);

// Marking write barrier.
// This takes a HeapObject* (as opposed to a plain HeapObject)
// to keep the WRITE_BARRIER macro syntax-compatible to the old HeapObject*
// version.
// TODO(3770): This should probably take a HeapObject eventually.
void MarkingBarrier(HeapObject* object, ObjectSlot slot, Object* value);
void MarkingBarrier(HeapObject object, ObjectSlot slot, Object* value);
void MarkingBarrier(HeapObject* object, MaybeObjectSlot slot,
MaybeObject value);
// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
// version above.
// TODO(3770): This should probably take a HeapObjectPtr eventually.
void MarkingBarrier(HeapObjectPtr* object, ObjectSlot slot, Object* value);
void MarkingBarrier(HeapObjectPtr* object, MaybeObjectSlot slot,
MaybeObject value);
void MarkingBarrierForElements(Heap* heap, HeapObject* object);
void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject* object);
void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject* descriptor_array,
void MarkingBarrierForElements(Heap* heap, HeapObject object);
void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);

void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject descriptor_array,
int number_of_own_descriptors);

Heap* GetHeapFromWritableObject(const HeapObject object);

} // namespace internal
} // namespace v8
273
src/heap/heap.cc
@ -436,7 +436,7 @@ void Heap::AddRetainingPathTarget(Handle<HeapObject> object,
}
}

bool Heap::IsRetainingPathTarget(HeapObject* object,
bool Heap::IsRetainingPathTarget(HeapObject object,
RetainingPathOption* option) {
WeakArrayList targets = retaining_path_targets();
int length = targets->length();
@ -453,12 +453,12 @@ bool Heap::IsRetainingPathTarget(HeapObject* object,
return false;
}

void Heap::PrintRetainingPath(HeapObject* target, RetainingPathOption option) {
void Heap::PrintRetainingPath(HeapObject target, RetainingPathOption option) {
PrintF("\n\n\n");
PrintF("#################################################\n");
PrintF("Retaining path for %p:\n", static_cast<void*>(target));
HeapObject* object = target;
std::vector<std::pair<HeapObject*, bool>> retaining_path;
PrintF("Retaining path for %p:\n", reinterpret_cast<void*>(target->ptr()));
HeapObject object = target;
std::vector<std::pair<HeapObject, bool>> retaining_path;
Root root = Root::kUnknown;
bool ephemeron = false;
while (true) {
@ -479,7 +479,7 @@ void Heap::PrintRetainingPath(HeapObject* target, RetainingPathOption option) {
|
||||
}
|
||||
int distance = static_cast<int>(retaining_path.size());
|
||||
for (auto node : retaining_path) {
|
||||
HeapObject* object = node.first;
|
||||
HeapObject object = node.first;
|
||||
bool ephemeron = node.second;
|
||||
PrintF("\n");
|
||||
PrintF("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n");
|
||||
@ -499,7 +499,7 @@ void Heap::PrintRetainingPath(HeapObject* target, RetainingPathOption option) {
|
||||
PrintF("-------------------------------------------------\n");
|
||||
}
|
||||
|
||||
void Heap::AddRetainer(HeapObject* retainer, HeapObject* object) {
|
||||
void Heap::AddRetainer(HeapObject retainer, HeapObject object) {
|
||||
if (retainer_.count(object)) return;
|
||||
retainer_[object] = retainer;
|
||||
RetainingPathOption option = RetainingPathOption::kDefault;
|
||||
@ -513,7 +513,7 @@ void Heap::AddRetainer(HeapObject* retainer, HeapObject* object) {
|
||||
}
|
||||
}
|
||||
|
||||
void Heap::AddEphemeronRetainer(HeapObject* retainer, HeapObject* object) {
|
||||
void Heap::AddEphemeronRetainer(HeapObject retainer, HeapObject object) {
|
||||
if (ephemeron_retainer_.count(object)) return;
|
||||
ephemeron_retainer_[object] = retainer;
|
||||
RetainingPathOption option = RetainingPathOption::kDefault;
|
||||
@ -526,7 +526,7 @@ void Heap::AddEphemeronRetainer(HeapObject* retainer, HeapObject* object) {
|
||||
}
|
||||
}
|
||||
|
||||
void Heap::AddRetainingRoot(Root root, HeapObject* object) {
|
||||
void Heap::AddRetainingRoot(Root root, HeapObject object) {
|
||||
if (retaining_root_.count(object)) return;
|
||||
retaining_root_[object] = root;
|
||||
RetainingPathOption option = RetainingPathOption::kDefault;
|
||||
@ -1070,7 +1070,7 @@ void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
|
||||
|
||||
namespace {
|
||||
|
||||
intptr_t CompareWords(int size, HeapObject* a, HeapObject* b) {
|
||||
intptr_t CompareWords(int size, HeapObject a, HeapObject b) {
|
||||
int slots = size / kTaggedSize;
|
||||
DCHECK_EQ(a->Size(), size);
|
||||
DCHECK_EQ(b->Size(), size);
|
||||
@ -1086,17 +1086,17 @@ intptr_t CompareWords(int size, HeapObject* a, HeapObject* b) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ReportDuplicates(int size, std::vector<HeapObject*>& objects) {
|
||||
void ReportDuplicates(int size, std::vector<HeapObject>& objects) {
|
||||
if (objects.size() == 0) return;
|
||||
|
||||
sort(objects.begin(), objects.end(), [size](HeapObject* a, HeapObject* b) {
|
||||
sort(objects.begin(), objects.end(), [size](HeapObject a, HeapObject b) {
|
||||
intptr_t c = CompareWords(size, a, b);
|
||||
if (c != 0) return c < 0;
|
||||
return a < b;
|
||||
});
|
||||
|
||||
std::vector<std::pair<int, HeapObject*>> duplicates;
|
||||
HeapObject* current = objects[0];
|
||||
std::vector<std::pair<int, HeapObject>> duplicates;
|
||||
HeapObject current = objects[0];
|
||||
int count = 1;
|
||||
for (size_t i = 1; i < objects.size(); i++) {
|
||||
if (CompareWords(size, current, objects[i]) == 0) {
|
||||
@ -1170,18 +1170,18 @@ void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
|
||||
EagerlyFreeExternalMemory();
|
||||
|
||||
if (FLAG_trace_duplicate_threshold_kb) {
|
||||
std::map<int, std::vector<HeapObject*>> objects_by_size;
|
||||
std::map<int, std::vector<HeapObject>> objects_by_size;
|
||||
PagedSpaces spaces(this);
|
||||
for (PagedSpace* space = spaces.next(); space != nullptr;
|
||||
space = spaces.next()) {
|
||||
HeapObjectIterator it(space);
|
||||
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
|
||||
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
|
||||
objects_by_size[obj->Size()].push_back(obj);
|
||||
}
|
||||
}
|
||||
{
|
||||
LargeObjectIterator it(lo_space());
|
||||
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
|
||||
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
|
||||
objects_by_size[obj->Size()].push_back(obj);
|
||||
}
|
||||
}
|
||||
@ -1458,20 +1458,20 @@ class StringTableVerifier : public ObjectVisitor {
|
||||
public:
|
||||
explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
|
||||
|
||||
void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
// Visit all HeapObject pointers in [start, end).
|
||||
for (ObjectSlot p = start; p < end; ++p) {
|
||||
DCHECK(!HasWeakHeapObjectTag(*p));
|
||||
if ((*p)->IsHeapObject()) {
|
||||
HeapObject* object = HeapObject::cast(*p);
|
||||
HeapObject object = HeapObject::cast(*p);
|
||||
// Check that the string is actually internalized.
|
||||
CHECK(object->IsTheHole(isolate_) || object->IsUndefined(isolate_) ||
|
||||
object->IsInternalizedString());
|
||||
}
|
||||
}
|
||||
}
|
||||
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override {
|
||||
UNREACHABLE();
|
||||
}
|
||||
@ -1519,7 +1519,7 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
|
||||
// The deserializer will update the skip list.
|
||||
AllocationResult allocation = map_space()->AllocateRawUnaligned(
|
||||
Map::kSize, PagedSpace::IGNORE_SKIP_LIST);
|
||||
HeapObject* free_space = nullptr;
|
||||
HeapObject free_space;
|
||||
if (allocation.To(&free_space)) {
|
||||
// Mark with a free list node, in case we have a GC before
|
||||
// deserializing.
|
||||
@ -1552,7 +1552,7 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
|
||||
allocation = paged_space(space)->AllocateRawUnaligned(
|
||||
size, PagedSpace::IGNORE_SKIP_LIST);
|
||||
}
|
||||
HeapObject* free_space = nullptr;
|
||||
HeapObject free_space;
|
||||
if (allocation.To(&free_space)) {
|
||||
// Mark with a free list node, in case we have a GC before
|
||||
// deserializing.
|
||||
@ -2029,7 +2029,7 @@ void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) {
|
||||
}
|
||||
}
|
||||
|
||||
void Heap::UnprotectAndRegisterMemoryChunk(HeapObject* object) {
|
||||
void Heap::UnprotectAndRegisterMemoryChunk(HeapObject object) {
|
||||
UnprotectAndRegisterMemoryChunk(MemoryChunk::FromAddress(object->address()));
|
||||
}
|
||||
|
||||
@ -2367,16 +2367,14 @@ int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) {
|
||||
HeapObject Heap::PrecedeWithFiller(HeapObject object, int filler_size) {
|
||||
CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo);
|
||||
return HeapObject::FromAddress(object->address() + filler_size);
|
||||
}
|
||||
|
||||
|
||||
HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
|
||||
int allocation_size,
|
||||
AllocationAlignment alignment) {
|
||||
HeapObject Heap::AlignWithFiller(HeapObject object, int object_size,
|
||||
int allocation_size,
|
||||
AllocationAlignment alignment) {
|
||||
int filler_size = allocation_size - object_size;
|
||||
DCHECK_LT(0, filler_size);
|
||||
int pre_filler = GetFillToAlign(object->address(), alignment);
|
||||
@ -2384,9 +2382,10 @@ HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
|
||||
object = PrecedeWithFiller(object, pre_filler);
|
||||
filler_size -= pre_filler;
|
||||
}
|
||||
if (filler_size)
|
||||
if (filler_size) {
|
||||
CreateFillerObjectAt(object->address() + object_size, filler_size,
|
||||
ClearRecordedSlots::kNo);
|
||||
}
|
||||
return object;
|
||||
}
|
||||
|
||||
@ -2418,11 +2417,11 @@ void Heap::FlushNumberStringCache() {
|
||||
}
|
||||
}
|
||||
|
||||
HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
|
||||
ClearRecordedSlots clear_slots_mode,
|
||||
ClearFreedMemoryMode clear_memory_mode) {
|
||||
if (size == 0) return nullptr;
|
||||
HeapObject* filler = HeapObject::FromAddress(addr);
|
||||
HeapObject Heap::CreateFillerObjectAt(Address addr, int size,
|
||||
ClearRecordedSlots clear_slots_mode,
|
||||
ClearFreedMemoryMode clear_memory_mode) {
|
||||
if (size == 0) return HeapObject();
|
||||
HeapObject filler = HeapObject::FromAddress(addr);
|
||||
if (size == kTaggedSize) {
|
||||
filler->set_map_after_allocation(
|
||||
Map::unchecked_cast(isolate()->root(RootIndex::kOnePointerFillerMap)),
|
||||
@ -2458,8 +2457,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
|
||||
return filler;
|
||||
}
|
||||
|
||||
|
||||
bool Heap::CanMoveObjectStart(HeapObject* object) {
|
||||
bool Heap::CanMoveObjectStart(HeapObject object) {
|
||||
if (!FLAG_move_object_start) return false;
|
||||
|
||||
// Sampling heap profiler may have a reference to the object.
|
||||
@ -2473,12 +2471,12 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {
|
||||
return Page::FromAddress(address)->SweepingDone();
|
||||
}
|
||||
|
||||
bool Heap::IsImmovable(HeapObject* object) {
|
||||
bool Heap::IsImmovable(HeapObject object) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
|
||||
return chunk->NeverEvacuate() || IsLargeObject(object);
|
||||
}
|
||||
|
||||
bool Heap::IsLargeObject(HeapObject* object) {
|
||||
bool Heap::IsLargeObject(HeapObject object) {
|
||||
return IsLargeMemoryChunk(MemoryChunk::FromHeapObject(object));
|
||||
}
|
||||
|
||||
@ -2488,7 +2486,7 @@ bool Heap::IsLargeMemoryChunk(MemoryChunk* chunk) {
|
||||
chunk->owner()->identity() == CODE_LO_SPACE;
|
||||
}
|
||||
|
||||
bool Heap::IsInYoungGeneration(HeapObject* object) {
|
||||
bool Heap::IsInYoungGeneration(HeapObject object) {
|
||||
if (MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace()) {
|
||||
return !object->map_word().IsForwardingAddress();
|
||||
}
|
||||
@ -2519,7 +2517,7 @@ class LeftTrimmerVerifierRootVisitor : public RootVisitor {
|
||||
#endif // ENABLE_SLOW_DCHECKS
|
||||
|
||||
namespace {
|
||||
bool MayContainRecordedSlots(HeapObject* object) {
|
||||
bool MayContainRecordedSlots(HeapObject object) {
|
||||
// New space object do not have recorded slots.
|
||||
if (MemoryChunk::FromHeapObject(object)->InNewSpace()) return false;
|
||||
// Whitelist objects that definitely do not have pointers.
|
||||
@ -2569,7 +2567,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
|
||||
// Technically in new space this write might be omitted (except for
|
||||
// debug mode which iterates through the heap), but to play safer
|
||||
// we still do it.
|
||||
HeapObject* filler =
|
||||
HeapObject filler =
|
||||
CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
|
||||
|
||||
// Initialize header of the trimmed array. Since left trimming is only
|
||||
@ -2691,9 +2689,9 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim,
|
||||
// we still do it.
|
||||
// We do not create a filler for objects in a large object space.
|
||||
if (!IsLargeObject(object)) {
|
||||
HeapObject* filler =
|
||||
HeapObject filler =
|
||||
CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
|
||||
DCHECK_NOT_NULL(filler);
|
||||
DCHECK(!filler.is_null());
|
||||
// Clear the mark bits of the black area that belongs now to the filler.
|
||||
// This is an optimization. The sweeper will release black fillers anyway.
if (incremental_marking()->black_allocation() &&
@ -2928,7 +2926,7 @@ bool Heap::FinalizeIncrementalMarkingIncrementally(
}

void Heap::RegisterDeserializedObjectsForBlackAllocation(
Reservation* reservations, const std::vector<HeapObject*>& large_objects,
Reservation* reservations, const std::vector<HeapObject>& large_objects,
const std::vector<Address>& maps) {
// TODO(ulan): pause black allocation during deserialization to avoid
// iterating all these objects in one go.
@ -2944,7 +2942,7 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
for (auto& chunk : res) {
Address addr = chunk.start;
while (addr < chunk.end) {
HeapObject* obj = HeapObject::FromAddress(addr);
HeapObject obj = HeapObject::FromAddress(addr);
// Objects can have any color because incremental marking can
// start in the middle of Heap::ReserveSpace().
if (marking_state->IsBlack(obj)) {
@ -2956,7 +2954,7 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
}

// Large object space doesn't use reservations, so it needs custom handling.
for (HeapObject* object : large_objects) {
for (HeapObject object : large_objects) {
incremental_marking()->ProcessBlackAllocatedObject(object);
}

@ -2967,7 +2965,7 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
}
}

void Heap::NotifyObjectLayoutChange(HeapObject* object, int size,
void Heap::NotifyObjectLayoutChange(HeapObject object, int size,
const DisallowHeapAllocation&) {
if (incremental_marking()->IsMarking()) {
incremental_marking()->MarkBlackAndPush(object);
@ -2989,11 +2987,11 @@ void Heap::NotifyObjectLayoutChange(HeapObject* object, int size,
// Helper class for collecting slot addresses.
class SlotCollectingVisitor final : public ObjectVisitor {
public:
void VisitPointers(HeapObject* host, ObjectSlot start,
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
}
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
for (MaybeObjectSlot p = start; p < end; ++p) {
slots_.push_back(p);
@ -3014,7 +3012,7 @@ class SlotCollectingVisitor final : public ObjectVisitor {
std::vector<MaybeObjectSlot> slots_;
};

void Heap::VerifyObjectLayoutChange(HeapObject* object, Map new_map) {
void Heap::VerifyObjectLayoutChange(HeapObject object, Map new_map) {
if (!FLAG_verify_heap) return;

// Check that Heap::NotifyObjectLayout was called for object transitions
@ -3042,7 +3040,7 @@ void Heap::VerifyObjectLayoutChange(HeapObject* object, Map new_map) {
}
} else {
DCHECK_EQ(pending_layout_change_object_, object);
pending_layout_change_object_ = nullptr;
pending_layout_change_object_ = HeapObject();
}
}
#endif
@ -3401,7 +3399,7 @@ const char* Heap::GarbageCollectionReasonToString(
|
||||
UNREACHABLE();
|
||||
}
|
||||
|
||||
bool Heap::Contains(HeapObject* value) {
|
||||
bool Heap::Contains(HeapObject value) {
|
||||
if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
|
||||
return false;
|
||||
}
|
||||
@ -3412,7 +3410,7 @@ bool Heap::Contains(HeapObject* value) {
|
||||
code_lo_space_->Contains(value) || new_lo_space_->Contains(value));
|
||||
}
|
||||
|
||||
bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
|
||||
bool Heap::InSpace(HeapObject value, AllocationSpace space) {
|
||||
if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
|
||||
return false;
|
||||
}
|
||||
@ -3489,15 +3487,15 @@ class VerifyReadOnlyPointersVisitor : public VerifyPointersVisitor {
|
||||
: VerifyPointersVisitor(heap) {}
|
||||
|
||||
protected:
|
||||
void VerifyPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VerifyPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override {
|
||||
if (host != nullptr) {
|
||||
if (!host.is_null()) {
|
||||
CHECK(heap_->InReadOnlySpace(host->map()));
|
||||
}
|
||||
VerifyPointersVisitor::VerifyPointers(host, start, end);
|
||||
|
||||
for (MaybeObjectSlot current = start; current < end; ++current) {
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if ((*current)->GetHeapObject(&heap_object)) {
|
||||
CHECK(heap_->InReadOnlySpace(heap_object));
|
||||
}
|
||||
@ -3540,14 +3538,9 @@ class SlotVerifyingVisitor : public ObjectVisitor {
|
||||
std::set<std::pair<SlotType, Address> >* typed)
|
||||
: untyped_(untyped), typed_(typed) {}
|
||||
|
||||
virtual bool ShouldHaveBeenRecorded(HeapObject* host, MaybeObject target) = 0;
|
||||
// TODO(3770): Drop this after the migration.
|
||||
bool ShouldHaveBeenRecorded(Code host, MaybeObject target) {
|
||||
return ShouldHaveBeenRecorded(reinterpret_cast<HeapObject*>(host.ptr()),
|
||||
target);
|
||||
}
|
||||
virtual bool ShouldHaveBeenRecorded(HeapObject host, MaybeObject target) = 0;
|
||||
|
||||
void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
#ifdef DEBUG
|
||||
for (ObjectSlot slot = start; slot < end; ++slot) {
|
||||
@ -3557,7 +3550,7 @@ class SlotVerifyingVisitor : public ObjectVisitor {
|
||||
VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) final {
|
||||
for (MaybeObjectSlot slot = start; slot < end; ++slot) {
|
||||
if (ShouldHaveBeenRecorded(host, *slot)) {
|
||||
@ -3599,7 +3592,7 @@ class OldToNewSlotVerifyingVisitor : public SlotVerifyingVisitor {
|
||||
std::set<std::pair<SlotType, Address>>* typed)
|
||||
: SlotVerifyingVisitor(untyped, typed) {}
|
||||
|
||||
bool ShouldHaveBeenRecorded(HeapObject* host, MaybeObject target) override {
|
||||
bool ShouldHaveBeenRecorded(HeapObject host, MaybeObject target) override {
|
||||
DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InNewSpace(target),
|
||||
Heap::InToSpace(target));
|
||||
return target->IsStrongOrWeak() && Heap::InNewSpace(target) &&
|
||||
@ -3629,7 +3622,7 @@ void CollectSlots(MemoryChunk* chunk, Address start, Address end,
|
||||
});
|
||||
}
|
||||
|
||||
void Heap::VerifyRememberedSetFor(HeapObject* object) {
|
||||
void Heap::VerifyRememberedSetFor(HeapObject object) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
|
||||
DCHECK_IMPLIES(chunk->mutex() == nullptr, InReadOnlySpace(object));
|
||||
// In RO_SPACE chunk->mutex() may be nullptr, so just ignore it.
|
||||
@ -3759,13 +3752,13 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
|
||||
private:
|
||||
inline void FixHandle(FullObjectSlot p) {
|
||||
if (!(*p)->IsHeapObject()) return;
|
||||
HeapObject* current = reinterpret_cast<HeapObject*>(*p);
|
||||
HeapObject current = HeapObject::cast(*p);
|
||||
const MapWord map_word = current->map_word();
|
||||
if (!map_word.IsForwardingAddress() && current->IsFiller()) {
|
||||
#ifdef DEBUG
|
||||
// We need to find a FixedArrayBase map after walking the fillers.
|
||||
while (current->IsFiller()) {
|
||||
Address next = reinterpret_cast<Address>(current);
|
||||
Address next = current->ptr();
|
||||
if (current->map() == ReadOnlyRoots(heap_).one_pointer_filler_map()) {
|
||||
next += kTaggedSize;
|
||||
} else if (current->map() ==
|
||||
@ -3774,7 +3767,7 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
|
||||
} else {
|
||||
next += current->Size();
|
||||
}
|
||||
current = reinterpret_cast<HeapObject*>(next);
|
||||
current = HeapObject::cast(ObjectPtr(next));
|
||||
}
|
||||
DCHECK(current->IsFixedArrayBase());
|
||||
#endif // DEBUG
|
||||
@ -4061,7 +4054,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
|
||||
*stats->malloced_peak_memory = isolate_->allocator()->GetMaxMemoryUsage();
|
||||
if (take_snapshot) {
|
||||
HeapIterator iterator(this);
|
||||
for (HeapObject* obj = iterator.next(); obj != nullptr;
|
||||
for (HeapObject obj = iterator.next(); !obj.is_null();
|
||||
obj = iterator.next()) {
|
||||
InstanceType type = obj->map()->instance_type();
|
||||
DCHECK(0 <= type && type <= LAST_TYPE);
|
||||
@ -4252,12 +4245,11 @@ void Heap::DisableInlineAllocation() {
|
||||
}
|
||||
}
|
||||
|
||||
HeapObject* Heap::EnsureImmovableCode(HeapObject* heap_object,
|
||||
int object_size) {
|
||||
HeapObject Heap::EnsureImmovableCode(HeapObject heap_object, int object_size) {
|
||||
// Code objects which should stay at a fixed address are allocated either
|
||||
// in the first page of code space, in large object space, or (during
|
||||
// snapshot creation) the containing page is marked as immovable.
|
||||
DCHECK(heap_object);
|
||||
DCHECK(!heap_object.is_null());
|
||||
DCHECK(code_space_->Contains(heap_object));
|
||||
DCHECK_GE(object_size, 0);
|
||||
if (!Heap::IsImmovable(heap_object)) {
|
||||
@ -4278,9 +4270,9 @@ HeapObject* Heap::EnsureImmovableCode(HeapObject* heap_object,
|
||||
return heap_object;
|
||||
}
|
||||
|
||||
HeapObject* Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
AllocationAlignment alignment) {
HeapObject* result;
HeapObject Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
AllocationAlignment alignment) {
HeapObject result;
AllocationResult alloc = AllocateRaw(size, space, alignment);
if (alloc.To(&result)) {
DCHECK(result != ReadOnlyRoots(this).exception());
@ -4296,14 +4288,14 @@ HeapObject* Heap::AllocateRawWithLightRetry(int size, AllocationSpace space,
return result;
}
}
return nullptr;
return HeapObject();
}

HeapObject* Heap::AllocateRawWithRetryOrFail(int size, AllocationSpace space,
AllocationAlignment alignment) {
HeapObject Heap::AllocateRawWithRetryOrFail(int size, AllocationSpace space,
AllocationAlignment alignment) {
AllocationResult alloc;
HeapObject* result = AllocateRawWithLightRetry(size, space, alignment);
if (result) return result;
HeapObject result = AllocateRawWithLightRetry(size, space, alignment);
if (!result.is_null()) return result;

isolate()->counters()->gc_last_resort_from_handles()->Increment();
CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
@ -4317,14 +4309,14 @@ HeapObject* Heap::AllocateRawWithRetryOrFail(int size, AllocationSpace space,
}
// TODO(1181417): Fix this.
FatalProcessOutOfMemory("CALL_AND_RETRY_LAST");
return nullptr;
return HeapObject();
}

// TODO(jkummerow): Refactor this. AllocateRaw should take an "immovability"
// parameter and just do what's necessary.
HeapObject* Heap::AllocateRawCodeInLargeObjectSpace(int size) {
HeapObject Heap::AllocateRawCodeInLargeObjectSpace(int size) {
AllocationResult alloc = code_lo_space()->AllocateRaw(size);
HeapObject* result;
HeapObject result;
if (alloc.To(&result)) {
DCHECK(result != ReadOnlyRoots(this).exception());
return result;
@ -4351,7 +4343,7 @@ HeapObject* Heap::AllocateRawCodeInLargeObjectSpace(int size) {
}
// TODO(1181417): Fix this.
FatalProcessOutOfMemory("CALL_AND_RETRY_LAST");
return nullptr;
return HeapObject();
}
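Note on the pattern above: with HeapObject as a value type, allocation failure is reported as a default-constructed object checked with is_null() instead of a nullptr return. A minimal, stand-alone sketch of that idiom follows; HeapObjectLike and AllocateMaybe are illustrative stand-ins, not the real V8 API.

#include <cassert>
#include <cstdint>

// Simplified stand-in for the new value-type HeapObject: it wraps a tagged
// address instead of being a pointer, so "no object" is the default value.
class HeapObjectLike {
 public:
  HeapObjectLike() = default;                         // the "null" object
  explicit HeapObjectLike(uintptr_t ptr) : ptr_(ptr) {}
  bool is_null() const { return ptr_ == 0; }

 private:
  uintptr_t ptr_ = 0;
};

// Old style:  HeapObject* result = Allocate();  if (result) return result;
// New style:  HeapObject  result = Allocate();  if (!result.is_null()) ...
HeapObjectLike AllocateMaybe(bool succeed) {
  return succeed ? HeapObjectLike(0x1000) : HeapObjectLike();
}

int main() {
  assert(!AllocateMaybe(true).is_null());   // success: a non-null value
  assert(AllocateMaybe(false).is_null());   // failure: the null object
}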
|
||||
void Heap::SetUp() {
|
||||
@ -4564,7 +4556,7 @@ void Heap::RegisterExternallyReferencedObject(Address* location) {
|
||||
// objects are just passed around as Smis.
|
||||
ObjectPtr object(*location);
|
||||
if (!object->IsHeapObject()) return;
|
||||
HeapObject* heap_object = HeapObject::cast(object);
|
||||
HeapObject heap_object = HeapObject::cast(object);
|
||||
DCHECK(Contains(heap_object));
|
||||
if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
|
||||
incremental_marking()->WhiteToGreyAndPush(heap_object);
|
||||
@ -4793,7 +4785,7 @@ void Heap::CompactWeakArrayLists(PretenureFlag pretenure) {
|
||||
std::vector<Handle<PrototypeInfo>> prototype_infos;
|
||||
{
|
||||
HeapIterator iterator(this);
|
||||
for (HeapObject* o = iterator.next(); o != nullptr; o = iterator.next()) {
|
||||
for (HeapObject o = iterator.next(); !o.is_null(); o = iterator.next()) {
|
||||
if (o->IsPrototypeInfo()) {
|
||||
PrototypeInfo prototype_info = PrototypeInfo::cast(o);
|
||||
if (prototype_info->prototype_users()->IsWeakArrayList()) {
|
||||
@ -4871,7 +4863,7 @@ void Heap::CompactRetainedMaps(WeakArrayList retained_maps) {
|
||||
new_length += 2;
|
||||
}
|
||||
number_of_disposed_maps_ = new_number_of_disposed_maps;
|
||||
HeapObject* undefined = ReadOnlyRoots(this).undefined_value();
|
||||
HeapObject undefined = ReadOnlyRoots(this).undefined_value();
|
||||
for (int i = new_length; i < length; i++) {
|
||||
retained_maps->Set(i, HeapObjectReference::Strong(undefined));
|
||||
}
|
||||
@ -4937,7 +4929,7 @@ Address Heap::store_buffer_overflow_function_address() {
|
||||
return FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow);
|
||||
}
|
||||
|
||||
void Heap::ClearRecordedSlot(HeapObject* object, ObjectSlot slot) {
|
||||
void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
|
||||
Page* page = Page::FromAddress(slot.address());
|
||||
if (!page->InNewSpace()) {
|
||||
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
|
||||
@ -4946,7 +4938,7 @@ void Heap::ClearRecordedSlot(HeapObject* object, ObjectSlot slot) {
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
void Heap::VerifyClearedSlot(HeapObject* object, ObjectSlot slot) {
|
||||
void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
|
||||
if (InNewSpace(object)) return;
|
||||
Page* page = Page::FromAddress(slot.address());
|
||||
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
|
||||
@ -5002,7 +4994,7 @@ Space* SpaceIterator::next() {
|
||||
class HeapObjectsFilter {
|
||||
public:
|
||||
virtual ~HeapObjectsFilter() = default;
|
||||
virtual bool SkipObject(HeapObject* object) = 0;
|
||||
virtual bool SkipObject(HeapObject object) = 0;
|
||||
};
|
||||
|
||||
|
||||
@ -5019,7 +5011,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
|
||||
}
|
||||
}
|
||||
|
||||
bool SkipObject(HeapObject* object) override {
|
||||
bool SkipObject(HeapObject object) override {
|
||||
if (object->IsFiller()) return true;
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
|
||||
if (reachable_.count(chunk) == 0) return true;
|
||||
@ -5027,10 +5019,11 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
|
||||
}
|
||||
|
||||
private:
|
||||
bool MarkAsReachable(HeapObject* object) {
|
||||
bool MarkAsReachable(HeapObject object) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
|
||||
if (reachable_.count(chunk) == 0) {
|
||||
reachable_[chunk] = new std::unordered_set<HeapObject*>();
|
||||
reachable_[chunk] =
|
||||
new std::unordered_set<HeapObject, HeapObject::Hasher>();
|
||||
}
|
||||
if (reachable_[chunk]->count(object)) return false;
|
||||
reachable_[chunk]->insert(object);
|
||||
@ -5042,12 +5035,12 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
|
||||
explicit MarkingVisitor(UnreachableObjectsFilter* filter)
|
||||
: filter_(filter) {}
|
||||
|
||||
void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
MarkPointers(MaybeObjectSlot(start), MaybeObjectSlot(end));
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) final {
|
||||
MarkPointers(start, end);
|
||||
}
|
||||
@ -5067,7 +5060,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
|
||||
|
||||
void TransitiveClosure() {
|
||||
while (!marking_stack_.empty()) {
|
||||
HeapObject* obj = marking_stack_.back();
|
||||
HeapObject obj = marking_stack_.back();
|
||||
marking_stack_.pop_back();
|
||||
obj->Iterate(this);
|
||||
}
|
||||
@ -5083,21 +5076,21 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
|
||||
// Treat weak references as strong.
|
||||
for (TSlot p = start; p < end; ++p) {
|
||||
typename TSlot::TObject object = p.load();
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (object.GetHeapObject(&heap_object)) {
|
||||
MarkHeapObject(heap_object);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
V8_INLINE void MarkHeapObject(HeapObject* heap_object) {
|
||||
V8_INLINE void MarkHeapObject(HeapObject heap_object) {
|
||||
if (filter_->MarkAsReachable(heap_object)) {
|
||||
marking_stack_.push_back(heap_object);
|
||||
}
|
||||
}
|
||||
|
||||
UnreachableObjectsFilter* filter_;
|
||||
std::vector<HeapObject*> marking_stack_;
|
||||
std::vector<HeapObject> marking_stack_;
|
||||
};
|
||||
|
||||
friend class MarkingVisitor;
|
||||
@ -5110,7 +5103,9 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {

Heap* heap_;
DisallowHeapAllocation no_allocation_;
std::unordered_map<MemoryChunk*, std::unordered_set<HeapObject*>*> reachable_;
std::unordered_map<MemoryChunk*,
std::unordered_set<HeapObject, HeapObject::Hasher>*>
reachable_;
};

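The explicit HeapObject::Hasher above is needed because a value type no longer hashes like a raw pointer (std::hash<HeapObject*> no longer applies). A simplified, hypothetical illustration of the same shape; HeapObjectLike is a stand-in, not the real class.

#include <cstddef>
#include <cstdint>
#include <unordered_set>

class HeapObjectLike {
 public:
  explicit HeapObjectLike(uintptr_t ptr) : ptr_(ptr) {}
  uintptr_t ptr() const { return ptr_; }
  bool operator==(const HeapObjectLike& other) const {
    return ptr_ == other.ptr_;
  }
  // Equivalent in spirit to HeapObject::Hasher: hash the tagged address.
  struct Hasher {
    size_t operator()(const HeapObjectLike& o) const {
      return static_cast<size_t>(o.ptr()) >> 3;  // ignore alignment bits
    }
  };

 private:
  uintptr_t ptr_;
};

int main() {
  // The value type needs an explicit hasher where a pointer key did not.
  std::unordered_set<HeapObjectLike, HeapObjectLike::Hasher> reachable;
  reachable.insert(HeapObjectLike(0x1000));
  return reachable.count(HeapObjectLike(0x1000)) == 1 ? 0 : 1;
}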
HeapIterator::HeapIterator(Heap* heap,
@ -5148,38 +5143,37 @@ HeapIterator::~HeapIterator() {
delete filter_;
}

HeapObject* HeapIterator::next() {
HeapObject HeapIterator::next() {
if (filter_ == nullptr) return NextObject();

HeapObject* obj = NextObject();
while ((obj != nullptr) && (filter_->SkipObject(obj))) obj = NextObject();
HeapObject obj = NextObject();
while (!obj.is_null() && (filter_->SkipObject(obj))) obj = NextObject();
return obj;
}

HeapObject* HeapIterator::NextObject() {
HeapObject HeapIterator::NextObject() {
// No iterator means we are done.
if (object_iterator_.get() == nullptr) return nullptr;
if (object_iterator_.get() == nullptr) return HeapObject();

if (HeapObject* obj = object_iterator_.get()->Next()) {
HeapObject obj = object_iterator_.get()->Next();
if (!obj.is_null()) {
// If the current iterator has more objects we are fine.
return obj;
} else {
// Go though the spaces looking for one that has objects.
while (space_iterator_->has_next()) {
object_iterator_ = space_iterator_->next()->GetObjectIterator();
if (HeapObject* obj = object_iterator_.get()->Next()) {
obj = object_iterator_.get()->Next();
if (!obj.is_null()) {
return obj;
}
}
}
// Done with the last space.
object_iterator_.reset(nullptr);
return nullptr;
return HeapObject();
}

void Heap::UpdateTotalGCTime(double duration) {
|
||||
if (FLAG_trace_gc_verbose) {
|
||||
total_gc_time_ms_ += duration;
|
||||
@ -5299,7 +5293,7 @@ void Heap::SetInterpreterEntryTrampolineForProfiling(Code code) {
|
||||
|
||||
void Heap::AddDirtyJSWeakFactory(
|
||||
JSWeakFactory weak_factory,
|
||||
std::function<void(HeapObject* object, ObjectSlot slot, Object* target)>
|
||||
std::function<void(HeapObject object, ObjectSlot slot, Object* target)>
|
||||
gc_notify_updated_slot) {
|
||||
DCHECK(dirty_js_weak_factories()->IsUndefined(isolate()) ||
|
||||
dirty_js_weak_factories()->IsJSWeakFactory());
|
||||
@ -5415,12 +5409,12 @@ const char* AllocationSpaceName(AllocationSpace space) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void VerifyPointersVisitor::VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VerifyPointersVisitor::VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) {
|
||||
VerifyPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
|
||||
}
|
||||
|
||||
void VerifyPointersVisitor::VisitPointers(HeapObject* host,
|
||||
void VerifyPointersVisitor::VisitPointers(HeapObject host,
|
||||
MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) {
|
||||
VerifyPointers(host, start, end);
|
||||
@ -5433,7 +5427,7 @@ void VerifyPointersVisitor::VisitRootPointers(Root root,
|
||||
VerifyPointersImpl(start, end);
|
||||
}
|
||||
|
||||
void VerifyPointersVisitor::VerifyHeapObjectImpl(HeapObject* heap_object) {
|
||||
void VerifyPointersVisitor::VerifyHeapObjectImpl(HeapObject heap_object) {
|
||||
CHECK(heap_->Contains(heap_object));
|
||||
CHECK(heap_object->map()->IsMap());
|
||||
}
|
||||
@ -5442,7 +5436,7 @@ template <typename TSlot>
|
||||
void VerifyPointersVisitor::VerifyPointersImpl(TSlot start, TSlot end) {
|
||||
for (TSlot slot = start; slot < end; ++slot) {
|
||||
typename TSlot::TObject object = slot.load();
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (object.GetHeapObject(&heap_object)) {
|
||||
VerifyHeapObjectImpl(heap_object);
|
||||
} else {
|
||||
@ -5451,7 +5445,7 @@ void VerifyPointersVisitor::VerifyPointersImpl(TSlot start, TSlot end) {
|
||||
}
|
||||
}
|
||||
|
||||
void VerifyPointersVisitor::VerifyPointers(HeapObject* host,
|
||||
void VerifyPointersVisitor::VerifyPointers(HeapObject host,
|
||||
MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) {
|
||||
VerifyPointersImpl(start, end);
|
||||
@ -5474,7 +5468,7 @@ void VerifySmisVisitor::VisitRootPointers(Root root, const char* description,
|
||||
}
|
||||
}
|
||||
|
||||
bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
|
||||
bool Heap::AllowedToBeMigrated(HeapObject obj, AllocationSpace dst) {
|
||||
// Object migration is governed by the following rules:
|
||||
//
|
||||
// 1) Objects in new-space can be migrated to the old space
|
||||
@ -5532,17 +5526,17 @@ void AllocationObserver::AllocationStep(int bytes_allocated,
|
||||
|
||||
namespace {
|
||||
|
||||
Map GcSafeMapOfCodeSpaceObject(HeapObject* object) {
|
||||
Map GcSafeMapOfCodeSpaceObject(HeapObject object) {
|
||||
MapWord map_word = object->map_word();
|
||||
return map_word.IsForwardingAddress() ? map_word.ToForwardingAddress()->map()
|
||||
: map_word.ToMap();
|
||||
}
|
||||
|
||||
int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
|
||||
int GcSafeSizeOfCodeSpaceObject(HeapObject object) {
|
||||
return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
|
||||
}
|
||||
|
||||
Code GcSafeCastToCode(Heap* heap, HeapObject* object, Address inner_pointer) {
|
||||
Code GcSafeCastToCode(Heap* heap, HeapObject object, Address inner_pointer) {
|
||||
Code code = Code::unchecked_cast(object);
|
||||
DCHECK(!code.is_null());
|
||||
DCHECK(heap->GcSafeCodeContains(code, inner_pointer));
|
||||
@ -5588,7 +5582,7 @@ Code Heap::GcSafeFindCodeForInnerPointer(Address inner_pointer) {
|
||||
continue;
|
||||
}
|
||||
|
||||
HeapObject* obj = HeapObject::FromAddress(addr);
|
||||
HeapObject obj = HeapObject::FromAddress(addr);
|
||||
int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
|
||||
Address next_addr = addr + obj_size;
|
||||
if (next_addr > inner_pointer) {
|
||||
@ -5606,9 +5600,9 @@ void Heap::WriteBarrierForCodeSlow(Code code) {
|
||||
}
|
||||
}
|
||||
|
||||
void Heap::GenerationalBarrierSlow(HeapObject* object, Address slot,
|
||||
HeapObject* value) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(object);
|
||||
void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
|
||||
HeapObject value) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(&object);
|
||||
heap->store_buffer()->InsertEntry(slot);
|
||||
}
|
||||
|
||||
@ -5622,7 +5616,7 @@ void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
|
||||
}
|
||||
|
||||
void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
|
||||
HeapObject* object) {
|
||||
HeapObject object) {
|
||||
DCHECK(InNewSpace(object));
|
||||
Page* source_page = Page::FromAddress(host.ptr());
|
||||
RelocInfo::Mode rmode = rinfo->rmode();
|
||||
@ -5643,14 +5637,14 @@ void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
|
||||
static_cast<uint32_t>(offset));
|
||||
}
|
||||
|
||||
void Heap::MarkingBarrierSlow(HeapObject* object, Address slot,
|
||||
HeapObject* value) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(object);
|
||||
void Heap::MarkingBarrierSlow(HeapObject object, Address slot,
|
||||
HeapObject value) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(&object);
|
||||
heap->incremental_marking()->RecordWriteSlow(object, HeapObjectSlot(slot),
|
||||
value);
|
||||
}
|
||||
|
||||
void Heap::MarkingBarrierForElementsSlow(Heap* heap, HeapObject* object) {
|
||||
void Heap::MarkingBarrierForElementsSlow(Heap* heap, HeapObject object) {
|
||||
if (FLAG_concurrent_marking ||
|
||||
heap->incremental_marking()->marking_state()->IsBlack(object)) {
|
||||
heap->incremental_marking()->RevisitObject(object);
|
||||
@ -5658,15 +5652,15 @@ void Heap::MarkingBarrierForElementsSlow(Heap* heap, HeapObject* object) {
|
||||
}
|
||||
|
||||
void Heap::MarkingBarrierForCodeSlow(Code host, RelocInfo* rinfo,
|
||||
HeapObject* object) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(host);
|
||||
HeapObject object) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(&host);
|
||||
DCHECK(heap->incremental_marking()->IsMarking());
|
||||
heap->incremental_marking()->RecordWriteIntoCode(host, rinfo, object);
|
||||
}
|
||||
|
||||
void Heap::MarkingBarrierForDescriptorArraySlow(
|
||||
Heap* heap, HeapObject* raw_descriptor_array,
|
||||
int number_of_own_descriptors) {
|
||||
void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap,
|
||||
HeapObject raw_descriptor_array,
|
||||
int number_of_own_descriptors) {
|
||||
DCHECK(heap->incremental_marking()->IsMarking());
|
||||
DescriptorArray descriptor_array =
|
||||
DescriptorArray::cast(raw_descriptor_array);
|
||||
@ -5679,7 +5673,7 @@ void Heap::MarkingBarrierForDescriptorArraySlow(
|
||||
}
|
||||
}
|
||||
|
||||
bool Heap::PageFlagsAreConsistent(HeapObject* object) {
|
||||
bool Heap::PageFlagsAreConsistent(HeapObject object) {
|
||||
Heap* heap = Heap::FromWritableHeapObject(object);
|
||||
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
|
||||
heap_internals::MemoryChunk* slim_chunk =
|
||||
@ -5708,6 +5702,9 @@ static_assert(MemoryChunk::Flag::IN_TO_SPACE ==
|
||||
static_assert(MemoryChunk::kFlagsOffset ==
|
||||
heap_internals::MemoryChunk::kFlagsOffset,
|
||||
"Flag offset inconsistent");
|
||||
static_assert(MemoryChunk::kHeapOffset ==
|
||||
heap_internals::MemoryChunk::kHeapOffset,
|
||||
"Heap offset inconsistent");
|
||||
|
||||
void Heap::SetEmbedderStackStateForNextFinalizaton(
|
||||
EmbedderHeapTracer::EmbedderStackState stack_state) {
|
||||
|
134 src/heap/heap.h
@ -63,7 +63,6 @@ class GCIdleTimeHeapState;
|
||||
class GCTracer;
|
||||
class HeapController;
|
||||
class HeapObjectAllocationTracker;
|
||||
class HeapObjectPtr;
|
||||
class HeapObjectsFilter;
|
||||
class HeapStats;
|
||||
class HistogramTimer;
|
||||
@ -171,7 +170,7 @@ class AllocationResult {
|
||||
AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
|
||||
|
||||
inline bool IsRetry() { return object_->IsSmi(); }
|
||||
inline HeapObject* ToObjectChecked();
|
||||
inline HeapObject ToObjectChecked();
|
||||
inline AllocationSpace RetrySpace();
|
||||
|
||||
template <typename T, typename = typename std::enable_if<
|
||||
@ -227,7 +226,7 @@ class Heap {
|
||||
};
|
||||
|
||||
using PretenuringFeedbackMap =
|
||||
std::unordered_map<AllocationSite, size_t, HeapObjectPtr::Hasher>;
|
||||
std::unordered_map<AllocationSite, size_t, HeapObject::Hasher>;
|
||||
|
||||
// Taking this mutex prevents the GC from entering a phase that relocates
|
||||
// object references.
|
||||
@ -341,24 +340,24 @@ class Heap {
|
||||
static inline void CopyBlock(Address dst, Address src, int byte_size);
|
||||
|
||||
V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
|
||||
V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject* object,
|
||||
V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
|
||||
Address slot,
|
||||
HeapObject* value);
|
||||
HeapObject value);
|
||||
V8_EXPORT_PRIVATE static void GenerationalBarrierForElementsSlow(
|
||||
Heap* heap, FixedArray array, int offset, int length);
|
||||
V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
|
||||
Code host, RelocInfo* rinfo, HeapObject* value);
|
||||
V8_EXPORT_PRIVATE static void MarkingBarrierSlow(HeapObject* object,
|
||||
Code host, RelocInfo* rinfo, HeapObject value);
|
||||
V8_EXPORT_PRIVATE static void MarkingBarrierSlow(HeapObject object,
|
||||
Address slot,
|
||||
HeapObject* value);
|
||||
HeapObject value);
|
||||
V8_EXPORT_PRIVATE static void MarkingBarrierForElementsSlow(
|
||||
Heap* heap, HeapObject* object);
|
||||
Heap* heap, HeapObject object);
|
||||
V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
|
||||
RelocInfo* rinfo,
|
||||
HeapObject* value);
|
||||
HeapObject value);
|
||||
V8_EXPORT_PRIVATE static void MarkingBarrierForDescriptorArraySlow(
|
||||
Heap* heap, HeapObject* descriptor_array, int number_of_own_descriptors);
|
||||
V8_EXPORT_PRIVATE static bool PageFlagsAreConsistent(HeapObject* object);
|
||||
Heap* heap, HeapObject descriptor_array, int number_of_own_descriptors);
|
||||
V8_EXPORT_PRIVATE static bool PageFlagsAreConsistent(HeapObject object);
|
||||
|
||||
// Notifies the heap that is ok to start marking or other activities that
|
||||
// should not happen during deserialization.
|
||||
@ -380,7 +379,7 @@ class Heap {
|
||||
// pass ClearRecordedSlots::kNo. If the memory after the object header of
|
||||
// the filler should be cleared, pass in kClearFreedMemory. The default is
|
||||
// kDontClearFreedMemory.
|
||||
V8_EXPORT_PRIVATE HeapObject* CreateFillerObjectAt(
|
||||
V8_EXPORT_PRIVATE HeapObject CreateFillerObjectAt(
|
||||
Address addr, int size, ClearRecordedSlots clear_slots_mode,
|
||||
ClearFreedMemoryMode clear_memory_mode =
|
||||
ClearFreedMemoryMode::kDontClearFreedMemory);
|
||||
@ -388,15 +387,15 @@ class Heap {
|
||||
template <typename T>
|
||||
void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);
|
||||
|
||||
bool CanMoveObjectStart(HeapObject* object);
|
||||
bool CanMoveObjectStart(HeapObject object);
|
||||
|
||||
bool IsImmovable(HeapObject* object);
|
||||
bool IsImmovable(HeapObject object);
|
||||
|
||||
bool IsLargeObject(HeapObject* object);
|
||||
bool IsLargeObject(HeapObject object);
|
||||
bool IsLargeMemoryChunk(MemoryChunk* chunk);
|
||||
inline bool IsWithinLargeObject(Address address);
|
||||
|
||||
bool IsInYoungGeneration(HeapObject* object);
|
||||
bool IsInYoungGeneration(HeapObject object);
|
||||
|
||||
// Trim the given array from the left. Note that this relocates the object
|
||||
// start and hence is only valid if there is only a single reference to it.
|
||||
@ -437,7 +436,7 @@ class Heap {
|
||||
|
||||
// Checks whether the given object is allowed to be migrated from it's
|
||||
// current space into the given destination space. Used for debugging.
|
||||
bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
|
||||
bool AllowedToBeMigrated(HeapObject object, AllocationSpace dest);
|
||||
|
||||
void CheckHandleCount();
|
||||
|
||||
@ -462,7 +461,7 @@ class Heap {
|
||||
}
|
||||
|
||||
void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
|
||||
void UnprotectAndRegisterMemoryChunk(HeapObject* object);
|
||||
void UnprotectAndRegisterMemoryChunk(HeapObject object);
|
||||
void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
|
||||
V8_EXPORT_PRIVATE void ProtectUnprotectedMemoryChunks();
|
||||
|
||||
@ -487,7 +486,7 @@ class Heap {
|
||||
// If an object has an AllocationMemento trailing it, return it, otherwise
|
||||
// return a null AllocationMemento.
|
||||
template <FindMementoMode mode>
|
||||
inline AllocationMemento FindAllocationMemento(Map map, HeapObject* object);
|
||||
inline AllocationMemento FindAllocationMemento(Map map, HeapObject object);
|
||||
|
||||
// Returns false if not able to reserve.
|
||||
bool ReserveSpace(Reservation* reservations, std::vector<Address>* maps);
|
||||
@ -554,10 +553,10 @@ class Heap {
|
||||
// by runtime. Allocations of target space for object evacuation do not
|
||||
// trigger the event. In order to track ALL allocations one must turn off
|
||||
// FLAG_inline_new.
|
||||
inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
|
||||
inline void OnAllocationEvent(HeapObject object, int size_in_bytes);
|
||||
|
||||
// This event is triggered after object is moved to a new place.
|
||||
inline void OnMoveEvent(HeapObject* target, HeapObject* source,
|
||||
inline void OnMoveEvent(HeapObject target, HeapObject source,
|
||||
int size_in_bytes);
|
||||
|
||||
inline bool CanAllocateInReadOnlySpace();
|
||||
@ -704,7 +703,7 @@ class Heap {
|
||||
// Add weak_factory into the dirty_js_weak_factories list.
|
||||
void AddDirtyJSWeakFactory(
|
||||
JSWeakFactory weak_factory,
|
||||
std::function<void(HeapObject* object, ObjectSlot slot, Object* target)>
|
||||
std::function<void(HeapObject object, ObjectSlot slot, Object* target)>
|
||||
gc_notify_updated_slot);
|
||||
|
||||
void AddKeepDuringJobTarget(Handle<JSReceiver> target);
|
||||
@ -808,11 +807,11 @@ class Heap {
|
||||
static intptr_t store_buffer_mask_constant();
|
||||
static Address store_buffer_overflow_function_address();
|
||||
|
||||
void ClearRecordedSlot(HeapObject* object, ObjectSlot slot);
|
||||
void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
|
||||
void ClearRecordedSlotRange(Address start, Address end);
|
||||
|
||||
#ifdef DEBUG
|
||||
void VerifyClearedSlot(HeapObject* object, ObjectSlot slot);
|
||||
void VerifyClearedSlot(HeapObject object, ObjectSlot slot);
|
||||
#endif
|
||||
|
||||
// ===========================================================================
|
||||
@ -845,7 +844,7 @@ class Heap {
|
||||
void FinalizeIncrementalMarkingAtomically(GarbageCollectionReason gc_reason);
|
||||
|
||||
void RegisterDeserializedObjectsForBlackAllocation(
|
||||
Reservation* reservations, const std::vector<HeapObject*>& large_objects,
|
||||
Reservation* reservations, const std::vector<HeapObject>& large_objects,
|
||||
const std::vector<Address>& maps);
|
||||
|
||||
IncrementalMarking* incremental_marking() { return incremental_marking_; }
|
||||
@ -859,14 +858,14 @@ class Heap {
|
||||
// The runtime uses this function to notify potentially unsafe object layout
|
||||
// changes that require special synchronization with the concurrent marker.
|
||||
// The old size is the size of the object before layout change.
|
||||
void NotifyObjectLayoutChange(HeapObject* object, int old_size,
|
||||
void NotifyObjectLayoutChange(HeapObject object, int old_size,
|
||||
const DisallowHeapAllocation&);
|
||||
|
||||
#ifdef VERIFY_HEAP
|
||||
// This function checks that either
|
||||
// - the map transition is safe,
|
||||
// - or it was communicated to GC using NotifyObjectLayoutChange.
|
||||
void VerifyObjectLayoutChange(HeapObject* object, Map new_map);
|
||||
void VerifyObjectLayoutChange(HeapObject object, Map new_map);
|
||||
#endif
|
||||
|
||||
// ===========================================================================
|
||||
@ -928,15 +927,13 @@ class Heap {
|
||||
// Returns whether the object resides in new space.
|
||||
static inline bool InNewSpace(Object* object);
|
||||
static inline bool InNewSpace(MaybeObject object);
|
||||
static inline bool InNewSpace(HeapObject* heap_object);
|
||||
static inline bool InNewSpace(HeapObjectPtr heap_object);
|
||||
static inline bool InNewSpace(HeapObject heap_object);
|
||||
static inline bool InFromSpace(Object* object);
|
||||
static inline bool InFromSpace(MaybeObject object);
|
||||
static inline bool InFromSpace(HeapObject* heap_object);
|
||||
static inline bool InFromSpace(HeapObject heap_object);
|
||||
static inline bool InToSpace(Object* object);
|
||||
static inline bool InToSpace(MaybeObject object);
|
||||
static inline bool InToSpace(HeapObject* heap_object);
|
||||
static inline bool InToSpace(HeapObjectPtr heap_object);
|
||||
static inline bool InToSpace(HeapObject heap_object);
|
||||
|
||||
// Returns whether the object resides in old space.
|
||||
inline bool InOldSpace(Object* object);
|
||||
@ -946,24 +943,22 @@ class Heap {
|
||||
|
||||
// Checks whether an address/object in the heap (including auxiliary
|
||||
// area and unused area).
|
||||
bool Contains(HeapObject* value);
|
||||
bool Contains(HeapObject value);
|
||||
|
||||
// Checks whether an address/object in a space.
|
||||
// Currently used by tests, serialization and heap verification only.
|
||||
bool InSpace(HeapObject* value, AllocationSpace space);
|
||||
bool InSpace(HeapObject value, AllocationSpace space);
|
||||
|
||||
// Slow methods that can be used for verification as they can also be used
|
||||
// with off-heap Addresses.
|
||||
bool InSpaceSlow(Address addr, AllocationSpace space);
|
||||
|
||||
// Find the heap which owns this HeapObject. Should never be called for
|
||||
// objects in RO space.
|
||||
// This takes a HeapObject* (as opposed to a plain HeapObject)
|
||||
// to keep the WRITE_BARRIER macro syntax-compatible to the old HeapObject*
|
||||
// version.
|
||||
// TODO(3770): This should probably take a HeapObject eventually.
|
||||
static inline Heap* FromWritableHeapObject(const HeapObject* obj);
|
||||
// This takes a HeapObjectPtr* (as opposed to a plain HeapObjectPtr)
|
||||
// to keep the WRITE_BARRIER macro syntax-compatible to the HeapObject*
|
||||
// version above.
|
||||
// TODO(3770): This should probably take a HeapObjectPtr eventually.
|
||||
static inline Heap* FromWritableHeapObject(const HeapObjectPtr* obj);
|
||||
static inline Heap* FromWritableHeapObject(const HeapObject obj);
|
||||
|
||||
// ===========================================================================
|
||||
// Object statistics tracking. ===============================================
|
||||
@ -1166,15 +1161,15 @@ class Heap {
|
||||
// ===========================================================================
|
||||
|
||||
// Creates a filler object and returns a heap object immediately after it.
|
||||
V8_WARN_UNUSED_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
|
||||
int filler_size);
|
||||
V8_WARN_UNUSED_RESULT HeapObject PrecedeWithFiller(HeapObject object,
|
||||
int filler_size);
|
||||
|
||||
// Creates a filler object if needed for alignment and returns a heap object
|
||||
// immediately after it. If any space is left after the returned object,
|
||||
// another filler object is created so the over allocated memory is iterable.
|
||||
V8_WARN_UNUSED_RESULT HeapObject* AlignWithFiller(
|
||||
HeapObject* object, int object_size, int allocation_size,
|
||||
AllocationAlignment alignment);
|
||||
V8_WARN_UNUSED_RESULT HeapObject
|
||||
AlignWithFiller(HeapObject object, int object_size, int allocation_size,
|
||||
AllocationAlignment alignment);
|
||||
|
||||
// ===========================================================================
|
||||
// ArrayBuffer tracking. =====================================================
|
||||
@ -1194,8 +1189,7 @@ class Heap {
|
||||
// Updates the AllocationSite of a given {object}. The entry (including the
|
||||
// count) is cached on the local pretenuring feedback.
|
||||
inline void UpdateAllocationSite(
|
||||
Map map, HeapObject* object,
|
||||
PretenuringFeedbackMap* pretenuring_feedback);
|
||||
Map map, HeapObject object, PretenuringFeedbackMap* pretenuring_feedback);
|
||||
|
||||
// Merges local pretenuring feedback into the global one. Note that this
|
||||
// method needs to be called after evacuation, as allocation sites may be
|
||||
@ -1256,7 +1250,7 @@ class Heap {
|
||||
#ifdef VERIFY_HEAP
|
||||
// Verify the heap is in its normal state before or after a GC.
|
||||
void Verify();
|
||||
void VerifyRememberedSetFor(HeapObject* object);
|
||||
void VerifyRememberedSetFor(HeapObject object);
|
||||
#endif
|
||||
|
||||
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
|
||||
@ -1507,7 +1501,7 @@ class Heap {
|
||||
double deadline_in_ms);
|
||||
|
||||
int NextAllocationTimeout(int current_timeout = 0);
|
||||
inline void UpdateAllocationsHash(HeapObject* object);
|
||||
inline void UpdateAllocationsHash(HeapObject object);
|
||||
inline void UpdateAllocationsHash(uint32_t value);
|
||||
void PrintAllocationsHash();
|
||||
|
||||
@ -1711,7 +1705,7 @@ class Heap {
|
||||
// triggered and the allocation is retried. This is performed multiple times.
|
||||
// If after that retry procedure the allocation still fails nullptr is
|
||||
// returned.
|
||||
HeapObject* AllocateRawWithLightRetry(
|
||||
HeapObject AllocateRawWithLightRetry(
|
||||
int size, AllocationSpace space,
|
||||
AllocationAlignment alignment = kWordAligned);
|
||||
|
||||
@ -1721,10 +1715,10 @@ class Heap {
|
||||
// If after that retry procedure the allocation still fails a "hammer"
|
||||
// garbage collection is triggered which tries to significantly reduce memory.
|
||||
// If the allocation still fails after that a fatal error is thrown.
|
||||
HeapObject* AllocateRawWithRetryOrFail(
|
||||
HeapObject AllocateRawWithRetryOrFail(
|
||||
int size, AllocationSpace space,
|
||||
AllocationAlignment alignment = kWordAligned);
|
||||
HeapObject* AllocateRawCodeInLargeObjectSpace(int size);
|
||||
HeapObject AllocateRawCodeInLargeObjectSpace(int size);
|
||||
|
||||
// Allocates a heap object based on the map.
|
||||
V8_WARN_UNUSED_RESULT AllocationResult Allocate(Map map,
|
||||
@ -1733,7 +1727,7 @@ class Heap {
|
||||
// Takes a code object and checks if it is on memory which is not subject to
|
||||
// compaction. This method will return a new code object on an immovable
|
||||
// memory location if the original code object was movable.
|
||||
HeapObject* EnsureImmovableCode(HeapObject* heap_object, int object_size);
|
||||
HeapObject EnsureImmovableCode(HeapObject heap_object, int object_size);
|
||||
|
||||
// Allocates a partial map for bootstrapping.
|
||||
V8_WARN_UNUSED_RESULT AllocationResult
|
||||
@ -1751,13 +1745,13 @@ class Heap {
|
||||
// Retaining path tracing ====================================================
|
||||
// ===========================================================================
|
||||
|
||||
void AddRetainer(HeapObject* retainer, HeapObject* object);
|
||||
void AddEphemeronRetainer(HeapObject* retainer, HeapObject* object);
|
||||
void AddRetainingRoot(Root root, HeapObject* object);
|
||||
void AddRetainer(HeapObject retainer, HeapObject object);
|
||||
void AddEphemeronRetainer(HeapObject retainer, HeapObject object);
|
||||
void AddRetainingRoot(Root root, HeapObject object);
|
||||
// Returns true if the given object is a target of retaining path tracking.
|
||||
// Stores the option corresponding to the object in the provided *option.
|
||||
bool IsRetainingPathTarget(HeapObject* object, RetainingPathOption* option);
|
||||
void PrintRetainingPath(HeapObject* object, RetainingPathOption option);
|
||||
bool IsRetainingPathTarget(HeapObject object, RetainingPathOption* option);
|
||||
void PrintRetainingPath(HeapObject object, RetainingPathOption option);
|
||||
|
||||
#ifdef DEBUG
|
||||
void IncrementObjectCounters();
|
||||
@ -1998,7 +1992,7 @@ class Heap {
bool force_oom_ = false;
bool delay_sweeper_tasks_for_testing_ = false;

HeapObject* pending_layout_change_object_ = nullptr;
HeapObject pending_layout_change_object_;

base::Mutex unprotected_memory_chunks_mutex_;
std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
@ -2011,11 +2005,11 @@ class Heap {
int allocation_timeout_ = 0;
#endif // V8_ENABLE_ALLOCATION_TIMEOUT

std::map<HeapObject*, HeapObject*> retainer_;
std::map<HeapObject*, Root> retaining_root_;
std::map<HeapObject, HeapObject, HeapObject::Compare> retainer_;
std::map<HeapObject, Root, HeapObject::Compare> retaining_root_;
// If an object is retained by an ephemeron, then the retaining key of the
// ephemeron is stored in this map.
std::map<HeapObject*, HeapObject*> ephemeron_retainer_;
std::map<HeapObject, HeapObject, HeapObject::Compare> ephemeron_retainer_;
// For each index inthe retaining_path_targets_ array this map
// stores the option of the corresponding target.
std::map<int, RetainingPathOption> retaining_path_target_option_;
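Ordered maps keyed by HeapObject likewise need the explicit HeapObject::Compare seen above, since the value type no longer orders itself the way a pointer key did. A small hedged sketch with a stand-in type, not the real V8 class:

#include <cstdint>
#include <map>

class HeapObjectLike {
 public:
  explicit HeapObjectLike(uintptr_t ptr) : ptr_(ptr) {}
  uintptr_t ptr() const { return ptr_; }
  // Analogue of HeapObject::Compare: order objects by their tagged address.
  struct Compare {
    bool operator()(const HeapObjectLike& a, const HeapObjectLike& b) const {
      return a.ptr() < b.ptr();
    }
  };

 private:
  uintptr_t ptr_;
};

int main() {
  // std::map needs the comparator now that the key is a value type.
  std::map<HeapObjectLike, int, HeapObjectLike::Compare> retainer;
  retainer.emplace(HeapObjectLike(0x2000), 1);
  return retainer.count(HeapObjectLike(0x2000)) == 1 ? 0 : 1;
}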
@ -2161,9 +2155,9 @@ class CodePageMemoryModificationScope {
|
||||
class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
|
||||
public:
|
||||
explicit VerifyPointersVisitor(Heap* heap) : heap_(heap) {}
|
||||
void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override;
|
||||
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override;
|
||||
void VisitCodeTarget(Code host, RelocInfo* rinfo) override;
|
||||
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override;
|
||||
@ -2172,12 +2166,12 @@ class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
|
||||
FullObjectSlot start, FullObjectSlot end) override;
|
||||
|
||||
protected:
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject* heap_object);
|
||||
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object);
|
||||
|
||||
template <typename TSlot>
|
||||
V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end);
|
||||
|
||||
virtual void VerifyPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
virtual void VerifyPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end);
|
||||
|
||||
Heap* heap_;
|
||||
@ -2245,10 +2239,10 @@ class HeapIterator {
|
||||
HeapObjectsFiltering filtering = kNoFiltering);
|
||||
~HeapIterator();
|
||||
|
||||
HeapObject* next();
|
||||
HeapObject next();
|
||||
|
||||
private:
|
||||
HeapObject* NextObject();
|
||||
HeapObject NextObject();
|
||||
|
||||
DISALLOW_HEAP_ALLOCATION(no_heap_allocation_);
|
||||
|
||||
|
@ -15,7 +15,7 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
void IncrementalMarking::TransferColor(HeapObject* from, HeapObject* to) {
|
||||
void IncrementalMarking::TransferColor(HeapObject from, HeapObject to) {
|
||||
if (atomic_marking_state()->IsBlack(to)) {
|
||||
DCHECK(black_allocation());
|
||||
return;
|
||||
@ -33,7 +33,7 @@ void IncrementalMarking::TransferColor(HeapObject* from, HeapObject* to) {
|
||||
}
|
||||
}
|
||||
|
||||
void IncrementalMarking::RecordWrite(HeapObject* obj, ObjectSlot slot,
|
||||
void IncrementalMarking::RecordWrite(HeapObject obj, ObjectSlot slot,
|
||||
Object* value) {
|
||||
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
|
||||
DCHECK(!HasWeakHeapObjectTag(value));
|
||||
@ -42,12 +42,12 @@ void IncrementalMarking::RecordWrite(HeapObject* obj, ObjectSlot slot,
|
||||
}
|
||||
}
|
||||
|
||||
void IncrementalMarking::RecordMaybeWeakWrite(HeapObject* obj,
|
||||
void IncrementalMarking::RecordMaybeWeakWrite(HeapObject obj,
|
||||
MaybeObjectSlot slot,
|
||||
MaybeObject value) {
|
||||
// When writing a weak reference, treat it as strong for the purposes of the
|
||||
// marking barrier.
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (IsMarking() && value->GetHeapObject(&heap_object)) {
|
||||
RecordWriteSlow(obj, HeapObjectSlot(slot), heap_object);
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
|
||||
if (incremental_marking_.black_allocation() && addr != kNullAddress) {
|
||||
// AdvanceIncrementalMarkingOnAllocation can start black allocation.
|
||||
// Ensure that the new object is marked black.
|
||||
HeapObject* object = HeapObject::FromAddress(addr);
|
||||
HeapObject object = HeapObject::FromAddress(addr);
|
||||
if (incremental_marking_.marking_state()->IsWhite(object) &&
|
||||
!(Heap::InNewSpace(object) || heap->new_lo_space()->Contains(object))) {
|
||||
if (heap->IsLargeObject(object)) {
|
||||
@ -78,8 +78,8 @@ IncrementalMarking::IncrementalMarking(
|
||||
SetState(STOPPED);
|
||||
}
|
||||
|
||||
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
|
||||
HeapObject* value_heap_obj = HeapObject::cast(value);
|
||||
bool IncrementalMarking::BaseRecordWrite(HeapObject obj, Object* value) {
|
||||
HeapObject value_heap_obj = HeapObject::cast(value);
|
||||
DCHECK(!marking_state()->IsImpossible(value_heap_obj));
|
||||
DCHECK(!marking_state()->IsImpossible(obj));
|
||||
#ifdef V8_CONCURRENT_MARKING
|
||||
@ -96,7 +96,7 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
|
||||
return is_compacting_ && need_recording;
|
||||
}
|
||||
|
||||
void IncrementalMarking::RecordWriteSlow(HeapObject* obj, HeapObjectSlot slot,
|
||||
void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
|
||||
Object* value) {
|
||||
if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
|
||||
// Object is not going to be rescanned we need to record the slot.
|
||||
@ -105,10 +105,10 @@ void IncrementalMarking::RecordWriteSlow(HeapObject* obj, HeapObjectSlot slot,
|
||||
}
|
||||
}
|
||||
|
||||
int IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
int IncrementalMarking::RecordWriteFromCode(Address raw_obj,
Address slot_address,
Isolate* isolate) {
DCHECK(obj->IsHeapObject());
HeapObject obj = HeapObject::cast(ObjectPtr(raw_obj));
MaybeObjectSlot slot(slot_address);
isolate->heap()->incremental_marking()->RecordMaybeWeakWrite(obj, slot,
*slot);
@ -117,7 +117,7 @@ int IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
}

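RecordWriteFromCode now receives a raw Address from generated code and rebuilds the HeapObject value on the C++ side (HeapObject::cast(ObjectPtr(raw_obj)) above). A simplified, assumed sketch of that round-trip; the Address alias, HeapObjectLike and RecordWriteFromCodeLike are stand-ins, not the V8 API:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

// Stand-in for the value-type HeapObject: constructible from a raw Address
// and convertible back to one.
class HeapObjectLike {
 public:
  explicit HeapObjectLike(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }

 private:
  Address ptr_;
};

// Generated code passes plain addresses; C++ re-wraps them as object values.
int RecordWriteFromCodeLike(Address raw_obj, Address slot_address) {
  HeapObjectLike obj(raw_obj);
  (void)slot_address;  // a real barrier would record this slot
  return obj.ptr() != 0 ? 1 : 0;
}

int main() {
  assert(RecordWriteFromCodeLike(0x1000, 0x1008) == 1);
}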
void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
|
||||
HeapObject* value) {
|
||||
HeapObject value) {
|
||||
DCHECK(IsMarking());
|
||||
if (BaseRecordWrite(host, value)) {
|
||||
// Object is not going to be rescanned. We need to record the slot.
|
||||
@ -125,7 +125,7 @@ void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
|
||||
}
|
||||
}
|
||||
|
||||
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
|
||||
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
|
||||
if (marking_state()->WhiteToGrey(obj)) {
|
||||
marking_worklist()->Push(obj);
|
||||
return true;
|
||||
@ -133,7 +133,7 @@ bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
|
||||
return false;
|
||||
}
|
||||
|
||||
void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
|
||||
void IncrementalMarking::MarkBlackAndPush(HeapObject obj) {
|
||||
// Marking left-trimmable fixed array black is unsafe because left-trimming
|
||||
// re-pushes only grey arrays onto the marking worklist.
|
||||
DCHECK(!obj->IsFixedArray() && !obj->IsFixedDoubleArray());
|
||||
@ -148,7 +148,7 @@ void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
|
||||
}
|
||||
}
|
||||
|
||||
void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
|
||||
void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
|
||||
DCHECK(IsMarking());
|
||||
DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
|
||||
DCHECK_EQ(MemoryChunk::FromAddress(from->address()),
|
||||
@ -506,7 +506,7 @@ void IncrementalMarking::RetainMaps() {
|
||||
int number_of_disposed_maps = heap()->number_of_disposed_maps_;
|
||||
for (int i = 0; i < length; i += 2) {
|
||||
MaybeObject value = retained_maps->Get(i);
|
||||
HeapObject* map_heap_object;
|
||||
HeapObject map_heap_object;
|
||||
if (!value->GetHeapObjectIfWeak(&map_heap_object)) {
|
||||
continue;
|
||||
}
|
||||
@ -586,11 +586,11 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
|
||||
|
||||
marking_worklist()->Update([
|
||||
#ifdef DEBUG
|
||||
// this is referred inside DCHECK.
|
||||
// this is referred inside DCHECK.
|
||||
this,
|
||||
#endif
|
||||
filler_map, minor_marking_state](
|
||||
HeapObject* obj, HeapObject** out) -> bool {
|
||||
HeapObject obj, HeapObject* out) -> bool {
|
||||
DCHECK(obj->IsHeapObject());
|
||||
// Only pointers to from space have to be updated.
|
||||
if (Heap::InFromSpace(obj)) {
|
||||
@ -603,7 +603,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
|
||||
// them.
|
||||
return false;
|
||||
}
|
||||
HeapObject* dest = map_word.ToForwardingAddress();
|
||||
HeapObject dest = map_word.ToForwardingAddress();
|
||||
DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
|
||||
*out = dest;
|
||||
return true;
|
||||
@ -646,23 +646,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
}

namespace {
template <typename T, typename = typename std::enable_if<
std::is_base_of<HeapObject, T>::value>::type>
T* ForwardingAddress(T* heap_obj) {
MapWord map_word = heap_obj->map_word();

if (map_word.IsForwardingAddress()) {
return T::cast(map_word.ToForwardingAddress());
} else if (Heap::InNewSpace(heap_obj)) {
return nullptr;
} else {
return heap_obj;
}
}

// TODO(3770): Replacement for the above.
template <typename T, typename = typename std::enable_if<
std::is_base_of<HeapObjectPtr, T>::value>::type>
template <typename T>
T ForwardingAddress(T heap_obj) {
MapWord map_word = heap_obj->map_word();

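With HeapObjectPtr folded back into HeapObject, the two SFINAE-constrained ForwardingAddress helpers above collapse into one plain template (continued in the next hunk). A hedged, stand-alone sketch of the same shape; ObjLike and its forwarding convention are simplified stand-ins, and the real helper also returns a null object for unforwarded new-space objects:

#include <cassert>
#include <cstdint>

// Minimal stand-in: an object is "forwarded" when it records the address it
// was moved to; otherwise it is returned unchanged.
struct ObjLike {
  uintptr_t ptr;
  uintptr_t forwarded_to;  // 0 means "not forwarded"
};

template <typename T>
T ForwardingAddressLike(T heap_obj) {
  if (heap_obj.forwarded_to != 0) {
    return T{heap_obj.forwarded_to, 0};  // follow the forwarding pointer
  }
  return heap_obj;                       // object did not move
}

int main() {
  ObjLike moved{0x1000, 0x2000};
  assert(ForwardingAddressLike(moved).ptr == 0x2000);
  ObjLike stable{0x3000, 0};
  assert(ForwardingAddressLike(stable).ptr == 0x3000);
}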
@ -678,16 +662,15 @@ T ForwardingAddress(T heap_obj) {

void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
|
||||
weak_objects_->weak_references.Update(
|
||||
[](std::pair<HeapObject*, HeapObjectSlot> slot_in,
|
||||
std::pair<HeapObject*, HeapObjectSlot>* slot_out) -> bool {
|
||||
HeapObject* heap_obj = slot_in.first;
|
||||
HeapObject* forwarded = ForwardingAddress(heap_obj);
|
||||
[](std::pair<HeapObject, HeapObjectSlot> slot_in,
|
||||
std::pair<HeapObject, HeapObjectSlot>* slot_out) -> bool {
|
||||
HeapObject heap_obj = slot_in.first;
|
||||
HeapObject forwarded = ForwardingAddress(heap_obj);
|
||||
|
||||
if (forwarded) {
|
||||
ptrdiff_t distance_to_slot = slot_in.second.address() -
|
||||
reinterpret_cast<Address>(slot_in.first);
|
||||
Address new_slot =
|
||||
reinterpret_cast<Address>(forwarded) + distance_to_slot;
|
||||
if (!forwarded.is_null()) {
|
||||
ptrdiff_t distance_to_slot =
|
||||
slot_in.second.address() - slot_in.first.ptr();
|
||||
Address new_slot = forwarded.ptr() + distance_to_slot;
|
||||
slot_out->first = forwarded;
|
||||
slot_out->second = HeapObjectSlot(new_slot);
|
||||
return true;
|
||||
@ -696,12 +679,12 @@ void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
|
||||
return false;
|
||||
});
|
||||
weak_objects_->weak_objects_in_code.Update(
|
||||
[](std::pair<HeapObject*, Code> slot_in,
|
||||
std::pair<HeapObject*, Code>* slot_out) -> bool {
|
||||
HeapObject* heap_obj = slot_in.first;
|
||||
HeapObject* forwarded = ForwardingAddress(heap_obj);
|
||||
[](std::pair<HeapObject, Code> slot_in,
|
||||
std::pair<HeapObject, Code>* slot_out) -> bool {
|
||||
HeapObject heap_obj = slot_in.first;
|
||||
HeapObject forwarded = ForwardingAddress(heap_obj);
|
||||
|
||||
if (forwarded) {
|
||||
if (!forwarded.is_null()) {
|
||||
slot_out->first = forwarded;
|
||||
slot_out->second = slot_in.second;
|
||||
return true;
|
||||
@ -722,12 +705,12 @@ void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
|
||||
});
|
||||
|
||||
auto ephemeron_updater = [](Ephemeron slot_in, Ephemeron* slot_out) -> bool {
|
||||
HeapObject* key = slot_in.key;
|
||||
HeapObject* value = slot_in.value;
|
||||
HeapObject* forwarded_key = ForwardingAddress(key);
|
||||
HeapObject* forwarded_value = ForwardingAddress(value);
|
||||
HeapObject key = slot_in.key;
|
||||
HeapObject value = slot_in.value;
|
||||
HeapObject forwarded_key = ForwardingAddress(key);
|
||||
HeapObject forwarded_value = ForwardingAddress(value);
|
||||
|
||||
if (forwarded_key && forwarded_value) {
|
||||
if (!forwarded_key.is_null() && !forwarded_value.is_null()) {
|
||||
*slot_out = Ephemeron{forwarded_key, forwarded_value};
|
||||
return true;
|
||||
}
|
||||
@ -753,13 +736,13 @@ void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
|
||||
Min(bytes_marked_ahead_of_schedule_, dead_bytes_in_new_space);
|
||||
}
|
||||
|
||||
bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject* obj) {
bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject obj) {
if (!obj->IsFixedArray()) return false;
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
}

int IncrementalMarking::VisitObject(Map map, HeapObject* obj) {
int IncrementalMarking::VisitObject(Map map, HeapObject obj) {
DCHECK(marking_state()->IsGrey(obj) || marking_state()->IsBlack(obj));
if (!marking_state()->GreyToBlack(obj)) {
// The object can already be black in these cases:
@ -780,13 +763,13 @@ int IncrementalMarking::VisitObject(Map map, HeapObject* obj) {
return visitor.Visit(map, obj);
}

void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject* obj) {
void IncrementalMarking::ProcessBlackAllocatedObject(HeapObject obj) {
if (IsMarking() && marking_state()->IsBlack(obj)) {
RevisitObject(obj);
}
}

void IncrementalMarking::RevisitObject(HeapObject* obj) {
void IncrementalMarking::RevisitObject(HeapObject obj) {
DCHECK(IsMarking());
DCHECK(FLAG_concurrent_marking || marking_state()->IsBlack(obj));
Page* page = Page::FromAddress(obj->address());
@ -812,13 +795,13 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
intptr_t bytes_to_process, ForceCompletionAction completion) {
intptr_t bytes_processed = 0;
while (bytes_processed < bytes_to_process || completion == FORCE_COMPLETION) {
HeapObject* obj;
HeapObject obj;
if (worklist_to_process == WorklistToProcess::kBailout) {
obj = marking_worklist()->PopBailout();
} else {
obj = marking_worklist()->Pop();
}
if (obj == nullptr) break;
if (obj.is_null()) break;
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
if (obj->IsFiller()) {
@ -842,7 +825,7 @@ void IncrementalMarking::EmbedderStep(double duration_ms) {
{
LocalEmbedderHeapTracer::ProcessingScope scope(
heap_->local_embedder_heap_tracer());
HeapObject* object;
HeapObject object;
size_t cnt = 0;
empty_worklist = true;
while (marking_worklist()->embedder()->Pop(0, &object)) {

@ -98,9 +98,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
return &non_atomic_marking_state_;
}

void NotifyLeftTrimming(HeapObject* from, HeapObject* to);
void NotifyLeftTrimming(HeapObject from, HeapObject to);

V8_INLINE void TransferColor(HeapObject* from, HeapObject* to);
V8_INLINE void TransferColor(HeapObject from, HeapObject to);

State state() const {
DCHECK(state_ == STOPPED || FLAG_incremental_marking);
@ -184,9 +184,10 @@ class V8_EXPORT_PRIVATE IncrementalMarking {

inline void RestartIfNotMarking();

// {slot_address} is a raw Address instead of a MaybeObjectSlot because
// this is called from generated code via ExternalReference.
static int RecordWriteFromCode(HeapObject* obj, Address slot_address,
// {raw_obj} and {slot_address} are raw Address values instead of a
// HeapObject and a MaybeObjectSlot because this is called from
// generated code via ExternalReference.
static int RecordWriteFromCode(Address raw_obj, Address slot_address,
Isolate* isolate);

// Record a slot for compaction. Returns false for objects that are
@ -195,26 +196,26 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
// No slots in white objects should be recorded, as some slots are typed and
// cannot be interpreted correctly if the underlying object does not survive
// the incremental cycle (stays white).
V8_INLINE bool BaseRecordWrite(HeapObject* obj, Object* value);
V8_INLINE void RecordWrite(HeapObject* obj, ObjectSlot slot, Object* value);
V8_INLINE void RecordMaybeWeakWrite(HeapObject* obj, MaybeObjectSlot slot,
V8_INLINE bool BaseRecordWrite(HeapObject obj, Object* value);
V8_INLINE void RecordWrite(HeapObject obj, ObjectSlot slot, Object* value);
V8_INLINE void RecordMaybeWeakWrite(HeapObject obj, MaybeObjectSlot slot,
MaybeObject value);
void RevisitObject(HeapObject* obj);
void RevisitObject(HeapObject obj);
// Ensures that all descriptors in range [0, number_of_own_descriptors)
// are visited.
void VisitDescriptors(DescriptorArray array, int number_of_own_descriptors);

void RecordWriteSlow(HeapObject* obj, HeapObjectSlot slot, Object* value);
void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject* value);
void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, Object* value);
void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);

// Returns true if the function succeeds in transitioning the object
// from white to grey.
bool WhiteToGreyAndPush(HeapObject* obj);
bool WhiteToGreyAndPush(HeapObject obj);

// This function is used to color the object black before it undergoes an
// unsafe layout change. This is a part of synchronization protocol with
// the concurrent marker.
void MarkBlackAndPush(HeapObject* obj);
void MarkBlackAndPush(HeapObject obj);

bool IsCompacting() { return IsMarking() && is_compacting_; }

@ -222,7 +223,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
unscanned_bytes_of_large_object_ = unscanned_bytes;
}

void ProcessBlackAllocatedObject(HeapObject* obj);
void ProcessBlackAllocatedObject(HeapObject obj);

Heap* heap() const { return heap_; }

@ -282,10 +283,10 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION);

V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject* object);
V8_INLINE bool IsFixedArrayWithProgressBar(HeapObject object);

// Visits the object and returns its size.
V8_INLINE int VisitObject(Map map, HeapObject* obj);
V8_INLINE int VisitObject(Map map, HeapObject obj);

void IncrementIdleMarkingDelayCounter();

@ -31,7 +31,7 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
DCHECK_LE(invalidated_end_, iterator_->first->address());
invalidated_start_ = iterator_->first->address();
invalidated_end_ = invalidated_start_ + iterator_->second;
invalidated_object_ = nullptr;
invalidated_object_ = HeapObject();
invalidated_object_size_ = 0;
} else {
invalidated_start_ = sentinel_;
@ -45,7 +45,7 @@ bool InvalidatedSlotsFilter::IsValid(Address slot) {
}
// The invalidated region includes the slot.
// Ask the object if the slot is valid.
if (invalidated_object_ == nullptr) {
if (invalidated_object_.is_null()) {
invalidated_object_ = HeapObject::FromAddress(invalidated_start_);
DCHECK(!invalidated_object_->IsFiller());
invalidated_object_size_ =

@ -31,7 +31,6 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter(MemoryChunk* chunk) {
invalidated_end_ = sentinel_;
}
// These values will be lazily set when needed.
invalidated_object_ = nullptr;
invalidated_object_size_ = 0;
#ifdef DEBUG
last_slot_ = chunk->area_start();

@ -10,18 +10,17 @@

#include "src/allocation.h"
#include "src/base/atomic-utils.h"
#include "src/objects/heap-object.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

class HeapObject;

// This data structure stores objects that went through object layout change
// that potentially invalidates slots recorded concurrently. The second part
// of each element is the size of the corresponding object before the layout
// change.
using InvalidatedSlots = std::map<HeapObject*, int>;
using InvalidatedSlots = std::map<HeapObject, int, HeapObject::Compare>;

// This class provides IsValid predicate that takes into account the set
// of invalidated objects in the given memory chunk.
@ -40,7 +39,7 @@ class InvalidatedSlotsFilter {
Address sentinel_;
Address invalidated_start_;
Address invalidated_end_;
HeapObject* invalidated_object_;
HeapObject invalidated_object_;
int invalidated_object_size_;
bool slots_in_free_space_are_valid_;
InvalidatedSlots empty_;

@ -30,7 +30,7 @@ AllocationResult LocalAllocator::Allocate(AllocationSpace space,
}
}

void LocalAllocator::FreeLast(AllocationSpace space, HeapObject* object,
void LocalAllocator::FreeLast(AllocationSpace space, HeapObject object,
int object_size) {
switch (space) {
case NEW_SPACE:
@ -46,7 +46,7 @@ void LocalAllocator::FreeLast(AllocationSpace space, HeapObject* object,
}
}

void LocalAllocator::FreeLastInNewSpace(HeapObject* object, int object_size) {
void LocalAllocator::FreeLastInNewSpace(HeapObject object, int object_size) {
if (!new_space_lab_.TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
heap_->CreateFillerObjectAt(object->address(), object_size,
@ -54,7 +54,7 @@ void LocalAllocator::FreeLastInNewSpace(HeapObject* object, int object_size) {
}
}

void LocalAllocator::FreeLastInOldSpace(HeapObject* object, int object_size) {
void LocalAllocator::FreeLastInOldSpace(HeapObject object, int object_size) {
if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
// We couldn't free the last object so we have to write a proper filler.
heap_->CreateFillerObjectAt(object->address(), object_size,

@ -43,7 +43,7 @@ class LocalAllocator {

inline AllocationResult Allocate(AllocationSpace space, int object_size,
AllocationAlignment alignment);
inline void FreeLast(AllocationSpace space, HeapObject* object,
inline void FreeLast(AllocationSpace space, HeapObject object,
int object_size);

private:
@ -52,8 +52,8 @@ class LocalAllocator {
inline bool NewLocalAllocationBuffer();
inline AllocationResult AllocateInLAB(int object_size,
AllocationAlignment alignment);
inline void FreeLastInNewSpace(HeapObject* object, int object_size);
inline void FreeLastInOldSpace(HeapObject* object, int object_size);
inline void FreeLastInNewSpace(HeapObject object, int object_size);
inline void FreeLastInOldSpace(HeapObject object, int object_size);

Heap* const heap_;
NewSpace* const new_space_;

@ -18,8 +18,7 @@ namespace v8 {
namespace internal {

template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(
HeapObject* obj) {
bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
MemoryChunk* p = MemoryChunk::FromAddress(obj->address());
MarkBit markbit = MarkBitFrom(p, obj->address());
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
@ -28,14 +27,13 @@ bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(
}

template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(
HeapObject* obj) {
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) {
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}

template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
HeapObject* obj) {
HeapObject obj) {
return WhiteToGrey(obj) && GreyToBlack(obj);
}

@ -154,7 +152,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
for (int i = 0; i < table->Capacity(); i++) {
ObjectSlot key_slot =
table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
HeapObject* key = HeapObject::cast(table->KeyAt(i));
HeapObject key = HeapObject::cast(table->KeyAt(i));
collector_->RecordSlot(table, key_slot, key);

ObjectSlot value_slot =
@ -167,7 +165,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
Object* value_obj = *value_slot;

if (value_obj->IsHeapObject()) {
HeapObject* value = HeapObject::cast(value_obj);
HeapObject value = HeapObject::cast(value_obj);
collector_->RecordSlot(table, value_slot, value);

// Revisit ephemerons with both key and value unreachable at end
@ -215,7 +213,7 @@ template <FixedArrayVisitationMode fixed_array_mode,
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
if (weak_ref->target()->IsHeapObject()) {
HeapObject* target = HeapObject::cast(weak_ref->target());
HeapObject target = HeapObject::cast(weak_ref->target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the IterateBody below
// won't visit it.
@ -239,7 +237,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitJSWeakCell(Map map,
JSWeakCell weak_cell) {
if (weak_cell->target()->IsHeapObject()) {
HeapObject* target = HeapObject::cast(weak_cell->target());
HeapObject target = HeapObject::cast(weak_cell->target());
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakCell, since the IterateBody below
// won't visit it.
@ -263,13 +261,13 @@ template <FixedArrayVisitationMode fixed_array_mode,
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitPointerImpl(HeapObject* host,
MarkingState>::VisitPointerImpl(HeapObject host,
TSlot slot) {
static_assert(std::is_same<TSlot, ObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only ObjectSlot and MaybeObjectSlot are expected here");
typename TSlot::TObject object = slot.load();
HeapObject* target_object;
HeapObject target_object;
if (object.GetHeapObjectIfStrong(&target_object)) {
collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
MarkObject(host, target_object);
@ -293,7 +291,7 @@ template <FixedArrayVisitationMode fixed_array_mode,
// method template arguments
template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitPointersImpl(HeapObject* host,
MarkingState>::VisitPointersImpl(HeapObject host,
TSlot start, TSlot end) {
for (TSlot p = start; p < end; ++p) {
VisitPointer(host, p);
@ -306,7 +304,7 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::VisitEmbeddedPointer(Code host,
RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* object = HeapObject::cast(rinfo->target_object());
HeapObject object = HeapObject::cast(rinfo->target_object());
collector_->RecordRelocSlot(host, rinfo, object);
if (!marking_state()->IsBlackOrGrey(object)) {
if (host->IsWeakObject(object)) {
@ -331,8 +329,8 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::MarkObjectWithoutPush(HeapObject* host,
HeapObject* object) {
MarkingState>::MarkObjectWithoutPush(HeapObject host,
HeapObject object) {
if (marking_state()->WhiteToBlack(object)) {
if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
V8_UNLIKELY(FLAG_track_retaining_path)) {
@ -346,8 +344,8 @@ bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState>::MarkObject(HeapObject* host,
HeapObject* object) {
MarkingState>::MarkObject(HeapObject host,
HeapObject object) {
if (marking_state()->WhiteToGrey(object)) {
marking_worklist()->Push(object);
if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
@ -444,7 +442,7 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
}
}

void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@ -453,7 +451,7 @@ void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
}
}

void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@ -464,7 +462,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {

#ifdef ENABLE_MINOR_MC

void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
worklist_->Push(kMainThread, obj);
}
@ -472,7 +470,7 @@ void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {

#endif

void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@ -481,15 +479,15 @@ void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
}
}

void MarkCompactCollector::RecordSlot(HeapObject* object, ObjectSlot slot,
HeapObject* target) {
void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
HeapObject target) {
RecordSlot(object, HeapObjectSlot(slot), target);
}

void MarkCompactCollector::RecordSlot(HeapObject* object, HeapObjectSlot slot,
HeapObject* target) {
Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
HeapObject target) {
Page* target_page = Page::FromHeapObject(target);
Page* source_page = Page::FromHeapObject(object);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
!source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
@ -521,8 +519,6 @@ LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
cell_base_ = it_.CurrentCellBase();
current_cell_ = *it_.CurrentCell();
AdvanceToNextValidObject();
} else {
current_object_ = nullptr;
}
}

@ -544,7 +540,7 @@ operator++(int) {
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
while (!it_.Done()) {
HeapObject* object = nullptr;
HeapObject object;
int size = 0;
while (current_cell_ != 0) {
uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
@ -563,7 +559,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
// that case we can return immediately.
if (!it_.Advance()) {
DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
current_object_ = nullptr;
current_object_ = HeapObject();
return;
}
cell_base_ = it_.CurrentCellBase();
@ -577,7 +573,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
// We found a black object. If the black object is within a black area,
// make sure that we skip all set bits in the black area until the
// object ends.
HeapObject* black_object = HeapObject::FromAddress(addr);
HeapObject black_object = HeapObject::FromAddress(addr);
map = Map::cast(ObjectSlot(addr).Acquire_Load());
size = black_object->SizeFromMap(map);
Address end = addr + size - kTaggedSize;
@ -611,7 +607,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
}

// We found a live object.
if (object != nullptr) {
if (!object.is_null()) {
// Do not use IsFiller() here. This may cause a data race for reading
// out the instance type when a new map concurrently is written into
// this object while iterating over the object.
@ -623,7 +619,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
// 2) Left trimming may leave black or grey fillers behind because we
// do not clear the old location of the object start.
// We filter these objects out in the iterator.
object = nullptr;
object = HeapObject();
} else {
break;
}
@ -636,13 +632,13 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
current_cell_ = *it_.CurrentCell();
}
}
if (object != nullptr) {
if (!object.is_null()) {
current_object_ = object;
current_size_ = size;
return;
}
}
current_object_ = nullptr;
current_object_ = HeapObject();
}

template <LiveObjectIterationMode mode>

@ -69,16 +69,16 @@ class MarkingVerifier : public ObjectVisitor, public RootVisitor {
virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0;
virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0;

virtual bool IsMarked(HeapObject* object) = 0;
virtual bool IsMarked(HeapObject object) = 0;

virtual bool IsBlackOrGrey(HeapObject* object) = 0;
virtual bool IsBlackOrGrey(HeapObject object) = 0;

void VisitPointers(HeapObject* host, ObjectSlot start,
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
VerifyPointers(start, end);
}

void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
VerifyPointers(start, end);
}
@ -103,7 +103,7 @@ void MarkingVerifier::VerifyRoots(VisitMode mode) {

void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
Address end) {
HeapObject* object;
HeapObject object;
Address next_object_must_be_here_or_later = start;
for (Address current = start; current < end;) {
object = HeapObject::FromAddress(current);
@ -154,7 +154,7 @@ void MarkingVerifier::VerifyMarking(PagedSpace* space) {

void MarkingVerifier::VerifyMarking(LargeObjectSpace* lo_space) {
LargeObjectIterator it(lo_space);
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
if (IsBlackOrGrey(obj)) {
obj->Iterate(this);
}
@ -183,11 +183,11 @@ class FullMarkingVerifier : public MarkingVerifier {
return marking_state_->bitmap(chunk);
}

bool IsMarked(HeapObject* object) override {
bool IsMarked(HeapObject object) override {
return marking_state_->IsBlack(object);
}

bool IsBlackOrGrey(HeapObject* object) override {
bool IsBlackOrGrey(HeapObject object) override {
return marking_state_->IsBlackOrGrey(object);
}

@ -211,13 +211,13 @@ class FullMarkingVerifier : public MarkingVerifier {
void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
if (!host->IsWeakObject(rinfo->target_object())) {
HeapObject* object = rinfo->target_object();
HeapObject object = rinfo->target_object();
VerifyHeapObjectImpl(object);
}
}

private:
V8_INLINE void VerifyHeapObjectImpl(HeapObject* heap_object) {
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK(marking_state_->IsBlackOrGrey(heap_object));
}

@ -225,7 +225,7 @@ class FullMarkingVerifier : public MarkingVerifier {
V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = slot.load();
HeapObject* heap_object;
HeapObject heap_object;
if (object.GetHeapObjectIfStrong(&heap_object)) {
VerifyHeapObjectImpl(heap_object);
}
@ -239,12 +239,12 @@ class EvacuationVerifier : public ObjectVisitor, public RootVisitor {
public:
virtual void Run() = 0;

void VisitPointers(HeapObject* host, ObjectSlot start,
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
VerifyPointers(start, end);
}

void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
VerifyPointers(start, end);
}
@ -278,7 +278,7 @@ void EvacuationVerifier::VerifyRoots(VisitMode mode) {
void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
Address current = start;
while (current < end) {
HeapObject* object = HeapObject::FromAddress(current);
HeapObject object = HeapObject::FromAddress(current);
if (!object->IsFiller()) object->Iterate(this);
current += object->Size();
}
@ -321,7 +321,7 @@ class FullEvacuationVerifier : public EvacuationVerifier {
}

protected:
V8_INLINE void VerifyHeapObjectImpl(HeapObject* heap_object) {
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK_IMPLIES(Heap::InNewSpace(heap_object), Heap::InToSpace(heap_object));
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object));
}
@ -330,7 +330,7 @@ class FullEvacuationVerifier : public EvacuationVerifier {
void VerifyPointersImpl(TSlot start, TSlot end) {
for (TSlot current = start; current < end; ++current) {
typename TSlot::TObject object = current.load();
HeapObject* heap_object;
HeapObject heap_object;
if (object.GetHeapObjectIfStrong(&heap_object)) {
VerifyHeapObjectImpl(heap_object);
}
@ -510,7 +510,8 @@ void MarkCompactCollector::CollectGarbage() {
#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreDirty(PagedSpace* space) {
HeapObjectIterator iterator(space);
while (HeapObject* object = iterator.Next()) {
for (HeapObject object = iterator.Next(); !object.is_null();
object = iterator.Next()) {
CHECK(non_atomic_marking_state()->IsBlack(object));
}
}
@ -532,7 +533,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {

void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
LargeObjectIterator it(space);
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
CHECK(non_atomic_marking_state()->IsWhite(obj));
CHECK_EQ(0, non_atomic_marking_state()->live_bytes(
MemoryChunk::FromAddress(obj->address())));
@ -926,18 +927,18 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final
explicit CustomRootBodyMarkingVisitor(MarkCompactCollector* collector)
: collector_(collector) {}

void VisitPointer(HeapObject* host, ObjectSlot p) final {
void VisitPointer(HeapObject host, ObjectSlot p) final {
MarkObject(host, *p);
}

void VisitPointers(HeapObject* host, ObjectSlot start, ObjectSlot end) final {
void VisitPointers(HeapObject host, ObjectSlot start, ObjectSlot end) final {
for (ObjectSlot p = start; p < end; ++p) {
DCHECK(!HasWeakHeapObjectTag(*p));
MarkObject(host, *p);
}
}

void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
// At the moment, custom roots cannot contain weak pointers.
UNREACHABLE();
@ -953,7 +954,7 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final
}

private:
V8_INLINE void MarkObject(HeapObject* host, Object* object) {
V8_INLINE void MarkObject(HeapObject host, Object* object) {
if (!object->IsHeapObject()) return;
collector_->MarkObject(host, HeapObject::cast(object));
}
@ -963,10 +964,10 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final

class InternalizedStringTableCleaner : public ObjectVisitor {
public:
InternalizedStringTableCleaner(Heap* heap, HeapObject* table)
InternalizedStringTableCleaner(Heap* heap, HeapObject table)
: heap_(heap), pointers_removed_(0), table_(table) {}

void VisitPointers(HeapObject* host, ObjectSlot start,
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
// Visit all HeapObject pointers in [start, end).
Object* the_hole = ReadOnlyRoots(heap_).the_hole_value();
@ -975,7 +976,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
for (ObjectSlot p = start; p < end; ++p) {
Object* o = *p;
if (o->IsHeapObject()) {
HeapObject* heap_object = HeapObject::cast(o);
HeapObject heap_object = HeapObject::cast(o);
if (marking_state->IsWhite(heap_object)) {
pointers_removed_++;
// Set the entry to the_hole_value (as deleted).
@ -989,7 +990,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
}
}

void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
UNREACHABLE();
}
@ -1007,7 +1008,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
private:
Heap* heap_;
int pointers_removed_;
HeapObject* table_;
HeapObject table_;
};

class ExternalStringTableCleaner : public RootVisitor {
@ -1023,7 +1024,7 @@ class ExternalStringTableCleaner : public RootVisitor {
for (FullObjectSlot p = start; p < end; ++p) {
Object* o = *p;
if (o->IsHeapObject()) {
HeapObject* heap_object = HeapObject::cast(o);
HeapObject heap_object = HeapObject::cast(o);
if (marking_state->IsWhite(heap_object)) {
if (o->IsExternalString()) {
heap_->FinalizeExternalString(String::cast(o));
@ -1051,7 +1052,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
: marking_state_(marking_state) {}

Object* RetainAs(Object* object) override {
HeapObject* heap_object = HeapObject::cast(object);
HeapObject heap_object = HeapObject::cast(object);
DCHECK(!marking_state_->IsGrey(heap_object));
if (marking_state_->IsBlack(heap_object)) {
return object;
@ -1085,16 +1086,16 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector)
: collector_(collector) {}

inline void VisitPointer(HeapObject* host, ObjectSlot p) final {
inline void VisitPointer(HeapObject host, ObjectSlot p) final {
DCHECK(!HasWeakHeapObjectTag(*p));
RecordMigratedSlot(host, MaybeObject::FromObject(*p), p.address());
}

inline void VisitPointer(HeapObject* host, MaybeObjectSlot p) final {
inline void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
RecordMigratedSlot(host, *p, p.address());
}

inline void VisitPointers(HeapObject* host, ObjectSlot start,
inline void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) final {
while (start < end) {
VisitPointer(host, start);
@ -1102,7 +1103,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
}
}

inline void VisitPointers(HeapObject* host, MaybeObjectSlot start,
inline void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
while (start < end) {
VisitPointer(host, start);
@ -1123,7 +1124,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
inline void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
DCHECK_EQ(host, rinfo->host());
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
HeapObject* object = HeapObject::cast(rinfo->target_object());
HeapObject object = HeapObject::cast(rinfo->target_object());
GenerationalBarrierForCode(host, rinfo, object);
collector_->RecordRelocSlot(host, rinfo, object);
}
@ -1135,7 +1136,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {}

protected:
inline virtual void RecordMigratedSlot(HeapObject* host, MaybeObject value,
inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value,
Address slot) {
if (value->IsStrongOrWeak()) {
Page* p = Page::FromAddress(value.ptr());
@ -1159,7 +1160,7 @@ class MigrationObserver {
explicit MigrationObserver(Heap* heap) : heap_(heap) {}

virtual ~MigrationObserver() = default;
virtual void Move(AllocationSpace dest, HeapObject* src, HeapObject* dst,
virtual void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
int size) = 0;

protected:
@ -1170,7 +1171,7 @@ class ProfilingMigrationObserver final : public MigrationObserver {
public:
explicit ProfilingMigrationObserver(Heap* heap) : MigrationObserver(heap) {}

inline void Move(AllocationSpace dest, HeapObject* src, HeapObject* dst,
inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
int size) final {
if (dest == CODE_SPACE || (dest == OLD_SPACE && dst->IsBytecodeArray())) {
PROFILE(heap_->isolate(),
@ -1183,7 +1184,7 @@ class ProfilingMigrationObserver final : public MigrationObserver {
class HeapObjectVisitor {
public:
virtual ~HeapObjectVisitor() = default;
virtual bool Visit(HeapObject* object, int size) = 0;
virtual bool Visit(HeapObject object, int size) = 0;
};

class EvacuateVisitorBase : public HeapObjectVisitor {
@ -1196,14 +1197,13 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
protected:
enum MigrationMode { kFast, kObserved };

typedef void (*MigrateFunction)(EvacuateVisitorBase* base, HeapObject* dst,
HeapObject* src, int size,
typedef void (*MigrateFunction)(EvacuateVisitorBase* base, HeapObject dst,
HeapObject src, int size,
AllocationSpace dest);

template <MigrationMode mode>
static void RawMigrateObject(EvacuateVisitorBase* base, HeapObject* dst,
HeapObject* src, int size,
AllocationSpace dest) {
static void RawMigrateObject(EvacuateVisitorBase* base, HeapObject dst,
HeapObject src, int size, AllocationSpace dest) {
Address dst_addr = dst->address();
Address src_addr = src->address();
DCHECK(base->heap_->AllowedToBeMigrated(src, dest));
@ -1241,9 +1241,8 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
migration_function_ = RawMigrateObject<MigrationMode::kFast>;
}

inline bool TryEvacuateObject(AllocationSpace target_space,
HeapObject* object, int size,
HeapObject** target_object) {
inline bool TryEvacuateObject(AllocationSpace target_space, HeapObject object,
int size, HeapObject* target_object) {
#ifdef VERIFY_HEAP
if (AbortCompactionForTesting(object)) return false;
#endif // VERIFY_HEAP
@ -1258,20 +1257,20 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
return false;
}

inline void ExecuteMigrationObservers(AllocationSpace dest, HeapObject* src,
HeapObject* dst, int size) {
inline void ExecuteMigrationObservers(AllocationSpace dest, HeapObject src,
HeapObject dst, int size) {
for (MigrationObserver* obs : observers_) {
obs->Move(dest, src, dst, size);
}
}

inline void MigrateObject(HeapObject* dst, HeapObject* src, int size,
inline void MigrateObject(HeapObject dst, HeapObject src, int size,
AllocationSpace dest) {
migration_function_(this, dst, src, size, dest);
}

#ifdef VERIFY_HEAP
bool AbortCompactionForTesting(HeapObject* object) {
bool AbortCompactionForTesting(HeapObject object) {
if (FLAG_stress_compaction) {
const uintptr_t mask = static_cast<uintptr_t>(FLAG_random_seed) &
kPageAlignmentMask & ~kObjectAlignmentMask;
@ -1309,9 +1308,9 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
local_pretenuring_feedback_(local_pretenuring_feedback),
is_incremental_marking_(heap->incremental_marking()->IsMarking()) {}

inline bool Visit(HeapObject* object, int size) override {
inline bool Visit(HeapObject object, int size) override {
if (TryEvacuateWithoutCopy(object)) return true;
HeapObject* target_object = nullptr;
HeapObject target_object;
if (heap_->ShouldBePromoted(object->address()) &&
TryEvacuateObject(OLD_SPACE, object, size, &target_object)) {
promoted_size_ += size;
@ -1319,7 +1318,7 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
}
heap_->UpdateAllocationSite(object->map(), object,
local_pretenuring_feedback_);
HeapObject* target = nullptr;
HeapObject target;
AllocationSpace space = AllocateTargetObject(object, size, &target);
MigrateObject(HeapObject::cast(target), object, size, space);
semispace_copied_size_ += size;
@ -1330,14 +1329,14 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
intptr_t semispace_copied_size() { return semispace_copied_size_; }

private:
inline bool TryEvacuateWithoutCopy(HeapObject* object) {
inline bool TryEvacuateWithoutCopy(HeapObject object) {
if (is_incremental_marking_) return false;

Map map = object->map();

// Some objects can be evacuated without creating a copy.
if (map->visitor_id() == kVisitThinString) {
HeapObject* actual = ThinString::cast(object)->unchecked_actual();
HeapObject actual = ThinString::cast(object)->unchecked_actual();
if (MarkCompactCollector::IsOnEvacuationCandidate(actual)) return false;
object->map_slot().Relaxed_Store(
MapWord::FromForwardingAddress(actual).ToMap());
@ -1348,8 +1347,8 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase {
return false;
}

inline AllocationSpace AllocateTargetObject(HeapObject* old_object, int size,
HeapObject** target_object) {
inline AllocationSpace AllocateTargetObject(HeapObject old_object, int size,
HeapObject* target_object) {
AllocationAlignment alignment =
HeapObject::RequiredAlignment(old_object->map());
AllocationSpace space_allocated_in = NEW_SPACE;
@ -1410,7 +1409,7 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
}
}

inline bool Visit(HeapObject* object, int size) override {
inline bool Visit(HeapObject object, int size) override {
if (mode == NEW_TO_NEW) {
heap_->UpdateAllocationSite(object->map(), object,
local_pretenuring_feedback_);
@ -1436,8 +1435,8 @@ class EvacuateOldSpaceVisitor final : public EvacuateVisitorBase {
RecordMigratedSlotVisitor* record_visitor)
: EvacuateVisitorBase(heap, local_allocator, record_visitor) {}

inline bool Visit(HeapObject* object, int size) override {
HeapObject* target_object = nullptr;
inline bool Visit(HeapObject object, int size) override {
HeapObject target_object;
if (TryEvacuateObject(
Page::FromAddress(object->address())->owner()->identity(), object,
size, &target_object)) {
@ -1452,7 +1451,7 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
public:
explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {}

inline bool Visit(HeapObject* object, int size) override {
inline bool Visit(HeapObject object, int size) override {
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBodyFast(&visitor);
return true;
@ -1465,7 +1464,7 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
bool MarkCompactCollector::IsUnmarkedHeapObject(Heap* heap, FullObjectSlot p) {
Object* o = *p;
if (!o->IsHeapObject()) return false;
HeapObject* heap_object = HeapObject::cast(o);
HeapObject heap_object = HeapObject::cast(o);
return heap->mark_compact_collector()->non_atomic_marking_state()->IsWhite(
heap_object);
}
@ -1574,7 +1573,8 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR);
CHECK(heap()->concurrent_marking()->IsStopped());
std::unordered_multimap<HeapObject*, HeapObject*> key_to_values;
std::unordered_multimap<HeapObject, HeapObject, HeapObject::Hasher>
key_to_values;
Ephemeron ephemeron;

DCHECK(weak_objects_.current_ephemerons.IsEmpty());
@ -1629,10 +1629,10 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
// This is the good case: newly_discovered stores all discovered
// objects. Now use key_to_values to see if discovered objects keep more
// objects alive due to ephemeron semantics.
for (HeapObject* object : ephemeron_marking_.newly_discovered) {
for (HeapObject object : ephemeron_marking_.newly_discovered) {
auto range = key_to_values.equal_range(object);
for (auto it = range.first; it != range.second; ++it) {
HeapObject* value = it->second;
HeapObject value = it->second;
MarkObject(object, value);
}
}
@ -1659,7 +1659,7 @@ void MarkCompactCollector::PerformWrapperTracing() {
{
LocalEmbedderHeapTracer::ProcessingScope scope(
heap_->local_embedder_heap_tracer());
HeapObject* object;
HeapObject object;
while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
scope.TracePossibleWrapper(JSObject::cast(object));
}
@ -1676,9 +1676,9 @@ void MarkCompactCollector::ProcessMarkingWorklist() {

template <MarkCompactCollector::MarkingWorklistProcessingMode mode>
void MarkCompactCollector::ProcessMarkingWorklistInternal() {
HeapObject* object;
HeapObject object;
MarkCompactMarkingVisitor visitor(this, marking_state());
while ((object = marking_worklist()->Pop()) != nullptr) {
while (!(object = marking_worklist()->Pop()).is_null()) {
DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object));
@ -1695,7 +1695,7 @@ void MarkCompactCollector::ProcessMarkingWorklistInternal() {
DCHECK(marking_worklist()->IsBailoutEmpty());
}

bool MarkCompactCollector::VisitEphemeron(HeapObject* key, HeapObject* value) {
bool MarkCompactCollector::VisitEphemeron(HeapObject key, HeapObject value) {
if (marking_state()->IsBlackOrGrey(key)) {
if (marking_state()->WhiteToGrey(value)) {
marking_worklist()->Push(value);
@ -1940,10 +1940,10 @@ void MarkCompactCollector::ClearNonLiveReferences() {
}

void MarkCompactCollector::MarkDependentCodeForDeoptimization() {
std::pair<HeapObject*, Code> weak_object_in_code;
std::pair<HeapObject, Code> weak_object_in_code;
while (weak_objects_.weak_objects_in_code.Pop(kMainThread,
&weak_object_in_code)) {
HeapObject* object = weak_object_in_code.first;
HeapObject object = weak_object_in_code.first;
Code code = weak_object_in_code.second;
if (!non_atomic_marking_state()->IsBlackOrGrey(object) &&
!code->embedded_objects_cleared()) {
@ -1997,8 +1997,7 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
int function_literal_id = shared_info->FunctionLiteralId(isolate());

shared_info->DiscardCompiledMetadata(
isolate(),
[](HeapObjectPtr object, ObjectSlot slot, HeapObjectPtr target) {
isolate(), [](HeapObject object, ObjectSlot slot, HeapObject target) {
RecordSlot(object, slot, target);
});

@ -2008,7 +2007,7 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
UncompiledDataWithoutPreParsedScope::kSize);

// Replace bytecode array with an uncompiled data array.
HeapObject* compiled_data = shared_info->GetBytecodeArray();
HeapObject compiled_data = shared_info->GetBytecodeArray();
Address compiled_data_start = compiled_data->address();
int compiled_data_size = compiled_data->Size();
MemoryChunk* chunk = MemoryChunk::FromAddress(compiled_data_start);
@ -2040,7 +2039,7 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
UncompiledData::Initialize(
uncompiled_data, inferred_name, start_position, end_position,
function_literal_id,
[](HeapObjectPtr object, ObjectSlot slot, HeapObjectPtr target) {
[](HeapObject object, ObjectSlot slot, HeapObject target) {
RecordSlot(object, slot, target);
});

@ -2224,7 +2223,7 @@ void MarkCompactCollector::ClearWeakCollections() {

while (weak_objects_.ephemeron_hash_tables.Pop(kMainThread, &table)) {
for (int i = 0; i < table->Capacity(); i++) {
HeapObject* key = HeapObject::cast(table->KeyAt(i));
HeapObject key = HeapObject::cast(table->KeyAt(i));
#ifdef VERIFY_HEAP
Object* value = table->ValueAt(i);

@ -2243,11 +2242,11 @@ void MarkCompactCollector::ClearWeakCollections() {

void MarkCompactCollector::ClearWeakReferences() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_REFERENCES);
std::pair<HeapObject*, HeapObjectSlot> slot;
std::pair<HeapObject, HeapObjectSlot> slot;
HeapObjectReference cleared_weak_ref =
HeapObjectReference::ClearedValue(isolate());
while (weak_objects_.weak_references.Pop(kMainThread, &slot)) {
HeapObject* value;
HeapObject value;
// The slot could have been overwritten, so we have to treat it
// as MaybeObjectSlot.
MaybeObjectSlot location(slot.second);
@ -2289,14 +2288,14 @@ void MarkCompactCollector::ClearJSWeakCells() {
while (weak_objects_.js_weak_cells.Pop(kMainThread, &weak_cell)) {
// We do not insert cleared weak cells into the list, so the value
// cannot be a Smi here.
HeapObject* target = HeapObject::cast(weak_cell->target());
HeapObject target = HeapObject::cast(weak_cell->target());
if (!non_atomic_marking_state()->IsBlackOrGrey(target)) {
// The value of the JSWeakCell is dead.
JSWeakFactory weak_factory = JSWeakFactory::cast(weak_cell->factory());
if (!weak_factory->scheduled_for_cleanup()) {
heap()->AddDirtyJSWeakFactory(
weak_factory,
[](HeapObject* object, ObjectSlot slot, Object* target) {
[](HeapObject object, ObjectSlot slot, Object* target) {
if (target->IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
@ -2306,7 +2305,7 @@ void MarkCompactCollector::ClearJSWeakCells() {
// thus we need to record the slots it writes. The normal write barrier is
// not enough, since it's disabled before GC.
weak_cell->Nullify(
isolate(), [](HeapObject* object, ObjectSlot slot, Object* target) {
isolate(), [](HeapObject object, ObjectSlot slot, Object* target) {
if (target->IsHeapObject()) {
RecordSlot(object, slot, HeapObject::cast(target));
}
@ -2341,7 +2340,7 @@ bool MarkCompactCollector::IsOnEvacuationCandidate(MaybeObject obj) {

MarkCompactCollector::RecordRelocSlotInfo
MarkCompactCollector::PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject* target) {
HeapObject target) {
RecordRelocSlotInfo result;
result.should_record = false;
Page* target_page = Page::FromAddress(target->ptr());
@ -2372,7 +2371,7 @@ MarkCompactCollector::PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
}

void MarkCompactCollector::RecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject* target) {
HeapObject target) {
RecordRelocSlotInfo info = PrepareRecordRelocSlot(host, rinfo, target);
if (info.should_record) {
RememberedSet<OLD_TO_OLD>::InsertTyped(info.memory_chunk, info.slot_type,
@ -2385,36 +2384,36 @@ namespace {
// Missing specialization MakeSlotValue<FullObjectSlot, WEAK>() will turn an
// attempt to store a weak reference into a strong-only slot into a compilation error.
template <typename TSlot, HeapObjectReferenceType reference_type>
typename TSlot::TObject MakeSlotValue(HeapObject* heap_object);
typename TSlot::TObject MakeSlotValue(HeapObject heap_object);

template <>
ObjectPtr MakeSlotValue<ObjectSlot, HeapObjectReferenceType::STRONG>(
HeapObject* heap_object) {
return ObjectPtr(heap_object->ptr());
HeapObject heap_object) {
return heap_object;
}

template <>
MaybeObject MakeSlotValue<MaybeObjectSlot, HeapObjectReferenceType::STRONG>(
HeapObject* heap_object) {
HeapObject heap_object) {
return HeapObjectReference::Strong(heap_object);
}

template <>
MaybeObject MakeSlotValue<MaybeObjectSlot, HeapObjectReferenceType::WEAK>(
HeapObject* heap_object) {
HeapObject heap_object) {
return HeapObjectReference::Weak(heap_object);
}

#ifdef V8_COMPRESS_POINTERS
template <>
ObjectPtr MakeSlotValue<FullObjectSlot, HeapObjectReferenceType::STRONG>(
HeapObject* heap_object) {
return ObjectPtr(heap_object->ptr());
HeapObject heap_object) {
return heap_object;
}

template <>
MaybeObject MakeSlotValue<FullMaybeObjectSlot, HeapObjectReferenceType::STRONG>(
HeapObject* heap_object) {
HeapObject heap_object) {
return HeapObjectReference::Strong(heap_object);
}

@ -2427,7 +2426,7 @@ template <AccessMode access_mode, HeapObjectReferenceType reference_type,
typename TSlot>
static inline SlotCallbackResult UpdateSlot(TSlot slot,
typename TSlot::TObject old,
HeapObject* heap_obj) {
HeapObject heap_obj) {
static_assert(
std::is_same<TSlot, FullObjectSlot>::value ||
std::is_same<TSlot, ObjectSlot>::value ||
@ -2459,7 +2458,7 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateSlot(TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load();
HeapObject* heap_obj;
HeapObject heap_obj;
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
} else if (obj->GetHeapObjectIfStrong(&heap_obj)) {
@ -2473,7 +2472,7 @@ template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateStrongSlot(TSlot slot) {
DCHECK(!HasWeakHeapObjectTag(slot.load().ptr()));
typename TSlot::TObject obj = slot.Relaxed_Load();
HeapObject* heap_obj;
HeapObject heap_obj;
if (obj.GetHeapObject(&heap_obj)) {
return UpdateSlot<access_mode, HeapObjectReferenceType::STRONG>(slot, obj,
heap_obj);
@ -2489,22 +2488,22 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
public:
PointersUpdatingVisitor() {}

void VisitPointer(HeapObject* host, ObjectSlot p) override {
void VisitPointer(HeapObject host, ObjectSlot p) override {
UpdateStrongSlotInternal(p);
}

void VisitPointer(HeapObject* host, MaybeObjectSlot p) override {
void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
UpdateSlotInternal(p);
}

void VisitPointers(HeapObject* host, ObjectSlot start,
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
for (ObjectSlot p = start; p < end; ++p) {
UpdateStrongSlotInternal(p);
}
}

void VisitPointers(HeapObject* host, MaybeObjectSlot start,
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
for (MaybeObjectSlot p = start; p < end; ++p) {
UpdateSlotInternal(p);
@ -2772,7 +2771,7 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
|
||||
MarkCompactCollector::NonAtomicMarkingState* marking_state =
|
||||
collector_->non_atomic_marking_state();
|
||||
*live_bytes = marking_state->live_bytes(chunk);
|
||||
HeapObject* failed_object = nullptr;
|
||||
HeapObject failed_object;
|
||||
switch (evacuation_mode) {
|
||||
case kObjectsNewToOld:
|
||||
LiveObjectVisitor::VisitBlackObjectsNoFail(
|
||||
@ -2930,7 +2929,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
|
||||
heap()->incremental_marking()->non_atomic_marking_state();
|
||||
while (current) {
|
||||
LargePage* next_current = current->next_page();
|
||||
HeapObject* object = current->GetObject();
|
||||
HeapObject object = current->GetObject();
|
||||
DCHECK(!marking_state->IsGrey(object));
|
||||
if (marking_state->IsBlack(object)) {
|
||||
heap_->lo_space()->PromoteNewLargeObject(current);
|
||||
@ -2952,7 +2951,7 @@ class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
|
||||
public:
|
||||
Object* RetainAs(Object* object) override {
|
||||
if (object->IsHeapObject()) {
|
||||
HeapObject* heap_object = HeapObject::cast(object);
|
||||
HeapObject heap_object = HeapObject::cast(object);
|
||||
MapWord map_word = heap_object->map_word();
|
||||
if (map_word.IsForwardingAddress()) {
|
||||
return map_word.ToForwardingAddress();
|
||||
@ -2974,12 +2973,12 @@ bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
                                          MarkingState* marking_state,
                                          Visitor* visitor,
                                          IterationMode iteration_mode,
                                          HeapObject** failed_object) {
                                          HeapObject* failed_object) {
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
               "LiveObjectVisitor::VisitBlackObjects");
  for (auto object_and_size :
       LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
    HeapObject* const object = object_and_size.first;
    HeapObject const object = object_and_size.first;
    if (!visitor->Visit(object, object_and_size.second)) {
      if (iteration_mode == kClearMarkbits) {
        marking_state->bitmap(chunk)->ClearRange(
@ -3005,7 +3004,7 @@ void LiveObjectVisitor::VisitBlackObjectsNoFail(MemoryChunk* chunk,
               "LiveObjectVisitor::VisitBlackObjectsNoFail");
  DCHECK_NE(chunk->owner()->identity(), NEW_LO_SPACE);
  if (chunk->owner()->identity() == LO_SPACE) {
    HeapObject* object = reinterpret_cast<LargePage*>(chunk)->GetObject();
    HeapObject object = reinterpret_cast<LargePage*>(chunk)->GetObject();
    DCHECK(marking_state->IsBlack(object));
    const bool success = visitor->Visit(object, object->Size());
    USE(success);
@ -3013,7 +3012,7 @@ void LiveObjectVisitor::VisitBlackObjectsNoFail(MemoryChunk* chunk,
  } else {
    for (auto object_and_size :
         LiveObjectRange<kBlackObjects>(chunk, marking_state->bitmap(chunk))) {
      HeapObject* const object = object_and_size.first;
      HeapObject const object = object_and_size.first;
      DCHECK(marking_state->IsBlack(object));
      const bool success = visitor->Visit(object, object_and_size.second);
      USE(success);
@ -3034,7 +3033,7 @@ void LiveObjectVisitor::VisitGreyObjectsNoFail(MemoryChunk* chunk,
               "LiveObjectVisitor::VisitGreyObjectsNoFail");
  for (auto object_and_size :
       LiveObjectRange<kGreyObjects>(chunk, marking_state->bitmap(chunk))) {
    HeapObject* const object = object_and_size.first;
    HeapObject const object = object_and_size.first;
    DCHECK(marking_state->IsGrey(object));
    const bool success = visitor->Visit(object, object_and_size.second);
    USE(success);
@ -3182,7 +3181,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
                 "ToSpaceUpdatingItem::ProcessVisitAll");
    PointersUpdatingVisitor visitor;
    for (Address cur = start_; cur < end_;) {
      HeapObject* object = HeapObject::FromAddress(cur);
      HeapObject object = HeapObject::FromAddress(cur);
      Map map = object->map();
      int size = object->SizeFromMap(map);
      object->IterateBodyFast(map, size, &visitor);
@ -3237,7 +3236,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
                      std::is_same<TSlot, MaybeObjectSlot>::value,
                  "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
    using THeapObjectSlot = typename TSlot::THeapObjectSlot;
    HeapObject* heap_object;
    HeapObject heap_object;
    if (!slot.load().GetHeapObject(&heap_object)) {
      return REMOVE_SLOT;
    }
@ -3305,7 +3304,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
        chunk_->invalidated_slots() != nullptr) {
#ifdef DEBUG
      for (auto object_size : *chunk_->invalidated_slots()) {
        HeapObject* object = object_size.first;
        HeapObject object = object_size.first;
        int size = object_size.second;
        DCHECK_LE(object->SizeFromMap(object->map()), size);
      }
@ -3587,7 +3586,7 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
}

void MarkCompactCollector::ReportAbortedEvacuationCandidate(
    HeapObject* failed_object, MemoryChunk* chunk) {
    HeapObject failed_object, MemoryChunk* chunk) {
  base::MutexGuard guard(&mutex_);

  aborted_evacuation_candidates_.push_back(
@ -3596,7 +3595,7 @@ void MarkCompactCollector::ReportAbortedEvacuationCandidate(

void MarkCompactCollector::PostProcessEvacuationCandidates() {
  for (auto object_and_page : aborted_evacuation_candidates_) {
    HeapObject* failed_object = object_and_page.first;
    HeapObject failed_object = object_and_page.first;
    Page* page = object_and_page.second;
    page->SetFlag(Page::COMPACTION_WAS_ABORTED);
    // Aborted compaction page. We have to record slots here, since we
@ -3737,7 +3736,7 @@ void MarkCompactCollector::MarkingWorklist::PrintWorklist(
    const char* worklist_name, ConcurrentMarkingWorklist* worklist) {
  std::map<InstanceType, int> count;
  int total_count = 0;
  worklist->IterateGlobalPool([&count, &total_count](HeapObject* obj) {
  worklist->IterateGlobalPool([&count, &total_count](HeapObject obj) {
    ++total_count;
    count[obj->map()->instance_type()]++;
  });
@ -3775,11 +3774,11 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
    return marking_state_->bitmap(chunk);
  }

  bool IsMarked(HeapObject* object) override {
  bool IsMarked(HeapObject object) override {
    return marking_state_->IsGrey(object);
  }

  bool IsBlackOrGrey(HeapObject* object) override {
  bool IsBlackOrGrey(HeapObject object) override {
    return marking_state_->IsBlackOrGrey(object);
  }

@ -3809,7 +3808,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
  }

 private:
  V8_INLINE void VerifyHeapObjectImpl(HeapObject* heap_object) {
  V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
    CHECK_IMPLIES(Heap::InNewSpace(heap_object), IsMarked(heap_object));
  }

@ -3817,7 +3816,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
  V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
    for (TSlot slot = start; slot < end; ++slot) {
      typename TSlot::TObject object = slot.load();
      HeapObject* heap_object;
      HeapObject heap_object;
      // Minor MC treats weak references as strong.
      if (object.GetHeapObject(&heap_object)) {
        VerifyHeapObjectImpl(heap_object);
@ -3842,7 +3841,7 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
  }

 protected:
  V8_INLINE void VerifyHeapObjectImpl(HeapObject* heap_object) {
  V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
    CHECK_IMPLIES(Heap::InNewSpace(heap_object), Heap::InToSpace(heap_object));
  }

@ -3850,7 +3849,7 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
  void VerifyPointersImpl(TSlot start, TSlot end) {
    for (TSlot current = start; current < end; ++current) {
      typename TSlot::TObject object = current.load();
      HeapObject* heap_object;
      HeapObject heap_object;
      if (object.GetHeapObject(&heap_object)) {
        VerifyHeapObjectImpl(heap_object);
      }
@ -3907,21 +3906,21 @@ class YoungGenerationMarkingVisitor final
      MinorMarkCompactCollector::MarkingWorklist* global_worklist, int task_id)
      : worklist_(global_worklist, task_id), marking_state_(marking_state) {}

  V8_INLINE void VisitPointers(HeapObject* host, ObjectSlot start,
  V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
                               ObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitPointers(HeapObject* host, MaybeObjectSlot start,
  V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
                               MaybeObjectSlot end) final {
    VisitPointersImpl(host, start, end);
  }

  V8_INLINE void VisitPointer(HeapObject* host, ObjectSlot slot) final {
  V8_INLINE void VisitPointer(HeapObject host, ObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

  V8_INLINE void VisitPointer(HeapObject* host, MaybeObjectSlot slot) final {
  V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot slot) final {
    VisitPointerImpl(host, slot);
  }

@ -3937,24 +3936,24 @@ class YoungGenerationMarkingVisitor final

 private:
  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end) {
  V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
    for (TSlot slot = start; slot < end; ++slot) {
      VisitPointer(host, slot);
    }
  }

  template <typename TSlot>
  V8_INLINE void VisitPointerImpl(HeapObject* host, TSlot slot) {
  V8_INLINE void VisitPointerImpl(HeapObject host, TSlot slot) {
    typename TSlot::TObject target = slot.load();
    if (Heap::InNewSpace(target)) {
      // Treat weak references as strong.
      // TODO(marja): Proper weakness handling for minor-mcs.
      HeapObject* target_object = target.GetHeapObject();
      HeapObject target_object = target.GetHeapObject();
      MarkObjectViaMarkingWorklist(target_object);
    }
  }

  inline void MarkObjectViaMarkingWorklist(HeapObject* object) {
  inline void MarkObjectViaMarkingWorklist(HeapObject object) {
    if (marking_state_->WhiteToGrey(object)) {
      // Marking deque overflow is unsupported for the young generation.
      CHECK(worklist_.Push(object));
@ -4014,7 +4013,7 @@ class YoungGenerationMigrationObserver final : public MigrationObserver {
      : MigrationObserver(heap),
        mark_compact_collector_(mark_compact_collector) {}

  inline void Move(AllocationSpace dest, HeapObject* src, HeapObject* dst,
  inline void Move(AllocationSpace dest, HeapObject src, HeapObject dst,
                   int size) final {
    // Migrate color to old generation marking in case the object survived young
    // generation garbage collection.
@ -4045,11 +4044,11 @@ class YoungGenerationRecordMigratedSlotVisitor final
 private:
  // Only record slots for host objects that are considered as live by the full
  // collector.
  inline bool IsLive(HeapObject* object) {
  inline bool IsLive(HeapObject object) {
    return collector_->non_atomic_marking_state()->IsBlack(object);
  }

  inline void RecordMigratedSlot(HeapObject* host, MaybeObject value,
  inline void RecordMigratedSlot(HeapObject host, MaybeObject value,
                                 Address slot) final {
    if (value->IsStrongOrWeak()) {
      Page* p = Page::FromAddress(value.ptr());
@ -4221,7 +4220,7 @@ void MinorMarkCompactCollector::MakeIterable(

    for (auto object_and_size :
         LiveObjectRange<kGreyObjects>(p, marking_state()->bitmap(p))) {
      HeapObject* const object = object_and_size.first;
      HeapObject const object = object_and_size.first;
      DCHECK(non_atomic_marking_state()->IsGrey(object));
      Address free_end = object->address();
      if (free_end != free_start) {
@ -4278,7 +4277,7 @@ class YoungGenerationExternalStringTableCleaner : public RootVisitor {
    for (FullObjectSlot p = start; p < end; ++p) {
      Object* o = *p;
      if (o->IsHeapObject()) {
        HeapObject* heap_object = HeapObject::cast(o);
        HeapObject heap_object = HeapObject::cast(o);
        if (marking_state_->IsWhite(heap_object)) {
          if (o->IsExternalString()) {
            heap_->FinalizeExternalString(String::cast(*p));
@ -4307,7 +4306,7 @@ class MinorMarkCompactWeakObjectRetainer : public WeakObjectRetainer {
      : marking_state_(collector->non_atomic_marking_state()) {}

  Object* RetainAs(Object* object) override {
    HeapObject* heap_object = HeapObject::cast(object);
    HeapObject heap_object = HeapObject::cast(object);
    if (!Heap::InNewSpace(heap_object)) return object;

    // Young generation marking only marks to grey instead of black.
@ -4423,7 +4422,7 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {

  void MarkObject(Object* object) {
    if (!Heap::InNewSpace(object)) return;
    HeapObject* heap_object = HeapObject::cast(object);
    HeapObject heap_object = HeapObject::cast(object);
    if (marking_state_->WhiteToGrey(heap_object)) {
      const int size = visitor_.Visit(heap_object);
      IncrementLiveBytes(heap_object, size);
@ -4432,7 +4431,7 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {

 private:
  void EmptyLocalMarkingWorklist() {
    HeapObject* object = nullptr;
    HeapObject object;
    while (marking_worklist_.Pop(&object)) {
      const int size = visitor_.Visit(object);
      IncrementLiveBytes(object, size);
@ -4440,16 +4439,15 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
  }

  void EmptyMarkingWorklist() {
    HeapObject* object = nullptr;
    HeapObject object;
    while (marking_worklist_.Pop(&object)) {
      const int size = visitor_.Visit(object);
      IncrementLiveBytes(object, size);
    }
  }

  void IncrementLiveBytes(HeapObject* object, intptr_t bytes) {
    local_live_bytes_[Page::FromAddress(reinterpret_cast<Address>(object))] +=
        bytes;
  void IncrementLiveBytes(HeapObject object, intptr_t bytes) {
    local_live_bytes_[Page::FromHeapObject(object)] += bytes;
  }

  void FlushLiveBytes() {
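The IncrementLiveBytes hunk above shows one consequence of the value type: reinterpret_cast<Address>(object) would now yield the address of the local wrapper rather than the tagged pointer it holds, so the lookup switches to Page::FromHeapObject. A rough sketch of the distinction, with stand-in types and an assumed page size, not V8's actual definitions:

#include <cstdint>

using Address = uintptr_t;

struct HeapObjectValue {                        // hypothetical stand-in
  Address ptr_ = 0;                             // tagged pointer, held by value
  Address address() const { return ptr_ - 1; }  // assuming a 1-bit heap-object tag
};

constexpr Address kPageSize = Address{1} << 19;  // assumed; the real value may differ

Address PageFromAddress(Address a) { return a & ~(kPageSize - 1); }

Address PageFromHeapObject(HeapObjectValue object) {
  // Correct: derive the page from the object's heap address.
  return PageFromAddress(object.address());
}

Address PageFromWrapperAddress(const HeapObjectValue& object) {
  // Wrong for a value type: this is the stack address of the local wrapper,
  // which lives on no heap page at all.
  return PageFromAddress(reinterpret_cast<Address>(&object));
}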
@ -4512,7 +4510,7 @@ class PageMarkingItem : public MarkingItem {
    // Marking happens before flipping the young generation, so the object
    // has to be in ToSpace.
    DCHECK(Heap::InToSpace(object));
    HeapObject* heap_object;
    HeapObject heap_object;
    bool success = object.GetHeapObject(&heap_object);
    USE(success);
    DCHECK(success);
@ -4643,7 +4641,7 @@ void MinorMarkCompactCollector::MarkLiveObjects() {

void MinorMarkCompactCollector::ProcessMarkingWorklist() {
  MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
  HeapObject* object = nullptr;
  HeapObject object;
  while (marking_worklist.Pop(&object)) {
    DCHECK(!object->IsFiller());
    DCHECK(object->IsHeapObject());
@ -13,6 +13,7 @@
|
||||
#include "src/heap/spaces.h"
|
||||
#include "src/heap/sweeper.h"
|
||||
#include "src/heap/worklist.h"
|
||||
#include "src/objects/heap-object.h" // For Worklist<HeapObject, ...>
|
||||
#include "src/objects/js-weak-refs.h" // For Worklist<JSWeakCell, ...>
|
||||
|
||||
namespace v8 {
|
||||
@ -30,7 +31,7 @@ class YoungGenerationMarkingVisitor;
|
||||
template <typename ConcreteState, AccessMode access_mode>
|
||||
class MarkingStateBase {
|
||||
public:
|
||||
V8_INLINE MarkBit MarkBitFrom(HeapObject* obj) {
|
||||
V8_INLINE MarkBit MarkBitFrom(HeapObject obj) {
|
||||
return MarkBitFrom(MemoryChunk::FromAddress(obj->address()),
|
||||
obj->address());
|
||||
}
|
||||
@ -40,33 +41,33 @@ class MarkingStateBase {
|
||||
p->AddressToMarkbitIndex(addr));
|
||||
}
|
||||
|
||||
Marking::ObjectColor Color(HeapObject* obj) {
|
||||
Marking::ObjectColor Color(HeapObject obj) {
|
||||
return Marking::Color(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool IsImpossible(HeapObject* obj) {
|
||||
V8_INLINE bool IsImpossible(HeapObject obj) {
|
||||
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool IsBlack(HeapObject* obj) {
|
||||
V8_INLINE bool IsBlack(HeapObject obj) {
|
||||
return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool IsWhite(HeapObject* obj) {
|
||||
V8_INLINE bool IsWhite(HeapObject obj) {
|
||||
return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool IsGrey(HeapObject* obj) {
|
||||
V8_INLINE bool IsGrey(HeapObject obj) {
|
||||
return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool IsBlackOrGrey(HeapObject* obj) {
|
||||
V8_INLINE bool IsBlackOrGrey(HeapObject obj) {
|
||||
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE bool WhiteToGrey(HeapObject* obj);
|
||||
V8_INLINE bool WhiteToBlack(HeapObject* obj);
|
||||
V8_INLINE bool GreyToBlack(HeapObject* obj);
|
||||
V8_INLINE bool WhiteToGrey(HeapObject obj);
|
||||
V8_INLINE bool WhiteToBlack(HeapObject obj);
|
||||
V8_INLINE bool GreyToBlack(HeapObject obj);
|
||||
|
||||
void ClearLiveness(MemoryChunk* chunk) {
|
||||
static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear();
|
||||
@ -145,7 +146,7 @@ class LiveObjectRange {
|
||||
public:
|
||||
class iterator {
|
||||
public:
|
||||
using value_type = std::pair<HeapObject*, int /* size */>;
|
||||
using value_type = std::pair<HeapObject, int /* size */>;
|
||||
using pointer = const value_type*;
|
||||
using reference = const value_type&;
|
||||
using iterator_category = std::forward_iterator_tag;
|
||||
@ -175,7 +176,7 @@ class LiveObjectRange {
|
||||
MarkBitCellIterator it_;
|
||||
Address cell_base_;
|
||||
MarkBit::CellType current_cell_;
|
||||
HeapObject* current_object_;
|
||||
HeapObject current_object_;
|
||||
int current_size_;
|
||||
};
|
||||
|
||||
@ -208,7 +209,7 @@ class LiveObjectVisitor : AllStatic {
|
||||
template <class Visitor, typename MarkingState>
|
||||
static bool VisitBlackObjects(MemoryChunk* chunk, MarkingState* state,
|
||||
Visitor* visitor, IterationMode iteration_mode,
|
||||
HeapObject** failed_object);
|
||||
HeapObject* failed_object);
|
||||
|
||||
// Visits black objects on a MemoryChunk. The visitor is not allowed to fail
|
||||
// visitation for an object.
|
||||
@ -407,8 +408,8 @@ class MajorNonAtomicMarkingState final
|
||||
};
|
||||
|
||||
struct Ephemeron {
|
||||
HeapObject* key;
|
||||
HeapObject* value;
|
||||
HeapObject key;
|
||||
HeapObject value;
|
||||
};
|
||||
|
||||
typedef Worklist<Ephemeron, 64> EphemeronWorklist;
|
||||
@ -441,8 +442,8 @@ struct WeakObjects {
|
||||
|
||||
// TODO(marja): For old space, we only need the slot, not the host
|
||||
// object. Optimize this by adding a different storage for old space.
|
||||
Worklist<std::pair<HeapObject*, HeapObjectSlot>, 64> weak_references;
|
||||
Worklist<std::pair<HeapObject*, Code>, 64> weak_objects_in_code;
|
||||
Worklist<std::pair<HeapObject, HeapObjectSlot>, 64> weak_references;
|
||||
Worklist<std::pair<HeapObject, Code>, 64> weak_objects_in_code;
|
||||
|
||||
Worklist<JSWeakRef, 64> js_weak_refs;
|
||||
Worklist<JSWeakCell, 64> js_weak_cells;
|
||||
@ -451,7 +452,7 @@ struct WeakObjects {
|
||||
};
|
||||
|
||||
struct EphemeronMarking {
|
||||
std::vector<HeapObject*> newly_discovered;
|
||||
std::vector<HeapObject> newly_discovered;
|
||||
bool newly_discovered_overflowed;
|
||||
size_t newly_discovered_limit;
|
||||
};
|
||||
@ -470,26 +471,26 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
// Wrapper for the shared and bailout worklists.
|
||||
class MarkingWorklist {
|
||||
public:
|
||||
using ConcurrentMarkingWorklist = Worklist<HeapObject*, 64>;
|
||||
using EmbedderTracingWorklist = Worklist<HeapObject*, 16>;
|
||||
using ConcurrentMarkingWorklist = Worklist<HeapObject, 64>;
|
||||
using EmbedderTracingWorklist = Worklist<HeapObject, 16>;
|
||||
|
||||
// The heap parameter is not used but needed to match the sequential case.
|
||||
explicit MarkingWorklist(Heap* heap) {}
|
||||
|
||||
void Push(HeapObject* object) {
|
||||
void Push(HeapObject object) {
|
||||
bool success = shared_.Push(kMainThread, object);
|
||||
USE(success);
|
||||
DCHECK(success);
|
||||
}
|
||||
|
||||
void PushBailout(HeapObject* object) {
|
||||
void PushBailout(HeapObject object) {
|
||||
bool success = bailout_.Push(kMainThread, object);
|
||||
USE(success);
|
||||
DCHECK(success);
|
||||
}
|
||||
|
||||
HeapObject* Pop() {
|
||||
HeapObject* result;
|
||||
HeapObject Pop() {
|
||||
HeapObject result;
|
||||
#ifdef V8_CONCURRENT_MARKING
|
||||
if (bailout_.Pop(kMainThread, &result)) return result;
|
||||
#endif
|
||||
@ -499,15 +500,15 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
// and we can thus avoid the emptiness checks by putting it last.
|
||||
if (on_hold_.Pop(kMainThread, &result)) return result;
|
||||
#endif
|
||||
return nullptr;
|
||||
return HeapObject();
|
||||
}
|
||||
|
||||
HeapObject* PopBailout() {
|
||||
HeapObject PopBailout() {
|
||||
#ifdef V8_CONCURRENT_MARKING
|
||||
HeapObject* result;
|
||||
HeapObject result;
|
||||
if (bailout_.Pop(kMainThread, &result)) return result;
|
||||
#endif
|
||||
return nullptr;
|
||||
return HeapObject();
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
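Pop() and PopBailout() above now signal an empty worklist by returning a default-constructed HeapObject() instead of nullptr, so callers are expected to test is_null(). A hypothetical caller-side sketch of that contract, using stand-in types rather than V8's API:

#include <vector>

struct HeapObjStandIn {
  long ptr_ = 0;
  bool is_null() const { return ptr_ == 0; }
};

struct WorklistStandIn {
  std::vector<HeapObjStandIn> items;
  HeapObjStandIn Pop() {
    if (items.empty()) return HeapObjStandIn();  // the "empty" sentinel value
    HeapObjStandIn result = items.back();
    items.pop_back();
    return result;
  }
};

void DrainSketch(WorklistStandIn& worklist) {
  // The loop test replaces the old "!= nullptr" comparison.
  for (HeapObjStandIn obj = worklist.Pop(); !obj.is_null(); obj = worklist.Pop()) {
    // process |obj| ...
  }
}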
@ -541,7 +542,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
// Calls the specified callback on each element of the deques and replaces
|
||||
// the element with the result of the callback. If the callback returns
|
||||
// nullptr then the element is removed from the deque.
|
||||
// The callback must accept HeapObject* and return HeapObject*.
|
||||
// The callback must accept HeapObject and return HeapObject.
|
||||
template <typename Callback>
|
||||
void Update(Callback callback) {
|
||||
bailout_.Update(callback);
|
||||
@ -635,12 +636,12 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
uint32_t offset;
|
||||
};
|
||||
static RecordRelocSlotInfo PrepareRecordRelocSlot(Code host, RelocInfo* rinfo,
|
||||
HeapObject* target);
|
||||
static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject* target);
|
||||
V8_INLINE static void RecordSlot(HeapObject* object, ObjectSlot slot,
|
||||
HeapObject* target);
|
||||
V8_INLINE static void RecordSlot(HeapObject* object, HeapObjectSlot slot,
|
||||
HeapObject* target);
|
||||
HeapObject target);
|
||||
static void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
|
||||
V8_INLINE static void RecordSlot(HeapObject object, ObjectSlot slot,
|
||||
HeapObject target);
|
||||
V8_INLINE static void RecordSlot(HeapObject object, HeapObjectSlot slot,
|
||||
HeapObject target);
|
||||
void RecordLiveSlotsOnPage(Page* page);
|
||||
|
||||
void UpdateSlots(SlotsBuffer* buffer);
|
||||
@ -670,16 +671,16 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
weak_objects_.ephemeron_hash_tables.Push(kMainThread, table);
|
||||
}
|
||||
|
||||
void AddEphemeron(HeapObject* key, HeapObject* value) {
|
||||
void AddEphemeron(HeapObject key, HeapObject value) {
|
||||
weak_objects_.discovered_ephemerons.Push(kMainThread,
|
||||
Ephemeron{key, value});
|
||||
}
|
||||
|
||||
void AddWeakReference(HeapObject* host, HeapObjectSlot slot) {
|
||||
void AddWeakReference(HeapObject host, HeapObjectSlot slot) {
|
||||
weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
|
||||
}
|
||||
|
||||
void AddWeakObjectInCode(HeapObject* object, Code code) {
|
||||
void AddWeakObjectInCode(HeapObject object, Code code) {
|
||||
weak_objects_.weak_objects_in_code.Push(kMainThread,
|
||||
std::make_pair(object, code));
|
||||
}
|
||||
@ -694,7 +695,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
|
||||
inline void AddBytecodeFlushingCandidate(SharedFunctionInfo flush_candidate);
|
||||
|
||||
void AddNewlyDiscovered(HeapObject* object) {
|
||||
void AddNewlyDiscovered(HeapObject object) {
|
||||
if (ephemeron_marking_.newly_discovered_overflowed) return;
|
||||
|
||||
if (ephemeron_marking_.newly_discovered.size() <
|
||||
@ -747,14 +748,14 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
|
||||
// Marks the object black and adds it to the marking work list.
|
||||
// This is for non-incremental marking only.
|
||||
V8_INLINE void MarkObject(HeapObject* host, HeapObject* obj);
|
||||
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
|
||||
|
||||
// Marks the object black and adds it to the marking work list.
|
||||
// This is for non-incremental marking only.
|
||||
V8_INLINE void MarkRootObject(Root root, HeapObject* obj);
|
||||
V8_INLINE void MarkRootObject(Root root, HeapObject obj);
|
||||
|
||||
// Used by wrapper tracing.
|
||||
V8_INLINE void MarkExternallyReferencedObject(HeapObject* obj);
|
||||
V8_INLINE void MarkExternallyReferencedObject(HeapObject obj);
|
||||
|
||||
// Mark the heap roots and all objects reachable from them.
|
||||
void MarkRoots(RootVisitor* root_visitor,
|
||||
@ -786,7 +787,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
|
||||
// Implements ephemeron semantics: Marks value if key is already reachable.
|
||||
// Returns true if value was actually marked.
|
||||
bool VisitEphemeron(HeapObject* key, HeapObject* value);
|
||||
bool VisitEphemeron(HeapObject key, HeapObject value);
|
||||
|
||||
// Marks ephemerons and drains marking worklist iteratively
|
||||
// until a fixpoint is reached.
|
||||
@ -870,7 +871,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
|
||||
void ReleaseEvacuationCandidates();
|
||||
void PostProcessEvacuationCandidates();
|
||||
void ReportAbortedEvacuationCandidate(HeapObject* failed_object,
|
||||
void ReportAbortedEvacuationCandidate(HeapObject failed_object,
|
||||
MemoryChunk* chunk);
|
||||
|
||||
static const int kEphemeronChunkSize = 8 * KB;
|
||||
@ -918,7 +919,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
// Pages that are actually processed during evacuation.
|
||||
std::vector<Page*> old_space_evacuation_pages_;
|
||||
std::vector<Page*> new_space_evacuation_pages_;
|
||||
std::vector<std::pair<HeapObject*, Page*>> aborted_evacuation_candidates_;
|
||||
std::vector<std::pair<HeapObject, Page*>> aborted_evacuation_candidates_;
|
||||
|
||||
Sweeper* sweeper_;
|
||||
|
||||
@ -967,17 +968,17 @@ class MarkingVisitor final
|
||||
V8_INLINE int VisitJSWeakRef(Map map, JSWeakRef object);
|
||||
|
||||
// ObjectVisitor implementation.
|
||||
V8_INLINE void VisitPointer(HeapObject* host, ObjectSlot p) final {
|
||||
V8_INLINE void VisitPointer(HeapObject host, ObjectSlot p) final {
|
||||
VisitPointerImpl(host, p);
|
||||
}
|
||||
V8_INLINE void VisitPointer(HeapObject* host, MaybeObjectSlot p) final {
|
||||
V8_INLINE void VisitPointer(HeapObject host, MaybeObjectSlot p) final {
|
||||
VisitPointerImpl(host, p);
|
||||
}
|
||||
V8_INLINE void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) final {
|
||||
VisitPointersImpl(host, start, end);
|
||||
}
|
||||
V8_INLINE void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) final {
|
||||
VisitPointersImpl(host, start, end);
|
||||
}
|
||||
@ -986,7 +987,7 @@ class MarkingVisitor final
|
||||
|
||||
// Weak list pointers should be ignored during marking. The lists are
|
||||
// reconstructed after GC.
|
||||
void VisitCustomWeakPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitCustomWeakPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) final {}
|
||||
|
||||
V8_INLINE void VisitDescriptors(DescriptorArray descriptor_array,
|
||||
@ -998,10 +999,10 @@ class MarkingVisitor final
|
||||
static const int kProgressBarScanningChunk = 32 * 1024;
|
||||
|
||||
template <typename TSlot>
|
||||
V8_INLINE void VisitPointerImpl(HeapObject* host, TSlot p);
|
||||
V8_INLINE void VisitPointerImpl(HeapObject host, TSlot p);
|
||||
|
||||
template <typename TSlot>
|
||||
V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end);
|
||||
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);
|
||||
|
||||
V8_INLINE int VisitFixedArrayIncremental(Map map, FixedArray object);
|
||||
|
||||
@ -1012,10 +1013,10 @@ class MarkingVisitor final
|
||||
|
||||
// Marks the object black without pushing it on the marking work list. Returns
|
||||
// true if the object needed marking and false otherwise.
|
||||
V8_INLINE bool MarkObjectWithoutPush(HeapObject* host, HeapObject* object);
|
||||
V8_INLINE bool MarkObjectWithoutPush(HeapObject host, HeapObject object);
|
||||
|
||||
// Marks the object grey and pushes it on the marking work list.
|
||||
V8_INLINE void MarkObject(HeapObject* host, HeapObject* obj);
|
||||
V8_INLINE void MarkObject(HeapObject host, HeapObject obj);
|
||||
|
||||
MarkingState* marking_state() { return marking_state_; }
|
||||
|
||||
@ -1068,7 +1069,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
void CleanupSweepToIteratePages();
|
||||
|
||||
private:
|
||||
using MarkingWorklist = Worklist<HeapObject*, 64 /* segment size */>;
|
||||
using MarkingWorklist = Worklist<HeapObject, 64 /* segment size */>;
|
||||
class RootMarkingVisitor;
|
||||
|
||||
static const int kNumMarkers = 8;
|
||||
@ -1082,7 +1083,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
|
||||
|
||||
void MarkLiveObjects() override;
|
||||
void MarkRootSetInParallel(RootMarkingVisitor* root_visitor);
|
||||
V8_INLINE void MarkRootObject(HeapObject* obj);
|
||||
V8_INLINE void MarkRootObject(HeapObject obj);
|
||||
void ProcessMarkingWorklist() override;
|
||||
void ClearNonLiveReferences() override;
|
||||
|
||||
|
@ -39,7 +39,7 @@ class FieldStatsCollector : public ObjectVisitor {
|
||||
unboxed_double_fields_count_(unboxed_double_fields_count),
|
||||
raw_fields_count_(raw_fields_count) {}
|
||||
|
||||
void RecordStats(HeapObject* host) {
|
||||
void RecordStats(HeapObject host) {
|
||||
size_t old_pointer_fields_count = *tagged_fields_count_;
|
||||
host->Iterate(this);
|
||||
size_t tagged_fields_count_in_object =
|
||||
@ -68,11 +68,11 @@ class FieldStatsCollector : public ObjectVisitor {
|
||||
*raw_fields_count_ += raw_fields_count_in_object;
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject* host, ObjectSlot start,
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
*tagged_fields_count_ += (end - start);
|
||||
}
|
||||
void VisitPointers(HeapObject* host, MaybeObjectSlot start,
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) override {
|
||||
*tagged_fields_count_ += (end - start);
|
||||
}
|
||||
@ -342,7 +342,7 @@ class ObjectStatsCollectorImpl {
|
||||
void CollectGlobalStatistics();
|
||||
|
||||
enum class CollectFieldStats { kNo, kYes };
|
||||
void CollectStatistics(HeapObject* obj, Phase phase,
|
||||
void CollectStatistics(HeapObject obj, Phase phase,
|
||||
CollectFieldStats collect_field_stats);
|
||||
|
||||
private:
|
||||
@ -353,7 +353,7 @@ class ObjectStatsCollectorImpl {
|
||||
|
||||
Isolate* isolate() { return heap_->isolate(); }
|
||||
|
||||
bool RecordVirtualObjectStats(HeapObject* parent, HeapObject* obj,
|
||||
bool RecordVirtualObjectStats(HeapObject parent, HeapObject obj,
|
||||
ObjectStats::VirtualInstanceType type,
|
||||
size_t size, size_t over_allocated,
|
||||
CowMode check_cow_array = kCheckCow);
|
||||
@ -361,28 +361,28 @@ class ObjectStatsCollectorImpl {
|
||||
ObjectStats::VirtualInstanceType type,
|
||||
size_t size);
|
||||
// Gets size from |ob| and assumes no over allocating.
|
||||
bool RecordSimpleVirtualObjectStats(HeapObject* parent, HeapObject* obj,
|
||||
bool RecordSimpleVirtualObjectStats(HeapObject parent, HeapObject obj,
|
||||
ObjectStats::VirtualInstanceType type);
|
||||
// For HashTable it is possible to compute over allocated memory.
|
||||
void RecordHashTableVirtualObjectStats(HeapObject* parent,
|
||||
void RecordHashTableVirtualObjectStats(HeapObject parent,
|
||||
FixedArray hash_table,
|
||||
ObjectStats::VirtualInstanceType type);
|
||||
|
||||
bool SameLiveness(HeapObject* obj1, HeapObject* obj2);
|
||||
bool SameLiveness(HeapObject obj1, HeapObject obj2);
|
||||
bool CanRecordFixedArray(FixedArrayBase array);
|
||||
bool IsCowArray(FixedArrayBase array);
|
||||
|
||||
// Blacklist for objects that should not be recorded using
|
||||
// VirtualObjectStats and RecordSimpleVirtualObjectStats. For recording those
|
||||
// objects dispatch to the low level ObjectStats::RecordObjectStats manually.
|
||||
bool ShouldRecordObject(HeapObject* object, CowMode check_cow_array);
|
||||
bool ShouldRecordObject(HeapObject object, CowMode check_cow_array);
|
||||
|
||||
void RecordObjectStats(HeapObject* obj, InstanceType type, size_t size);
|
||||
void RecordObjectStats(HeapObject obj, InstanceType type, size_t size);
|
||||
|
||||
// Specific recursion into constant pool or embedded code objects. Records
|
||||
// FixedArrays and Tuple2.
|
||||
void RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
|
||||
HeapObject* parent, HeapObject* object,
|
||||
HeapObject parent, HeapObject object,
|
||||
ObjectStats::VirtualInstanceType type);
|
||||
|
||||
// Details.
|
||||
@ -407,7 +407,7 @@ class ObjectStatsCollectorImpl {
|
||||
Heap* heap_;
|
||||
ObjectStats* stats_;
|
||||
MarkCompactCollector::NonAtomicMarkingState* marking_state_;
|
||||
std::unordered_set<HeapObject*> virtual_objects_;
|
||||
std::unordered_set<HeapObject, HeapObject::Hasher> virtual_objects_;
|
||||
std::unordered_set<Address> external_resources_;
|
||||
FieldStatsCollector field_stats_collector_;
|
||||
};
|
||||
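virtual_objects_ above switches from a set of raw pointers to a set of HeapObject values, which is why the declaration now names an explicit HeapObject::Hasher. A plausible sketch of such a set, assuming the hasher simply hashes the tagged pointer; these are stand-in types, not the real V8 definition:

#include <cstddef>
#include <cstdint>
#include <unordered_set>

struct HeapObjStandIn {
  uintptr_t ptr_ = 0;
  uintptr_t ptr() const { return ptr_; }
  bool operator==(const HeapObjStandIn& other) const { return ptr_ == other.ptr_; }

  struct Hasher {
    size_t operator()(const HeapObjStandIn& o) const {
      return static_cast<size_t>(o.ptr() >> 3);  // drop alignment bits before hashing
    }
  };
};

using VirtualObjectSet = std::unordered_set<HeapObjStandIn, HeapObjStandIn::Hasher>;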
@ -422,7 +422,7 @@ ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap,
|
||||
&stats->tagged_fields_count_, &stats->embedder_fields_count_,
|
||||
&stats->unboxed_double_fields_count_, &stats->raw_fields_count_) {}
|
||||
|
||||
bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject* obj,
|
||||
bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject obj,
|
||||
CowMode check_cow_array) {
|
||||
if (obj->IsFixedArrayExact()) {
|
||||
FixedArray fixed_array = FixedArray::cast(obj);
|
||||
@ -434,7 +434,7 @@ bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject* obj,
|
||||
}
|
||||
|
||||
void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
|
||||
HeapObject* parent, FixedArray hash_table,
|
||||
HeapObject parent, FixedArray hash_table,
|
||||
ObjectStats::VirtualInstanceType type) {
|
||||
CHECK(hash_table->IsHashTable());
|
||||
// TODO(mlippautz): Implement over allocation for hash tables.
|
||||
@ -443,14 +443,13 @@ void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
|
||||
}
|
||||
|
||||
bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
|
||||
HeapObject* parent, HeapObject* obj,
|
||||
ObjectStats::VirtualInstanceType type) {
|
||||
HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type) {
|
||||
return RecordVirtualObjectStats(parent, obj, type, obj->Size(),
|
||||
ObjectStats::kNoOverAllocation, kCheckCow);
|
||||
}
|
||||
|
||||
bool ObjectStatsCollectorImpl::RecordVirtualObjectStats(
|
||||
HeapObject* parent, HeapObject* obj, ObjectStats::VirtualInstanceType type,
|
||||
HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type,
|
||||
size_t size, size_t over_allocated, CowMode check_cow_array) {
|
||||
if (!SameLiveness(parent, obj) || !ShouldRecordObject(obj, check_cow_array)) {
|
||||
return false;
|
||||
@ -638,7 +637,7 @@ void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
|
||||
// Log the monomorphic/polymorphic helper objects that this slot owns.
|
||||
for (int i = 0; i < it.entry_size(); i++) {
|
||||
MaybeObject raw_object = vector->get(slot.ToInt() + i);
|
||||
HeapObject* object;
|
||||
HeapObject object;
|
||||
if (raw_object->GetHeapObject(&object)) {
|
||||
if (object->IsCell() || object->IsWeakFixedArray()) {
|
||||
RecordSimpleVirtualObjectStats(
|
||||
@ -655,14 +654,14 @@ void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
|
||||
void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
|
||||
FixedArray array) {
|
||||
if (IsCowArray(array)) {
|
||||
RecordVirtualObjectStats(nullptr, array, ObjectStats::COW_ARRAY_TYPE,
|
||||
RecordVirtualObjectStats(HeapObject(), array, ObjectStats::COW_ARRAY_TYPE,
|
||||
array->Size(), ObjectStats::kNoOverAllocation,
|
||||
kIgnoreCow);
|
||||
}
|
||||
}
|
||||
|
||||
void ObjectStatsCollectorImpl::CollectStatistics(
|
||||
HeapObject* obj, Phase phase, CollectFieldStats collect_field_stats) {
|
||||
HeapObject obj, Phase phase, CollectFieldStats collect_field_stats) {
|
||||
Map map = obj->map();
|
||||
switch (phase) {
|
||||
case kPhase1:
|
||||
@ -725,30 +724,31 @@ void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
|
||||
}
|
||||
|
||||
// FixedArray.
|
||||
RecordSimpleVirtualObjectStats(nullptr, heap_->serialized_objects(),
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), heap_->serialized_objects(),
|
||||
ObjectStats::SERIALIZED_OBJECTS_TYPE);
|
||||
RecordSimpleVirtualObjectStats(nullptr, heap_->number_string_cache(),
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), heap_->number_string_cache(),
|
||||
ObjectStats::NUMBER_STRING_CACHE_TYPE);
|
||||
RecordSimpleVirtualObjectStats(
|
||||
nullptr, heap_->single_character_string_cache(),
|
||||
HeapObject(), heap_->single_character_string_cache(),
|
||||
ObjectStats::SINGLE_CHARACTER_STRING_CACHE_TYPE);
|
||||
RecordSimpleVirtualObjectStats(nullptr, heap_->string_split_cache(),
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), heap_->string_split_cache(),
|
||||
ObjectStats::STRING_SPLIT_CACHE_TYPE);
|
||||
RecordSimpleVirtualObjectStats(nullptr, heap_->regexp_multiple_cache(),
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), heap_->regexp_multiple_cache(),
|
||||
ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);
|
||||
RecordSimpleVirtualObjectStats(nullptr, heap_->retained_maps(),
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), heap_->retained_maps(),
|
||||
ObjectStats::RETAINED_MAPS_TYPE);
|
||||
|
||||
// WeakArrayList.
|
||||
RecordSimpleVirtualObjectStats(
|
||||
nullptr, WeakArrayList::cast(heap_->noscript_shared_function_infos()),
|
||||
HeapObject(),
|
||||
WeakArrayList::cast(heap_->noscript_shared_function_infos()),
|
||||
ObjectStats::NOSCRIPT_SHARED_FUNCTION_INFOS_TYPE);
|
||||
RecordSimpleVirtualObjectStats(nullptr,
|
||||
RecordSimpleVirtualObjectStats(HeapObject(),
|
||||
WeakArrayList::cast(heap_->script_list()),
|
||||
ObjectStats::SCRIPT_LIST_TYPE);
|
||||
}
|
||||
|
||||
void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject* obj,
|
||||
void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject obj,
|
||||
InstanceType type,
|
||||
size_t size) {
|
||||
if (virtual_objects_.find(obj) == virtual_objects_.end()) {
|
||||
@ -768,9 +768,8 @@ bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
|
||||
return array->map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
|
||||
}
|
||||
|
||||
bool ObjectStatsCollectorImpl::SameLiveness(HeapObject* obj1,
|
||||
HeapObject* obj2) {
|
||||
return obj1 == nullptr || obj2 == nullptr ||
|
||||
bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
|
||||
return obj1.is_null() || obj2.is_null() ||
|
||||
marking_state_->Color(obj1) == marking_state_->Color(obj2);
|
||||
}
|
||||
|
||||
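SameLiveness above, together with the RecordSimpleVirtualObjectStats call sites that now pass HeapObject(), illustrates the sentinel change: the empty value plays the role nullptr used to play for "no parent". A small sketch of that convention with stand-in types:

struct HObjStandIn {
  long ptr_ = 0;
  bool is_null() const { return ptr_ == 0; }
};

bool SameLivenessSketch(HObjStandIn obj1, HObjStandIn obj2) {
  // Either side being the null value short-circuits the liveness comparison.
  return obj1.is_null() || obj2.is_null();
}

void RecordTopLevelSketch(HObjStandIn obj) {
  // Call sites pass a default-constructed value where they used to pass nullptr.
  SameLivenessSketch(HObjStandIn(), obj);
}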
@ -849,7 +848,7 @@ void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
|
||||
// Uncompiled SharedFunctionInfo gets its own category.
|
||||
if (!info->is_compiled()) {
|
||||
RecordSimpleVirtualObjectStats(
|
||||
nullptr, info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
|
||||
HeapObject(), info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
|
||||
}
|
||||
}
|
||||
|
||||
@ -857,7 +856,7 @@ void ObjectStatsCollectorImpl::RecordVirtualJSFunctionDetails(
|
||||
JSFunction function) {
|
||||
// Uncompiled JSFunctions get their own category.
|
||||
if (!function->is_compiled()) {
|
||||
RecordSimpleVirtualObjectStats(nullptr, function,
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), function,
|
||||
ObjectStats::UNCOMPILED_JS_FUNCTION_TYPE);
|
||||
}
|
||||
}
|
||||
@ -870,7 +869,7 @@ void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
|
||||
|
||||
void ObjectStatsCollectorImpl::
|
||||
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
|
||||
HeapObject* parent, HeapObject* object,
|
||||
HeapObject parent, HeapObject object,
|
||||
ObjectStats::VirtualInstanceType type) {
|
||||
if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
|
||||
if (object->IsFixedArrayExact()) {
|
||||
@ -926,7 +925,7 @@ ObjectStats::VirtualInstanceType CodeKindToVirtualInstanceType(
|
||||
} // namespace
|
||||
|
||||
void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
|
||||
RecordSimpleVirtualObjectStats(nullptr, code,
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), code,
|
||||
CodeKindToVirtualInstanceType(code->kind()));
|
||||
RecordSimpleVirtualObjectStats(code, code->deoptimization_data(),
|
||||
ObjectStats::DEOPTIMIZATION_DATA_TYPE);
|
||||
@ -972,7 +971,7 @@ void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
|
||||
} else if (context->IsFunctionContext()) {
|
||||
RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context->Size());
|
||||
} else {
|
||||
RecordSimpleVirtualObjectStats(nullptr, context,
|
||||
RecordSimpleVirtualObjectStats(HeapObject(), context,
|
||||
ObjectStats::OTHER_CONTEXT_TYPE);
|
||||
}
|
||||
}
|
||||
@ -988,7 +987,7 @@ class ObjectStatsVisitor {
|
||||
heap->mark_compact_collector()->non_atomic_marking_state()),
|
||||
phase_(phase) {}
|
||||
|
||||
bool Visit(HeapObject* obj, int size) {
|
||||
bool Visit(HeapObject obj, int size) {
|
||||
if (marking_state_->IsBlack(obj)) {
|
||||
live_collector_->CollectStatistics(
|
||||
obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
|
||||
@ -1011,11 +1010,11 @@ namespace {
|
||||
|
||||
void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
|
||||
SpaceIterator space_it(heap);
|
||||
HeapObject* obj = nullptr;
|
||||
HeapObject obj;
|
||||
while (space_it.has_next()) {
|
||||
std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
|
||||
ObjectIterator* obj_it = it.get();
|
||||
while ((obj = obj_it->Next()) != nullptr) {
|
||||
for (obj = obj_it->Next(); !obj.is_null(); obj = obj_it->Next()) {
|
||||
visitor->Visit(obj, obj->Size());
|
||||
}
|
||||
}
|
||||
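The IterateHeap loop above is rewritten because the iterator now hands back a HeapObject value, so the termination test becomes is_null() rather than comparing the returned pointer against nullptr. A minimal sketch of the same loop shape over a hypothetical stand-in iterator, not V8's ObjectIterator:

#include <cstddef>
#include <vector>

struct HObjStandIn {
  int id = 0;
  bool is_null() const { return id == 0; }
  int Size() const { return 8; }  // placeholder size
};

struct ObjectIteratorStandIn {
  std::vector<HObjStandIn> objects;
  size_t pos = 0;
  HObjStandIn Next() {
    // Returning the null value ends iteration.
    return pos < objects.size() ? objects[pos++] : HObjStandIn();
  }
};

void VisitAllSketch(ObjectIteratorStandIn* obj_it) {
  for (HObjStandIn obj = obj_it->Next(); !obj.is_null(); obj = obj_it->Next()) {
    (void)obj.Size();  // visit |obj| ...
  }
}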
|
@ -21,25 +21,19 @@ namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
template <typename T, typename>
|
||||
T* HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
|
||||
template <typename T>
|
||||
T HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject object) {
|
||||
return T::cast(object);
|
||||
}
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
template <typename T, typename>
|
||||
T HeapVisitor<ResultType, ConcreteVisitor>::Cast(HeapObject* object) {
|
||||
return T::cast(object);
|
||||
}
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject object) {
|
||||
return Visit(object->map(), object);
|
||||
}
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map map,
|
||||
HeapObject* object) {
|
||||
HeapObject object) {
|
||||
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
|
||||
switch (map->visitor_id()) {
|
||||
#define CASE(TypeName, Type) \
|
||||
@ -75,7 +69,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(Map map,
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(
|
||||
HeapObject* host, MapWordSlot map_slot) {
|
||||
HeapObject host, MapWordSlot map_slot) {
|
||||
DCHECK(!host->map_word().IsForwardingAddress());
|
||||
static_cast<ConcreteVisitor*>(this)->VisitPointer(host, ObjectSlot(map_slot));
|
||||
}
|
||||
@ -108,12 +102,13 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject(
|
||||
Map map, HeapObject* object) {
|
||||
Map map, HeapObject object) {
|
||||
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
|
||||
if (!visitor->ShouldVisit(object)) return ResultType();
|
||||
int size = map->instance_size();
|
||||
if (visitor->ShouldVisitMapPointer())
|
||||
if (visitor->ShouldVisitMapPointer()) {
|
||||
visitor->VisitMapPointer(object, object->map_slot());
|
||||
}
|
||||
return static_cast<ResultType>(size);
|
||||
}
|
||||
|
||||
@ -143,12 +138,13 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
|
||||
Map map, HeapObject* object) {
|
||||
Map map, HeapObject object) {
|
||||
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
|
||||
if (!visitor->ShouldVisit(object)) return ResultType();
|
||||
int size = map->instance_size();
|
||||
if (visitor->ShouldVisitMapPointer())
|
||||
if (visitor->ShouldVisitMapPointer()) {
|
||||
visitor->VisitMapPointer(object, object->map_slot());
|
||||
}
|
||||
StructBodyDescriptor::IterateBody(map, object, size, visitor);
|
||||
return static_cast<ResultType>(size);
|
||||
}
|
||||
@ -196,12 +192,13 @@ int NewSpaceVisitor<ConcreteVisitor>::VisitJSWeakCell(Map map,
|
||||
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitWeakArray(
|
||||
Map map, HeapObject* object) {
|
||||
Map map, HeapObject object) {
|
||||
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
|
||||
if (!visitor->ShouldVisit(object)) return ResultType();
|
||||
int size = WeakArrayBodyDescriptor::SizeOf(map, object);
|
||||
if (visitor->ShouldVisitMapPointer())
|
||||
if (visitor->ShouldVisitMapPointer()) {
|
||||
visitor->VisitMapPointer(object, object->map_slot());
|
||||
}
|
||||
WeakArrayBodyDescriptor::IterateBody(map, object, size, visitor);
|
||||
return size;
|
||||
}
|
||||
|
@ -49,7 +49,7 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
|
||||
DCHECK(!tail.is_null());
|
||||
WeakListVisitor<T>::SetWeakNext(tail, retained);
|
||||
if (record_slots) {
|
||||
HeapObject* slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
|
||||
HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
|
||||
int slot_offset = WeakListVisitor<T>::WeakNextOffset();
|
||||
ObjectSlot slot = HeapObject::RawField(slot_holder, slot_offset);
|
||||
MarkCompactCollector::RecordSlot(slot_holder, slot,
|
||||
@ -95,7 +95,7 @@ struct WeakListVisitor<Code> {
|
||||
return code->code_data_container()->next_code_link();
|
||||
}
|
||||
|
||||
static HeapObject* WeakNextHolder(Code code) {
|
||||
static HeapObject WeakNextHolder(Code code) {
|
||||
return code->code_data_container();
|
||||
}
|
||||
|
||||
@ -121,7 +121,7 @@ struct WeakListVisitor<Context> {
|
||||
return context->next_context_link();
|
||||
}
|
||||
|
||||
static HeapObject* WeakNextHolder(Context context) { return context; }
|
||||
static HeapObject WeakNextHolder(Context context) { return context; }
|
||||
|
||||
static int WeakNextOffset() {
|
||||
return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
|
||||
@ -176,7 +176,7 @@ struct WeakListVisitor<AllocationSite> {
|
||||
|
||||
static Object* WeakNext(AllocationSite obj) { return obj->weak_next(); }
|
||||
|
||||
static HeapObject* WeakNextHolder(AllocationSite obj) { return obj; }
|
||||
static HeapObject WeakNextHolder(AllocationSite obj) { return obj; }
|
||||
|
||||
static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }
|
||||
|
||||
|
@ -95,18 +95,18 @@ class WasmInstanceObject;
|
||||
template <typename ResultType, typename ConcreteVisitor>
|
||||
class HeapVisitor : public ObjectVisitor {
|
||||
public:
|
||||
V8_INLINE ResultType Visit(HeapObject* object);
|
||||
V8_INLINE ResultType Visit(Map map, HeapObject* object);
|
||||
V8_INLINE ResultType Visit(HeapObject object);
|
||||
V8_INLINE ResultType Visit(Map map, HeapObject object);
|
||||
|
||||
protected:
|
||||
// A guard predicate for visiting the object.
|
||||
// If it returns false then the default implementations of the Visit*
|
||||
// functions bailout from iterating the object pointers.
|
||||
V8_INLINE bool ShouldVisit(HeapObject* object) { return true; }
|
||||
V8_INLINE bool ShouldVisit(HeapObject object) { return true; }
|
||||
// Guard predicate for visiting the objects map pointer separately.
|
||||
V8_INLINE bool ShouldVisitMapPointer() { return true; }
|
||||
// A callback for visiting the map pointer in the object header.
|
||||
V8_INLINE void VisitMapPointer(HeapObject* host, MapWordSlot map_slot);
|
||||
V8_INLINE void VisitMapPointer(HeapObject host, MapWordSlot map_slot);
|
||||
// If this predicate returns false, then the heap visitor will fail
|
||||
// in default Visit implemention for subclasses of JSObject.
|
||||
V8_INLINE bool AllowDefaultJSObjectVisit() { return true; }
|
||||
@ -116,20 +116,15 @@ class HeapVisitor : public ObjectVisitor {
|
||||
TYPED_VISITOR_ID_LIST(VISIT)
|
||||
#undef VISIT
|
||||
V8_INLINE ResultType VisitShortcutCandidate(Map map, ConsString object);
|
||||
V8_INLINE ResultType VisitDataObject(Map map, HeapObject* object);
|
||||
V8_INLINE ResultType VisitDataObject(Map map, HeapObject object);
|
||||
V8_INLINE ResultType VisitJSObjectFast(Map map, JSObject object);
|
||||
V8_INLINE ResultType VisitJSApiObject(Map map, JSObject object);
|
||||
V8_INLINE ResultType VisitStruct(Map map, HeapObject* object);
|
||||
V8_INLINE ResultType VisitStruct(Map map, HeapObject object);
|
||||
V8_INLINE ResultType VisitFreeSpace(Map map, FreeSpace object);
|
||||
V8_INLINE ResultType VisitWeakArray(Map map, HeapObject* object);
|
||||
V8_INLINE ResultType VisitWeakArray(Map map, HeapObject object);
|
||||
|
||||
template <typename T, typename = typename std::enable_if<
|
||||
std::is_base_of<Object, T>::value>::type>
|
||||
static V8_INLINE T* Cast(HeapObject* object);
|
||||
|
||||
template <typename T, typename = typename std::enable_if<
|
||||
std::is_base_of<ObjectPtr, T>::value>::type>
|
||||
static V8_INLINE T Cast(HeapObject* object);
|
||||
template <typename T>
|
||||
static V8_INLINE T Cast(HeapObject object);
|
||||
};
|
||||
|
||||
template <typename ConcreteVisitor>
|
||||
|
@ -333,10 +333,10 @@ class UpdateTypedSlotHelper {
|
||||
static SlotCallbackResult UpdateEmbeddedPointer(Heap* heap, RelocInfo* rinfo,
|
||||
Callback callback) {
|
||||
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
|
||||
HeapObject* old_target = rinfo->target_object();
|
||||
HeapObject* new_target = old_target;
|
||||
HeapObject old_target = rinfo->target_object();
|
||||
HeapObject new_target = old_target;
|
||||
SlotCallbackResult result = callback(FullMaybeObjectSlot(&new_target));
|
||||
DCHECK(!HasWeakHeapObjectTag(new_target));
|
||||
DCHECK(!HasWeakHeapObjectTag(new_target->ptr()));
|
||||
if (new_target != old_target) {
|
||||
rinfo->set_target_object(heap, HeapObject::cast(new_target));
|
||||
}
|
||||
|
@ -16,13 +16,13 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
void Scavenger::PromotionList::View::PushRegularObject(HeapObject* object,
|
||||
void Scavenger::PromotionList::View::PushRegularObject(HeapObject object,
|
||||
int size) {
|
||||
promotion_list_->PushRegularObject(task_id_, object, size);
|
||||
}
|
||||
|
||||
void Scavenger::PromotionList::View::PushLargeObject(HeapObject* object,
|
||||
Map map, int size) {
|
||||
void Scavenger::PromotionList::View::PushLargeObject(HeapObject object, Map map,
|
||||
int size) {
|
||||
promotion_list_->PushLargeObject(task_id_, object, map, size);
|
||||
}
|
||||
|
||||
@ -46,12 +46,12 @@ bool Scavenger::PromotionList::View::ShouldEagerlyProcessPromotionList() {
|
||||
return promotion_list_->ShouldEagerlyProcessPromotionList(task_id_);
|
||||
}
|
||||
|
||||
void Scavenger::PromotionList::PushRegularObject(int task_id,
|
||||
HeapObject* object, int size) {
|
||||
void Scavenger::PromotionList::PushRegularObject(int task_id, HeapObject object,
|
||||
int size) {
|
||||
regular_object_promotion_list_.Push(task_id, ObjectAndSize(object, size));
|
||||
}
|
||||
|
||||
void Scavenger::PromotionList::PushLargeObject(int task_id, HeapObject* object,
|
||||
void Scavenger::PromotionList::PushLargeObject(int task_id, HeapObject object,
|
||||
Map map, int size) {
|
||||
large_object_promotion_list_.Push(task_id, {object, map, size});
|
||||
}
|
||||
@ -114,7 +114,7 @@ void Scavenger::PageMemoryFence(MaybeObject object) {
|
||||
#ifdef THREAD_SANITIZER
|
||||
// Perform a dummy acquire load to tell TSAN that there is no data race
|
||||
// with page initialization.
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (object->GetHeapObject(&heap_object)) {
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
|
||||
CHECK_NOT_NULL(chunk->synchronized_heap());
|
||||
@ -122,7 +122,7 @@ void Scavenger::PageMemoryFence(MaybeObject object) {
|
||||
#endif
|
||||
}
|
||||
|
||||
bool Scavenger::MigrateObject(Map map, HeapObject* source, HeapObject* target,
|
||||
bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target,
|
||||
int size) {
|
||||
// Copy the content of source to target.
|
||||
target->set_map_word(MapWord::FromMap(map));
|
||||
@ -150,7 +150,7 @@ bool Scavenger::MigrateObject(Map map, HeapObject* source, HeapObject* target,
|
||||
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
@ -160,7 +160,7 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,
AllocationResult allocation =
allocator_.Allocate(NEW_SPACE, object_size, alignment);

HeapObject* target = nullptr;
HeapObject target;
if (allocation.To(&target)) {
DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
target));
@ -185,7 +185,7 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map map,

template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
@ -194,7 +194,7 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
AllocationResult allocation =
allocator_.Allocate(OLD_SPACE, object_size, alignment);

HeapObject* target = nullptr;
HeapObject target;
if (allocation.To(&target)) {
DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
target));
@ -225,8 +225,7 @@ SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
: REMOVE_SLOT;
}

bool Scavenger::HandleLargeObject(Map map, HeapObject* object,
int object_size) {
bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size) {
// TODO(hpayer): Make this check size based, i.e.
// object_size > kMaxRegularHeapObjectSize
if (V8_UNLIKELY(
@ -250,7 +249,7 @@ bool Scavenger::HandleLargeObject(Map map, HeapObject* object,
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObjectDefault(Map map,
THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
@ -325,7 +324,7 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
DCHECK(IsShortcutCandidate(map->instance_type()));
if (!is_incremental_marking_ &&
object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
HeapObject* first = HeapObject::cast(object->unchecked_first());
HeapObject first = HeapObject::cast(object->unchecked_first());

slot.StoreHeapObject(first);

@ -337,7 +336,7 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,

MapWord first_word = first->synchronized_map_word();
if (first_word.IsForwardingAddress()) {
HeapObject* target = first_word.ToForwardingAddress();
HeapObject target = first_word.ToForwardingAddress();

slot.StoreHeapObject(target);
object->map_slot().Release_Store(
@ -357,7 +356,7 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,

template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
HeapObject* source) {
HeapObject source) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
@ -384,7 +383,7 @@ SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,

template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
HeapObject* object) {
HeapObject object) {
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
@ -396,7 +395,7 @@ SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
// If the first word is a forwarding address, the object has already been
// copied.
if (first_word.IsForwardingAddress()) {
HeapObject* dest = first_word.ToForwardingAddress();
HeapObject dest = first_word.ToForwardingAddress();
DCHECK(Heap::InFromSpace(*p));
if ((*p)->IsWeak()) {
p.store(HeapObjectReference::Weak(dest));
@ -428,8 +427,7 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
MaybeObject object = *slot;
if (Heap::InFromSpace(object)) {
HeapObject* heap_object = object->GetHeapObject();
DCHECK(heap_object->IsHeapObject());
HeapObject heap_object = object->GetHeapObject();

SlotCallbackResult result =
ScavengeObject(THeapObjectSlot(slot), heap_object);
@ -446,12 +444,12 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
return REMOVE_SLOT;
}

void ScavengeVisitor::VisitPointers(HeapObject* host, ObjectSlot start,
void ScavengeVisitor::VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) {
return VisitPointersImpl(host, start, end);
}

void ScavengeVisitor::VisitPointers(HeapObject* host, MaybeObjectSlot start,
void ScavengeVisitor::VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) {
return VisitPointersImpl(host, start, end);
}
@ -468,9 +466,9 @@ void ScavengeVisitor::VisitCodeTarget(Code host, RelocInfo* rinfo) {
}

void ScavengeVisitor::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
HeapObject* heap_object = rinfo->target_object();
HeapObject heap_object = rinfo->target_object();
#ifdef DEBUG
HeapObject* old_heap_object = heap_object;
HeapObject old_heap_object = heap_object;
#endif
FullObjectSlot slot(&heap_object);
VisitHeapObjectImpl(slot, heap_object);
@ -480,18 +478,18 @@ void ScavengeVisitor::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
}

template <typename TSlot>
void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot, HeapObject* heap_object) {
void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot, HeapObject heap_object) {
if (Heap::InNewSpace(heap_object)) {
scavenger_->ScavengeObject(HeapObjectSlot(slot), heap_object);
}
}

template <typename TSlot>
void ScavengeVisitor::VisitPointersImpl(HeapObject* host, TSlot start,
void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start,
TSlot end) {
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = slot.load();
HeapObject* heap_object;
HeapObject heap_object;
// Treat weak references as strong.
if (object.GetHeapObject(&heap_object)) {
VisitHeapObjectImpl(slot, heap_object);

@ -76,12 +76,12 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
bool record_slots)
: heap_(heap), scavenger_(scavenger), record_slots_(record_slots) {}

V8_INLINE void VisitPointers(HeapObject* host, ObjectSlot start,
V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) final {
VisitPointersImpl(host, start, end);
}

V8_INLINE void VisitPointers(HeapObject* host, MaybeObjectSlot start,
V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
VisitPointersImpl(host, start, end);
}
@ -91,19 +91,19 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
HandleSlot(host, FullHeapObjectSlot(&target), target);
}
V8_INLINE void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
HeapObject* heap_object = rinfo->target_object();
HeapObject heap_object = rinfo->target_object();
HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
}

private:
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end) {
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
// Treat weak references as strong.
// TODO(marja): Proper weakness handling in the young generation.
for (TSlot slot = start; slot < end; ++slot) {
typename TSlot::TObject object = slot.load();
HeapObject* heap_object;
HeapObject heap_object;
if (object.GetHeapObject(&heap_object)) {
HandleSlot(host, THeapObjectSlot(slot), heap_object);
}
@ -111,8 +111,8 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
}

template <typename THeapObjectSlot>
V8_INLINE void HandleSlot(HeapObject* host, THeapObjectSlot slot,
HeapObject* target) {
V8_INLINE void HandleSlot(HeapObject host, THeapObjectSlot slot,
HeapObject target) {
static_assert(
std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
@ -304,7 +304,7 @@ void ScavengerCollector::CollectGarbage() {
void ScavengerCollector::HandleSurvivingNewLargeObjects() {
for (SurvivingNewLargeObjectMapEntry update_info :
surviving_new_large_objects_) {
HeapObject* object = update_info.first;
HeapObject object = update_info.first;
Map map = update_info.second;
// Order is important here. We have to re-install the map to have access
// to meta-data like size during page promotion.
@ -354,7 +354,7 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging,
is_incremental_marking_(heap->incremental_marking()->IsMarking()),
is_compacting_(heap->incremental_marking()->IsCompacting()) {}

void Scavenger::IterateAndScavengePromotedObject(HeapObject* target, Map map,
void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map,
int size) {
// We are not collecting slots on new space objects during mutation thus we
// have to scan for pointers to evacuation candidates when we promote
@ -419,7 +419,7 @@ void Scavenger::Process(OneshotBarrier* barrier) {

struct PromotionListEntry entry;
while (promotion_list_.Pop(&entry)) {
HeapObject* target = entry.heap_object;
HeapObject target = entry.heap_object;
DCHECK(!target->IsMap());
IterateAndScavengePromotedObject(target, entry.map, entry.size);
done = false;
@ -458,8 +458,7 @@ void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
DCHECK(!HasWeakHeapObjectTag(object));
if (!Heap::InNewSpace(object)) return;

scavenger_->ScavengeObject(FullHeapObjectSlot(p),
reinterpret_cast<HeapObject*>(object));
scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
}

RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)

@ -22,9 +22,10 @@ enum class CopyAndForwardResult {
FAILURE
};

using ObjectAndSize = std::pair<HeapObject*, int>;
using SurvivingNewLargeObjectsMap = std::unordered_map<HeapObject*, Map>;
using SurvivingNewLargeObjectMapEntry = std::pair<HeapObject*, Map>;
using ObjectAndSize = std::pair<HeapObject, int>;
using SurvivingNewLargeObjectsMap =
std::unordered_map<HeapObject, Map, HeapObject::Hasher>;
using SurvivingNewLargeObjectMapEntry = std::pair<HeapObject, Map>;

class ScavengerCollector {
public:
@ -54,7 +55,7 @@ class ScavengerCollector {
class Scavenger {
public:
struct PromotionListEntry {
HeapObject* heap_object;
HeapObject heap_object;
Map map;
int size;
};
@ -66,8 +67,8 @@ class Scavenger {
View(PromotionList* promotion_list, int task_id)
: promotion_list_(promotion_list), task_id_(task_id) {}

inline void PushRegularObject(HeapObject* object, int size);
inline void PushLargeObject(HeapObject* object, Map map, int size);
inline void PushRegularObject(HeapObject object, int size);
inline void PushLargeObject(HeapObject object, Map map, int size);
inline bool IsEmpty();
inline size_t LocalPushSegmentSize();
inline bool Pop(struct PromotionListEntry* entry);
@ -83,8 +84,8 @@ class Scavenger {
: regular_object_promotion_list_(num_tasks),
large_object_promotion_list_(num_tasks) {}

inline void PushRegularObject(int task_id, HeapObject* object, int size);
inline void PushLargeObject(int task_id, HeapObject* object, Map map,
inline void PushRegularObject(int task_id, HeapObject object, int size);
inline void PushLargeObject(int task_id, HeapObject object, Map map,
int size);
inline bool IsEmpty();
inline size_t LocalPushSegmentSize(int task_id);
@ -148,10 +149,10 @@ class Scavenger {
// to be in from space.
template <typename THeapObjectSlot>
inline SlotCallbackResult ScavengeObject(THeapObjectSlot p,
HeapObject* object);
HeapObject object);

// Copies |source| to |target| and sets the forwarding pointer in |source|.
V8_INLINE bool MigrateObject(Map map, HeapObject* source, HeapObject* target,
V8_INLINE bool MigrateObject(Map map, HeapObject source, HeapObject target,
int size);

V8_INLINE SlotCallbackResult
@ -160,26 +161,25 @@ class Scavenger {
template <typename THeapObjectSlot>
V8_INLINE CopyAndForwardResult SemiSpaceCopyObject(Map map,
THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size);

template <typename THeapObjectSlot>
V8_INLINE CopyAndForwardResult PromoteObject(Map map, THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size);

template <typename THeapObjectSlot>
V8_INLINE SlotCallbackResult EvacuateObject(THeapObjectSlot slot, Map map,
HeapObject* source);
HeapObject source);

V8_INLINE bool HandleLargeObject(Map map, HeapObject* object,
int object_size);
V8_INLINE bool HandleLargeObject(Map map, HeapObject object, int object_size);

// Different cases for object evacuation.
template <typename THeapObjectSlot>
V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map map,
THeapObjectSlot slot,
HeapObject* object,
HeapObject object,
int object_size);

template <typename THeapObjectSlot>
@ -193,7 +193,7 @@ class Scavenger {
ConsString object,
int object_size);

void IterateAndScavengePromotedObject(HeapObject* target, Map map, int size);
void IterateAndScavengePromotedObject(HeapObject target, Map map, int size);

static inline bool ContainsOnlyData(VisitorId visitor_id);

@ -236,10 +236,10 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
public:
explicit ScavengeVisitor(Scavenger* scavenger);

V8_INLINE void VisitPointers(HeapObject* host, ObjectSlot start,
V8_INLINE void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) final;

V8_INLINE void VisitPointers(HeapObject* host, MaybeObjectSlot start,
V8_INLINE void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final;

V8_INLINE void VisitCodeTarget(Code host, RelocInfo* rinfo) final;
@ -247,10 +247,10 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {

private:
template <typename TSlot>
V8_INLINE void VisitHeapObjectImpl(TSlot slot, HeapObject* heap_object);
V8_INLINE void VisitHeapObjectImpl(TSlot slot, HeapObject heap_object);

template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject* host, TSlot start, TSlot end);
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end);

Scavenger* const scavenger_;
};

@ -105,7 +105,7 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
!Map::CanHaveFastTransitionableElementsKind(instance_type),
IsDictionaryElementsKind(elements_kind) ||
IsTerminalElementsKind(elements_kind));
HeapObject* result = nullptr;
HeapObject result;
// JSObjects have maps with a mutable prototype_validity_cell, so they cannot
// go in RO_SPACE.
AllocationResult allocation =
@ -169,7 +169,7 @@ void Heap::FinalizePartialMap(Map map) {
AllocationResult Heap::Allocate(Map map, AllocationSpace space) {
DCHECK(map->instance_type() != MAP_TYPE);
int size = map->instance_size();
HeapObject* result = nullptr;
HeapObject result;
AllocationResult allocation = AllocateRaw(size, space);
if (!allocation.To(&result)) return allocation;
// New space objects are allocated white.
@ -183,7 +183,7 @@ AllocationResult Heap::AllocateEmptyFixedTypedArray(
ExternalArrayType array_type) {
int size = OBJECT_POINTER_ALIGN(FixedTypedArrayBase::kDataOffset);

HeapObject* object = nullptr;
HeapObject object;
AllocationResult allocation = AllocateRaw(
size, RO_SPACE,
array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
@ -203,7 +203,7 @@ AllocationResult Heap::AllocateEmptyFixedTypedArray(
}

bool Heap::CreateInitialMaps() {
HeapObject* obj = nullptr;
HeapObject obj;
{
AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
if (!allocation.To(&obj)) return false;

@ -42,42 +42,42 @@ PageRange::PageRange(Address start, Address limit)
// -----------------------------------------------------------------------------
// SemiSpaceIterator

HeapObject* SemiSpaceIterator::Next() {
HeapObject SemiSpaceIterator::Next() {
while (current_ != limit_) {
if (Page::IsAlignedToPageSize(current_)) {
Page* page = Page::FromAllocationAreaAddress(current_);
page = page->next_page();
DCHECK(page);
current_ = page->area_start();
if (current_ == limit_) return nullptr;
if (current_ == limit_) return HeapObject();
}
HeapObject* object = HeapObject::FromAddress(current_);
HeapObject object = HeapObject::FromAddress(current_);
current_ += object->Size();
if (!object->IsFiller()) {
return object;
}
}
return nullptr;
return HeapObject();
}

// -----------------------------------------------------------------------------
// HeapObjectIterator

HeapObject* HeapObjectIterator::Next() {
HeapObject HeapObjectIterator::Next() {
do {
HeapObject* next_obj = FromCurrentPage();
if (next_obj != nullptr) return next_obj;
HeapObject next_obj = FromCurrentPage();
if (!next_obj.is_null()) return next_obj;
} while (AdvanceToNextPage());
return nullptr;
return HeapObject();
}

HeapObject* HeapObjectIterator::FromCurrentPage() {
HeapObject HeapObjectIterator::FromCurrentPage() {
while (cur_addr_ != cur_end_) {
if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
cur_addr_ = space_->limit();
continue;
}
HeapObject* obj = HeapObject::FromAddress(cur_addr_);
HeapObject obj = HeapObject::FromAddress(cur_addr_);
const int obj_size = obj->Size();
cur_addr_ += obj_size;
DCHECK_LE(cur_addr_, cur_end_);
@ -91,7 +91,7 @@ HeapObject* HeapObjectIterator::FromCurrentPage() {
return obj;
}
}
return nullptr;
return HeapObject();
}

void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
@ -118,7 +118,7 @@ void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
// -----------------------------------------------------------------------------
// SemiSpace

bool SemiSpace::Contains(HeapObject* o) {
bool SemiSpace::Contains(HeapObject o) {
return id_ == kToSpace
? MemoryChunk::FromAddress(o->address())->InToSpace()
: MemoryChunk::FromAddress(o->address())->InFromSpace();
@ -138,15 +138,11 @@ bool SemiSpace::ContainsSlow(Address a) {
// --------------------------------------------------------------------------
// NewSpace

bool NewSpace::Contains(HeapObject* o) {
return MemoryChunk::FromAddress(o->address())->InNewSpace();
}

bool NewSpace::Contains(Object* o) {
return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::Contains(HeapObjectPtr o) {
bool NewSpace::Contains(HeapObject o) {
return MemoryChunk::FromHeapObject(o)->InNewSpace();
}

@ -198,7 +194,7 @@ size_t PagedSpace::RelinkFreeListCategories(Page* page) {
return added;
}

bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
if (allocation_info_.top() != kNullAddress) {
const Address object_address = object->address();
if ((allocation_info_.top() - object_size) == object_address) {
@ -268,6 +264,10 @@ void Page::ClearEvacuationCandidate() {
InitializeFreeListCategories();
}

HeapObject LargePage::GetObject() {
return HeapObject::FromAddress(area_start());
}

OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
: heap_(heap),
state_(kOldSpaceState),
@ -348,7 +348,7 @@ bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
return SlowRefillLinearAllocationArea(size_in_bytes);
}

HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
Address current_top = allocation_info_.top();
Address new_top = current_top + size_in_bytes;
DCHECK_LE(new_top, allocation_info_.limit());
@ -356,13 +356,13 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
return HeapObject::FromAddress(current_top);
}

HeapObject* PagedSpace::TryAllocateLinearlyAligned(
HeapObject PagedSpace::TryAllocateLinearlyAligned(
int* size_in_bytes, AllocationAlignment alignment) {
Address current_top = allocation_info_.top();
int filler_size = Heap::GetFillToAlign(current_top, alignment);

Address new_top = current_top + filler_size + *size_in_bytes;
if (new_top > allocation_info_.limit()) return nullptr;
if (new_top > allocation_info_.limit()) return HeapObject();

allocation_info_.set_top(new_top);
if (filler_size > 0) {
@ -380,8 +380,8 @@ AllocationResult PagedSpace::AllocateRawUnaligned(
if (!EnsureLinearAllocationArea(size_in_bytes)) {
return AllocationResult::Retry(identity());
}
HeapObject* object = AllocateLinearly(size_in_bytes);
DCHECK_NOT_NULL(object);
HeapObject object = AllocateLinearly(size_in_bytes);
DCHECK(!object.is_null());
if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
SkipList::Update(object->address(), size_in_bytes);
}
@ -395,8 +395,8 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
int allocation_size = size_in_bytes;
HeapObject* object = TryAllocateLinearlyAligned(&allocation_size, alignment);
if (object == nullptr) {
HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
if (object.is_null()) {
// We don't know exactly how much filler we need to align until space is
// allocated, so assume the worst case.
int filler_size = Heap::GetMaximumFillToAlign(alignment);
@ -406,7 +406,7 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
}
allocation_size = size_in_bytes;
object = TryAllocateLinearlyAligned(&allocation_size, alignment);
DCHECK_NOT_NULL(object);
DCHECK(!object.is_null());
}
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
return object;
@ -436,7 +436,7 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
#else
AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
HeapObject* heap_obj = nullptr;
HeapObject heap_obj;
if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
DCHECK_IMPLIES(
heap()->incremental_marking()->black_allocation(),
@ -471,7 +471,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
aligned_size_in_bytes = size_in_bytes + filler_size;
}

HeapObject* obj = HeapObject::FromAddress(top);
HeapObject obj = HeapObject::FromAddress(top);
allocation_info_.set_top(top + aligned_size_in_bytes);
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

@ -496,7 +496,7 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
top = allocation_info_.top();
}

HeapObject* obj = HeapObject::FromAddress(top);
HeapObject obj = HeapObject::FromAddress(top);
allocation_info_.set_top(top + size_in_bytes);
DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

@ -533,7 +533,7 @@ LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
AllocationResult result,
intptr_t size) {
if (result.IsRetry()) return InvalidBuffer();
HeapObject* obj = nullptr;
HeapObject obj;
bool ok = result.To(&obj);
USE(ok);
DCHECK(ok);
@ -551,7 +551,7 @@ bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
return false;
}

bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
if (IsValid()) {
const Address object_address = object->address();
if ((allocation_info_.top() - object_size) == object_address) {

@ -942,7 +942,7 @@ size_t Page::AvailableInFreeList() {
namespace {
// Skips filler starting from the given filler until the end address.
// Returns the first address after the skipped fillers.
Address SkipFillers(HeapObject* filler, Address end) {
Address SkipFillers(HeapObject filler, Address end) {
Address addr = filler->address();
while (addr < end) {
filler = HeapObject::FromAddress(addr);
@ -962,7 +962,7 @@ size_t Page::ShrinkToHighWaterMark() {

// Shrink pages to high water mark. The water mark points either to a filler
// or the area_end.
HeapObject* filler = HeapObject::FromAddress(HighWaterMark());
HeapObject filler = HeapObject::FromAddress(HighWaterMark());
if (filler->address() == area_end()) return 0;
CHECK(filler->IsFiller());
// Ensure that no objects were allocated in [filler, area_end) region.
@ -1364,7 +1364,7 @@ void MemoryChunk::ReleaseInvalidatedSlots() {
}
}

void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object,
void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject object,
int size) {
if (!ShouldSkipEvacuationSlotRecording()) {
if (invalidated_slots() == nullptr) {
@ -1375,7 +1375,7 @@ void MemoryChunk::RegisterObjectWithInvalidatedSlots(HeapObject* object,
}
}

bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject* object) {
bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
if (ShouldSkipEvacuationSlotRecording()) {
// Invalidated slots do not matter if we are not recording slots.
return true;
@ -1386,8 +1386,8 @@ bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject* object) {
return invalidated_slots()->find(object) != invalidated_slots()->end();
}

void MemoryChunk::MoveObjectWithInvalidatedSlots(HeapObject* old_start,
HeapObject* new_start) {
void MemoryChunk::MoveObjectWithInvalidatedSlots(HeapObject old_start,
HeapObject new_start) {
DCHECK_LT(old_start, new_start);
DCHECK_EQ(MemoryChunk::FromHeapObject(old_start),
MemoryChunk::FromHeapObject(new_start));
@ -1936,8 +1936,7 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
Address end_of_previous_object = page->area_start();
Address top = page->area_end();

for (HeapObject* object = it.Next(); object != nullptr;
object = it.Next()) {
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
CHECK(end_of_previous_object <= object->address());

// The first word should be a map, and we expect all map pointers to
@ -1998,8 +1997,7 @@ void PagedSpace::VerifyLiveBytes() {
CHECK(page->SweepingDone());
HeapObjectIterator it(page);
int black_size = 0;
for (HeapObject* object = it.Next(); object != nullptr;
object = it.Next()) {
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
// All the interior pointers should be contained in the heap.
if (marking_state->IsBlack(object)) {
black_size += object->Size();
@ -2019,8 +2017,7 @@ void PagedSpace::VerifyCountersAfterSweeping() {
total_capacity += page->area_size();
HeapObjectIterator it(page);
size_t real_allocated = 0;
for (HeapObject* object = it.Next(); object != nullptr;
object = it.Next()) {
for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) {
if (!object->IsFiller()) {
real_allocated += object->Size();
}
@ -2435,7 +2432,7 @@ void NewSpace::Verify(Isolate* isolate) {
CHECK(!Page::FromAllocationAreaAddress(current)->ContainsLimit(top()) ||
current < top());

HeapObject* object = HeapObject::FromAddress(current);
HeapObject object = HeapObject::FromAddress(current);

// The first word should be a map, and we expect all map pointers to
// be in map space or read-only space.
@ -2871,7 +2868,7 @@ void FreeListCategory::RepairFreeList(Heap* heap) {
MapWordSlot map_location = n.map_slot();
// We can't use .is_null() here because ObjectSlot.load() returns an
// ObjectPtr (for which "is null" is not defined, as it would be
// indistinguishable from "is Smi(0)"). Only HeapObjectPtr has "is_null()".
// indistinguishable from "is Smi(0)"). Only HeapObject has "is_null()".
if (map_location.load() == Map()) {
map_location.store(ReadOnlyRoots(heap).free_space_map());
} else {
@ -3242,7 +3239,7 @@ bool PagedSpace::RawSlowRefillLinearAllocationArea(int size_in_bytes) {
// MapSpace implementation

#ifdef VERIFY_HEAP
void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); }
void MapSpace::VerifyObject(HeapObject object) { CHECK(object->IsMap()); }
#endif

ReadOnlySpace::ReadOnlySpace(Heap* heap)
@ -3299,7 +3296,7 @@ void ReadOnlySpace::RepairFreeListsAfterDeserialization() {
Address end = page->area_end();
if (start < end - size) {
// A region at the high watermark is already in free list.
HeapObject* filler = HeapObject::FromAddress(start);
HeapObject filler = HeapObject::FromAddress(start);
CHECK(filler->IsFiller());
start += filler->Size();
}
@ -3314,7 +3311,7 @@ void ReadOnlySpace::ClearStringPaddingIfNeeded() {
WritableScope writable_scope(this);
for (Page* page : *this) {
HeapObjectIterator iterator(page);
for (HeapObject* o = iterator.Next(); o != nullptr; o = iterator.Next()) {
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
if (o->IsSeqOneByteString()) {
SeqOneByteString::cast(o)->clear_padding();
} else if (o->IsSeqTwoByteString()) {
@ -3367,16 +3364,14 @@ LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) {
current_ = space->first_page();
}

HeapObject LargeObjectIterator::Next() {
if (current_ == nullptr) return HeapObject();

HeapObject* LargeObjectIterator::Next() {
if (current_ == nullptr) return nullptr;

HeapObject* object = current_->GetObject();
HeapObject object = current_->GetObject();
current_ = current_->next_page();
return object;
}

// -----------------------------------------------------------------------------
// LargeObjectSpace

@ -3417,7 +3412,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
LargePage* page = AllocateLargePage(object_size, executable);
if (page == nullptr) return AllocationResult::Retry(identity());
page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking());
HeapObject* object = page->GetObject();
HeapObject object = page->GetObject();
heap()->StartIncrementalMarkingIfAllocationLimitIsReached(
heap()->GCFlagsForIncrementalMarking(),
kGCCallbackScheduleIdleGarbageCollection);
@ -3440,7 +3435,7 @@ LargePage* LargeObjectSpace::AllocateLargePage(int object_size,

Register(page, object_size);

HeapObject* object = page->GetObject();
HeapObject object = page->GetObject();

heap()->CreateFillerObjectAt(object->address(), object_size,
ClearRecordedSlots::kNo);
@ -3483,7 +3478,7 @@ void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
IncrementalMarking::NonAtomicMarkingState* marking_state =
heap()->incremental_marking()->non_atomic_marking_state();
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
if (marking_state->IsBlackOrGrey(obj)) {
Marking::MarkWhite(marking_state->MarkBitFrom(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
@ -3561,7 +3556,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
objects_size_ = 0;
while (current) {
LargePage* next_current = current->next_page();
HeapObject* object = current->GetObject();
HeapObject object = current->GetObject();
DCHECK(!marking_state->IsGrey(object));
if (marking_state->IsBlack(object)) {
Address free_start;
@ -3596,7 +3591,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
}
}

bool LargeObjectSpace::Contains(HeapObject* object) {
bool LargeObjectSpace::Contains(HeapObject object) {
Address address = object->address();
MemoryChunk* chunk = MemoryChunk::FromAddress(address);

@ -3625,7 +3620,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
chunk = chunk->next_page()) {
// Each chunk contains an object that starts at the large object page's
// object area start.
HeapObject* object = chunk->GetObject();
HeapObject object = chunk->GetObject();
Page* page = Page::FromAddress(object->address());
CHECK(object->address() == page->area_start());

@ -3663,7 +3658,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
for (int j = 0; j < array->length(); j++) {
Object* element = array->get(j);
if (element->IsHeapObject()) {
HeapObject* element_object = HeapObject::cast(element);
HeapObject element_object = HeapObject::cast(element);
CHECK(heap()->Contains(element_object));
CHECK(element_object->map()->IsMap());
}
@ -3673,7 +3668,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
for (int j = 0; j < array->length(); j++) {
Object* property = array->get(j);
if (property->IsHeapObject()) {
HeapObject* property_object = HeapObject::cast(property);
HeapObject property_object = HeapObject::cast(property);
CHECK(heap()->Contains(property_object));
CHECK(property_object->map()->IsMap());
}
@ -3695,7 +3690,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
void LargeObjectSpace::Print() {
StdoutStream os;
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
obj->Print(os);
}
}
@ -3707,7 +3702,7 @@ void Page::Print() {
printf(" --------------------------------------\n");
HeapObjectIterator objects(this);
unsigned mark_size = 0;
for (HeapObject* object = objects.Next(); object != nullptr;
for (HeapObject object = objects.Next(); !object.is_null();
object = objects.Next()) {
bool is_marked =
heap()->incremental_marking()->marking_state()->IsBlackOrGrey(object);

@ -358,9 +358,11 @@ class MemoryChunk {

static const intptr_t kSizeOffset = 0;
static const intptr_t kFlagsOffset = kSizeOffset + kSizetSize;
static const intptr_t kMarkBitmapOffset = kFlagsOffset + kSystemPointerSize;
static const intptr_t kMarkBitmapOffset = kFlagsOffset + kUIntptrSize;
static const intptr_t kReservationOffset =
kMarkBitmapOffset + kSystemPointerSize;
static const intptr_t kHeapOffset =
kReservationOffset + 3 * kSystemPointerSize;

static const size_t kHeaderSize =
kSizeOffset // NOLINT
@ -368,10 +370,10 @@ class MemoryChunk {
+ kUIntptrSize // uintptr_t flags_
+ kSystemPointerSize // Bitmap* marking_bitmap_
+ 3 * kSystemPointerSize // VirtualMemory reservation_
+ kSystemPointerSize // Heap* heap_
+ kSystemPointerSize // Address area_start_
+ kSystemPointerSize // Address area_end_
+ kSystemPointerSize // Address owner_
+ kSystemPointerSize // Heap* heap_
+ kIntptrSize // intptr_t progress_bar_
+ kIntptrSize // std::atomic<intptr_t> live_byte_count_
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
@ -408,12 +410,7 @@ class MemoryChunk {
return reinterpret_cast<MemoryChunk*>(a & ~kAlignmentMask);
}
// Only works if the object is in the first kPageSize of the MemoryChunk.
static MemoryChunk* FromHeapObject(const HeapObject* o) {
return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(o) &
~kAlignmentMask);
}
// Only works if the object is in the first kPageSize of the MemoryChunk.
static MemoryChunk* FromHeapObject(const HeapObjectPtr o) {
static MemoryChunk* FromHeapObject(const HeapObject o) {
return reinterpret_cast<MemoryChunk*>(o.ptr() & ~kAlignmentMask);
}

@ -513,11 +510,11 @@ class MemoryChunk {

InvalidatedSlots* AllocateInvalidatedSlots();
void ReleaseInvalidatedSlots();
void RegisterObjectWithInvalidatedSlots(HeapObject* object, int size);
void RegisterObjectWithInvalidatedSlots(HeapObject object, int size);
// Updates invalidated_slots after array left-trimming.
void MoveObjectWithInvalidatedSlots(HeapObject* old_start,
HeapObject* new_start);
bool RegisteredObjectWithInvalidatedSlots(HeapObject* object);
void MoveObjectWithInvalidatedSlots(HeapObject old_start,
HeapObject new_start);
bool RegisteredObjectWithInvalidatedSlots(HeapObject object);
InvalidatedSlots* invalidated_slots() { return invalidated_slots_; }

void ReleaseLocalTracker();
@ -673,6 +670,8 @@ class MemoryChunk {
// If the chunk needs to remember its memory reservation, it is stored here.
VirtualMemory reservation_;

Heap* heap_;

// Start and end of allocatable memory on this chunk.
Address area_start_;
Address area_end_;
@ -680,8 +679,6 @@ class MemoryChunk {
// The space owning this memory chunk.
std::atomic<Space*> owner_;

Heap* heap_;

// Used by the incremental marker to keep track of the scanning progress in
// large objects that have a progress bar and are scanned in increments.
intptr_t progress_bar_;
@ -781,9 +778,8 @@ class Page : public MemoryChunk {
static Page* FromAddress(Address addr) {
return reinterpret_cast<Page*>(addr & ~kPageAlignmentMask);
}
static Page* FromHeapObject(const HeapObject* o) {
return reinterpret_cast<Page*>(reinterpret_cast<Address>(o) &
~kAlignmentMask);
static Page* FromHeapObject(const HeapObject o) {
return reinterpret_cast<Page*>(o.ptr() & ~kAlignmentMask);
}

// Returns the page containing the address provided. The address can
@ -909,11 +905,11 @@ class LargePage : public MemoryChunk {
// x64 and ia32 architectures.
static const int kMaxCodePageSize = 512 * MB;

static LargePage* FromHeapObject(const HeapObject* o) {
static LargePage* FromHeapObject(const HeapObject o) {
return static_cast<LargePage*>(MemoryChunk::FromHeapObject(o));
}

HeapObject* GetObject() { return HeapObject::FromAddress(area_start()); }
inline HeapObject GetObject();

inline LargePage* next_page() {
return static_cast<LargePage*>(list_node_.next());
@ -1530,7 +1526,7 @@ MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>(
class V8_EXPORT_PRIVATE ObjectIterator : public Malloced {
public:
virtual ~ObjectIterator() = default;
virtual HeapObject* Next() = 0;
virtual HeapObject Next() = 0;
};

template <class PAGE_TYPE>
@ -1589,11 +1585,11 @@ class V8_EXPORT_PRIVATE HeapObjectIterator : public ObjectIterator {
// Advance to the next object, skipping free spaces and other fillers and
// skipping the special garbage section of which there is one per space.
// Returns nullptr when the iteration has ended.
inline HeapObject* Next() override;
inline HeapObject Next() override;

private:
// Fast (inlined) path of next().
inline HeapObject* FromCurrentPage();
inline HeapObject FromCurrentPage();

// Slow path of next(), goes into the next page. Returns false if the
// iteration has ended.
@ -1996,7 +1992,7 @@ class LocalAllocationBuffer {
// Returns true if the merge was successful, false otherwise.
inline bool TryMerge(LocalAllocationBuffer* other);

inline bool TryFreeLast(HeapObject* object, int object_size);
inline bool TryFreeLast(HeapObject object, int object_size);

// Close a LAB, effectively invalidating it. Returns the unused area.
LinearAllocationArea Close();
@ -2175,7 +2171,7 @@ class V8_EXPORT_PRIVATE PagedSpace
return size_in_bytes - wasted;
}

inline bool TryFreeLast(HeapObject* object, int object_size);
inline bool TryFreeLast(HeapObject object, int object_size);

void ResetFreeList();

@ -2223,7 +2219,7 @@ class V8_EXPORT_PRIVATE PagedSpace

// Overridden by subclasses to verify space-specific object
// properties (e.g., only maps or free-list nodes are in map space).
virtual void VerifyObject(HeapObject* obj) {}
virtual void VerifyObject(HeapObject obj) {}
#endif

#ifdef DEBUG
@ -2313,13 +2309,13 @@ class V8_EXPORT_PRIVATE PagedSpace
inline bool EnsureLinearAllocationArea(int size_in_bytes);
// Allocates an object from the linear allocation area. Assumes that the
// linear allocation area is large enought to fit the object.
inline HeapObject* AllocateLinearly(int size_in_bytes);
inline HeapObject AllocateLinearly(int size_in_bytes);
// Tries to allocate an aligned object from the linear allocation area.
// Returns nullptr if the linear allocation area does not fit the object.
// Otherwise, returns the object pointer and writes the allocation size
// (object size + alignment filler size) to the size_in_bytes.
inline HeapObject* TryAllocateLinearlyAligned(int* size_in_bytes,
AllocationAlignment alignment);
inline HeapObject TryAllocateLinearlyAligned(int* size_in_bytes,
AllocationAlignment alignment);

V8_WARN_UNUSED_RESULT bool RefillLinearAllocationAreaFromFreeList(
size_t size_in_bytes);
@ -2386,7 +2382,7 @@ class SemiSpace : public Space {
current_page_(nullptr),
pages_used_(0) {}

inline bool Contains(HeapObject* o);
inline bool Contains(HeapObject o);
inline bool Contains(Object* o);
inline bool ContainsSlow(Address a);

@ -2546,7 +2542,7 @@ class SemiSpaceIterator : public ObjectIterator {
// Create an iterator over the allocated objects in the given to-space.
explicit SemiSpaceIterator(NewSpace* space);

inline HeapObject* Next() override;
inline HeapObject Next() override;

private:
void Initialize(Address start, Address end);
@ -2572,10 +2568,9 @@ class NewSpace : public SpaceWithLinearArea {

~NewSpace() override { TearDown(); }

inline bool Contains(HeapObject* o);
inline bool ContainsSlow(Address a);
inline bool Contains(Object* o);
inline bool Contains(HeapObjectPtr o);
inline bool Contains(HeapObject o);

// Tears down the space. Heap memory was not allocated by the space, so it
// is not deallocated here.
@ -2910,7 +2905,7 @@ class MapSpace : public PagedSpace {
}

#ifdef VERIFY_HEAP
void VerifyObject(HeapObject* obj) override;
void VerifyObject(HeapObject obj) override;
#endif
};

@ -3006,7 +3001,7 @@ class LargeObjectSpace : public Space {
void PromoteNewLargeObject(LargePage* page);

// Checks whether a heap object is in this space; O(1).
bool Contains(HeapObject* obj);
bool Contains(HeapObject obj);
// Checks whether an address is in the object area in this space. Iterates
// all objects in the space. May be slow.
bool ContainsSlow(Address addr) { return FindObject(addr)->IsHeapObject(); }
@ -3085,7 +3080,7 @@ class LargeObjectIterator : public ObjectIterator {
public:
explicit LargeObjectIterator(LargeObjectSpace* space);

HeapObject* Next() override;
HeapObject Next() override;

private:
LargePage* current_;

@ -286,7 +286,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,

for (auto object_and_size :
LiveObjectRange<kBlackObjects>(p, marking_state_->bitmap(p))) {
HeapObject* const object = object_and_size.first;
HeapObject const object = object_and_size.first;
DCHECK(marking_state_->IsBlack(object));
Address free_end = object->address();
if (free_end != free_start) {

@ -89,7 +89,7 @@ int RelocInfo::target_address_size() {
return Assembler::kSpecialTargetSize;
}

HeapObject* RelocInfo::target_object() {
HeapObject RelocInfo::target_object() {
DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
return HeapObject::cast(Memory<Object*>(pc_));
}
@ -99,20 +99,19 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
return Handle<HeapObject>::cast(Memory<Handle<Object>>(pc_));
}

void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
Memory<Object*>(pc_) = target;
Memory<Address>(pc_) = target->ptr();
if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
Assembler::FlushICache(pc_, sizeof(Address));
}
if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
}
}

Address RelocInfo::target_external_reference() {
DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
return Memory<Address>(pc_);
@ -277,7 +276,8 @@ Address Assembler::target_address_from_return_address(Address pc) {
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code code, Address target) {
set_target_address_at(instruction_payload,
code ? code->constant_pool() : kNullAddress, target);
!code.is_null() ? code->constant_pool() : kNullAddress,
target);
}

int Assembler::deserialization_special_target_size(

@ -49,7 +49,7 @@ void IC::update_receiver_map(Handle<Object> receiver) {
}

bool IC::IsHandler(MaybeObject object) {
HeapObject* heap_object;
HeapObject heap_object;
return (object->IsSmi() && (object.ptr() != kNullAddress)) ||
(object->GetHeapObjectIfWeak(&heap_object) &&
(heap_object->IsMap() || heap_object->IsPropertyCell())) ||

@ -2786,7 +2786,7 @@ RUNTIME_FUNCTION(Runtime_LoadAccessorProperty) {

// Call the accessor without additional arguments.
FunctionCallbackArguments custom(isolate, call_handler_info->data(),
*receiver, holder, nullptr, nullptr, 0);
*receiver, holder, HeapObject(), nullptr, 0);
Handle<Object> result_handle = custom.Call(*call_handler_info);
RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
if (result_handle.is_null()) return ReadOnlyRoots(isolate).undefined_value();

@ -18,7 +18,7 @@ IsolateAllocationMode Isolate::isolate_allocation_mode() {
return isolate_allocator_->mode();
}

bool Isolate::FromWritableHeapObject(HeapObject* obj, Isolate** isolate) {
bool Isolate::FromWritableHeapObject(HeapObject obj, Isolate** isolate) {
i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj);
if (chunk->owner()->identity() == i::RO_SPACE) {
*isolate = nullptr;

@ -695,7 +695,7 @@ bool GetStackTraceLimit(Isolate* isolate, int* result) {

bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }

bool IsBuiltinFunction(Isolate* isolate, HeapObject* object,
bool IsBuiltinFunction(Isolate* isolate, HeapObject object,
Builtins::Name builtin_index) {
if (!object->IsJSFunction()) return false;
JSFunction const function = JSFunction::cast(object);
@ -3540,7 +3540,8 @@ void Isolate::MaybeInitializeVectorListFromHeap() {

{
HeapIterator heap_iterator(heap());
while (HeapObject* current_obj = heap_iterator.next()) {
for (HeapObject current_obj = heap_iterator.next(); !current_obj.is_null();
current_obj = heap_iterator.next()) {
if (!current_obj->IsFeedbackVector()) continue;

FeedbackVector vector = FeedbackVector::cast(current_obj);

@ -456,7 +456,7 @@ class ThreadLocalTop {
V(int, suffix_table, (kBMMaxShift + 1)) \
ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef std::vector<HeapObject*> DebugObjectCache;
typedef std::vector<HeapObject> DebugObjectCache;

#define ISOLATE_INIT_LIST(V) \
/* Assembler state. */ \
@ -613,7 +613,7 @@ class Isolate final : private HiddenFactory {
// Get the isolate that the given HeapObject lives in, returning true on
// success. If the object is not writable (i.e. lives in read-only space),
// return false.
inline static bool FromWritableHeapObject(HeapObject* obj, Isolate** isolate);
inline static bool FromWritableHeapObject(HeapObject obj, Isolate** isolate);

// Usually called by Init(), but can be called early e.g. to allow
// testing components that require logging but not the whole

src/log.cc
@ -486,7 +486,7 @@ class LowLevelLogger : public CodeEventLogger {
void CodeMoveEvent(AbstractCode from, AbstractCode to) override;
void CodeDisableOptEvent(AbstractCode code,
SharedFunctionInfo shared) override {}
void SnapshotPositionEvent(HeapObject* obj, int pos);
void SnapshotPositionEvent(HeapObject obj, int pos);
void CodeMovingGCEvent() override;

private:
@ -1668,7 +1668,7 @@ void Logger::ICEvent(const char* type, bool keyed, Map map, Object* key,
}

void Logger::MapEvent(const char* type, Map from, Map to, const char* reason,
HeapObject* name_or_sfi) {
HeapObject name_or_sfi) {
DisallowHeapAllocation no_gc;
if (!log_->IsEnabled() || !FLAG_trace_maps) return;
if (!to.is_null()) MapDetails(to);
@ -1686,7 +1686,7 @@ void Logger::MapEvent(const char* type, Map from, Map to, const char* reason,
<< reinterpret_cast<void*>(pc) << kNext << line << kNext << column
<< kNext << reason << kNext;

if (name_or_sfi) {
if (!name_or_sfi.is_null()) {
if (name_or_sfi->IsName()) {
msg << Name::cast(name_or_sfi);
} else if (name_or_sfi->IsSharedFunctionInfo()) {
@ -1758,7 +1758,7 @@ static int EnumerateCompiledFunctions(Heap* heap,

// Iterate the heap to find shared function info objects and record
// the unoptimized code for them.
for (HeapObject* obj = iterator.next(); obj != nullptr;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
if (obj->IsSharedFunctionInfo()) {
SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
@ -1798,7 +1798,7 @@ static int EnumerateWasmModuleObjects(
DisallowHeapAllocation no_gc;
int module_objects_count = 0;

for (HeapObject* obj = iterator.next(); obj != nullptr;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
if (obj->IsWasmModuleObject()) {
WasmModuleObject module = WasmModuleObject::cast(obj);
@ -1830,7 +1830,7 @@ void Logger::LogAccessorCallbacks() {
Heap* heap = isolate_->heap();
HeapIterator iterator(heap);
DisallowHeapAllocation no_gc;
for (HeapObject* obj = iterator.next(); obj != nullptr;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
if (!obj->IsAccessorInfo()) continue;
AccessorInfo ai = AccessorInfo::cast(obj);
@ -1857,7 +1857,7 @@ void Logger::LogAllMaps() {
DisallowHeapAllocation no_gc;
Heap* heap = isolate_->heap();
HeapIterator iterator(heap);
for (HeapObject* obj = iterator.next(); obj != nullptr;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
if (!obj->IsMap()) continue;
Map map = Map::cast(obj);
@ -2090,7 +2090,7 @@ void ExistingCodeLogger::LogCodeObjects() {
Heap* heap = isolate_->heap();
HeapIterator iterator(heap);
DisallowHeapAllocation no_gc;
for (HeapObject* obj = iterator.next(); obj != nullptr;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
if (obj->IsCode()) LogCodeObject(obj);
if (obj->IsBytecodeArray()) LogCodeObject(obj);

@ -234,7 +234,7 @@ class Logger : public CodeEventListener {

void MapEvent(const char* type, Map from, Map to,
const char* reason = nullptr,
HeapObject* name_or_sfi = nullptr);
HeapObject name_or_sfi = HeapObject());
void MapCreate(Map map);
void MapDetails(Map map);

@ -21,7 +21,7 @@ MaybeObjectHandle::MaybeObjectHandle()
handle_(Handle<Object>::null()) {}

MaybeObjectHandle::MaybeObjectHandle(MaybeObject object, Isolate* isolate) {
HeapObject* heap_object;
HeapObject heap_object;
DCHECK(!object->IsCleared());
if (object->GetHeapObjectIfWeak(&heap_object)) {
handle_ = handle(heap_object, isolate);

@ -127,11 +127,11 @@ void Assembler::deserialization_set_special_target_at(
|
||||
// On R6 the address location is shifted by one instruction
|
||||
set_target_address_at(
|
||||
instruction_payload - (kInstructionsFor32BitConstant - 1) * kInstrSize,
|
||||
code ? code->constant_pool() : kNullAddress, target);
|
||||
!code.is_null() ? code->constant_pool() : kNullAddress, target);
|
||||
} else {
|
||||
set_target_address_at(
|
||||
instruction_payload - kInstructionsFor32BitConstant * kInstrSize,
|
||||
code ? code->constant_pool() : kNullAddress, target);
|
||||
!code.is_null() ? code->constant_pool() : kNullAddress, target);
|
||||
}
|
||||
}
|
||||
|
||||
@ -180,8 +180,8 @@ void Assembler::deserialization_set_target_internal_reference_at(

HeapObject* RelocInfo::target_object() {
DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObject(rmode_));
return HeapObject::cast(reinterpret_cast<Object*>(
Assembler::target_address_at(pc_, constant_pool_)));
return HeapObject::cast(
ObjectPtr(Assembler::target_address_at(pc_, constant_pool_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
@ -193,19 +193,17 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
return origin->relative_code_target_object_handle_at(pc_);
}

void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObject(rmode_));
Assembler::set_target_address_at(pc_, constant_pool_,
reinterpret_cast<Address>(target),
Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
icache_flush_mode);
if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
WriteBarrierForCode(host(), this, target);
}
}


Address RelocInfo::target_external_reference() {
DCHECK(IsExternalReference(rmode_));
return Assembler::target_address_at(pc_, constant_pool_);
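The RelocInfo hunks above illustrate the conversion idiom of the port: the instruction stream still stores a raw Address, but instead of reinterpret_cast'ing it to and from a HeapObject*, the address is wrapped in a value type on the way out (ObjectPtr(address)) and unwrapped with ptr() on the way in. Below is a minimal standalone sketch of that round trip; FakeObjectPtr and FakeRelocSlot are simplified stand-ins, not the real classes.

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

class FakeObjectPtr {
 public:
  explicit FakeObjectPtr(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
 private:
  Address ptr_;
};

// Models one embedded-object slot, in the spirit of
// Assembler::target_address_at / set_target_address_at.
struct FakeRelocSlot {
  Address stored = 0;
  Address target_address_at() const { return stored; }
  void set_target_address_at(Address target) { stored = target; }
};

int main() {
  FakeRelocSlot slot;
  FakeObjectPtr target(0x5008);                        // some tagged "address"
  slot.set_target_address_at(target.ptr());            // store: target->ptr()
  FakeObjectPtr read_back(slot.target_address_at());   // load: wrap the address
  assert(read_back.ptr() == target.ptr());
  return 0;
}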
@ -4055,11 +4055,10 @@ Address Assembler::target_address_at(Address pc) {
// qNaN is a MIPS sNaN, and ia32 sNaN is MIPS qNaN. If running from a heap
// snapshot generated on ia32, the resulting MIPS sNaN must be quieted.
// OS::nan_value() returns a qNaN.
void Assembler::QuietNaN(HeapObject* object) {
void Assembler::QuietNaN(HeapObject object) {
HeapNumber::cast(object)->set_value(std::numeric_limits<double>::quiet_NaN());
}


// On Mips, a target address is stored in a lui/ori instruction pair, each
// of which load 16 bits of the 32-bit address to a register.
// Patching the address must replace both instr, and flush the i-cache.
@ -259,7 +259,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// of that call in the instruction stream.
inline static Address target_address_from_return_address(Address pc);

static void QuietNaN(HeapObject* nan);
static void QuietNaN(HeapObject nan);

// This sets the branch destination (which gets loaded at the call address).
// This is for calls and branches within generated code. The serializer
@ -534,13 +534,12 @@ void MipsDebugger::Debug() {
while (cur < end) {
PrintF(" 0x%08" PRIxPTR ": 0x%08x %10d",
reinterpret_cast<intptr_t>(cur), *cur, *cur);
HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
int value = *cur;
ObjectPtr obj(*cur);
Heap* current_heap = sim_->isolate_->heap();
if (((value & 1) == 0) || current_heap->Contains(obj)) {
if (obj.IsSmi() || current_heap->Contains(HeapObject::cast(obj))) {
PrintF(" (");
if ((value & 1) == 0) {
PrintF("smi %d", value / 2);
if (obj.IsSmi()) {
PrintF("smi %d", Smi::ToInt(obj));
} else {
obj->ShortPrint();
}
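The simulator-debugger hunk above replaces hand-rolled tag tests ((value & 1) == 0, value / 2) with ObjectPtr::IsSmi() and Smi::ToInt(). A rough standalone sketch of that idea follows; it assumes a simple low-bit-zero Smi tag and uses stand-in types (FakeObjectPtr, FakeSmi), so it only illustrates the shape of the check, not V8's exact encoding.

#include <cstdint>
#include <iostream>

using Address = uintptr_t;

class FakeObjectPtr {
 public:
  explicit FakeObjectPtr(Address value) : value_(value) {}
  // Assumed Smi convention: low bit 0 means small integer.
  bool IsSmi() const { return (value_ & 1) == 0; }
  Address value() const { return value_; }
 private:
  Address value_;
};

struct FakeSmi {
  // Decodes the integer payload stored in the upper bits.
  static int ToInt(FakeObjectPtr obj) {
    return static_cast<int>(static_cast<intptr_t>(obj.value()) >> 1);
  }
};

int main() {
  FakeObjectPtr word(42 << 1);  // an encoded Smi with value 42
  if (word.IsSmi()) {
    std::cout << "smi " << FakeSmi::ToInt(word) << "\n";
  } else {
    std::cout << "heap object at 0x" << std::hex << word.value() << "\n";
  }
}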
@ -115,7 +115,7 @@ void Assembler::deserialization_set_special_target_at(
|
||||
Address instruction_payload, Code code, Address target) {
|
||||
set_target_address_at(
|
||||
instruction_payload - kInstructionsFor64BitConstant * kInstrSize,
|
||||
code ? code->constant_pool() : kNullAddress, target);
|
||||
!code.is_null() ? code->constant_pool() : kNullAddress, target);
|
||||
}
|
||||
|
||||
int Assembler::deserialization_special_target_size(
|
||||
@ -150,10 +150,10 @@ void Assembler::deserialization_set_target_internal_reference_at(
|
||||
}
|
||||
}
|
||||
|
||||
HeapObject* RelocInfo::target_object() {
|
||||
HeapObject RelocInfo::target_object() {
|
||||
DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
|
||||
return HeapObject::cast(reinterpret_cast<Object*>(
|
||||
Assembler::target_address_at(pc_, constant_pool_)));
|
||||
return HeapObject::cast(
|
||||
ObjectPtr(Assembler::target_address_at(pc_, constant_pool_)));
|
||||
}
|
||||
|
||||
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
|
||||
@ -162,19 +162,17 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
|
||||
Assembler::target_address_at(pc_, constant_pool_)));
|
||||
}
|
||||
|
||||
void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
|
||||
void RelocInfo::set_target_object(Heap* heap, HeapObject target,
|
||||
WriteBarrierMode write_barrier_mode,
|
||||
ICacheFlushMode icache_flush_mode) {
|
||||
DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
|
||||
Assembler::set_target_address_at(pc_, constant_pool_,
|
||||
reinterpret_cast<Address>(target),
|
||||
Assembler::set_target_address_at(pc_, constant_pool_, target->ptr(),
|
||||
icache_flush_mode);
|
||||
if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
|
||||
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null()) {
|
||||
WriteBarrierForCode(host(), this, target);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Address RelocInfo::target_external_reference() {
|
||||
DCHECK(rmode_ == EXTERNAL_REFERENCE);
|
||||
return Assembler::target_address_at(pc_, constant_pool_);
|
||||
|
@ -4308,11 +4308,10 @@ Address Assembler::target_address_at(Address pc) {
|
||||
// qNaN is a MIPS sNaN, and ia32 sNaN is MIPS qNaN. If running from a heap
|
||||
// snapshot generated on ia32, the resulting MIPS sNaN must be quieted.
|
||||
// OS::nan_value() returns a qNaN.
|
||||
void Assembler::QuietNaN(HeapObject* object) {
|
||||
void Assembler::QuietNaN(HeapObject object) {
|
||||
HeapNumber::cast(object)->set_value(std::numeric_limits<double>::quiet_NaN());
|
||||
}
|
||||
|
||||
|
||||
// On Mips64, a target address is stored in a 4-instruction sequence:
|
||||
// 0: lui(rd, (j.imm64_ >> 32) & kImm16Mask);
|
||||
// 1: ori(rd, rd, (j.imm64_ >> 16) & kImm16Mask);
|
||||
|
@ -260,7 +260,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
|
||||
|
||||
static void JumpLabelToJumpRegister(Address pc);
|
||||
|
||||
static void QuietNaN(HeapObject* nan);
|
||||
static void QuietNaN(HeapObject nan);
|
||||
|
||||
// This sets the branch destination (which gets loaded at the call address).
|
||||
// This is for calls and branches within generated code. The serializer
|
||||
|
@ -465,13 +465,12 @@ void MipsDebugger::Debug() {
|
||||
while (cur < end) {
|
||||
PrintF(" 0x%012" PRIxPTR " : 0x%016" PRIx64 " %14" PRId64 " ",
|
||||
reinterpret_cast<intptr_t>(cur), *cur, *cur);
|
||||
HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
|
||||
int64_t value = *cur;
|
||||
ObjectPtr obj(*cur);
|
||||
Heap* current_heap = sim_->isolate_->heap();
|
||||
if (((value & 1) == 0) || current_heap->Contains(obj)) {
|
||||
if (obj.IsSmi() || current_heap->Contains(HeapObject::cast(obj))) {
|
||||
PrintF(" (");
|
||||
if ((value & 1) == 0) {
|
||||
PrintF("smi %d", static_cast<int>(value >> 32));
|
||||
if (obj.IsSmi()) {
|
||||
PrintF("smi %d", Smi::ToInt(obj));
|
||||
} else {
|
||||
obj->ShortPrint();
|
||||
}
|
||||
|
@ -22,17 +22,17 @@ namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
template <int start_offset>
|
||||
int FlexibleBodyDescriptor<start_offset>::SizeOf(Map map, HeapObject* object) {
|
||||
int FlexibleBodyDescriptor<start_offset>::SizeOf(Map map, HeapObject object) {
|
||||
return object->SizeFromMap(map);
|
||||
}
|
||||
|
||||
template <int start_offset>
|
||||
int FlexibleWeakBodyDescriptor<start_offset>::SizeOf(Map map,
|
||||
HeapObject* object) {
|
||||
HeapObject object) {
|
||||
return object->SizeFromMap(map);
|
||||
}
|
||||
|
||||
bool BodyDescriptorBase::IsValidJSObjectSlotImpl(Map map, HeapObject* obj,
|
||||
bool BodyDescriptorBase::IsValidJSObjectSlotImpl(Map map, HeapObject obj,
|
||||
int offset) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kTaggedSize);
|
||||
@ -66,7 +66,7 @@ bool BodyDescriptorBase::IsValidJSObjectSlotImpl(Map map, HeapObject* obj,
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void BodyDescriptorBase::IterateJSObjectBodyImpl(Map map, HeapObject* obj,
|
||||
void BodyDescriptorBase::IterateJSObjectBodyImpl(Map map, HeapObject obj,
|
||||
int start_offset,
|
||||
int end_offset,
|
||||
ObjectVisitor* v) {
|
||||
@ -118,7 +118,7 @@ void BodyDescriptorBase::IterateJSObjectBodyImpl(Map map, HeapObject* obj,
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject* obj,
|
||||
DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject obj,
|
||||
int start_offset,
|
||||
int end_offset,
|
||||
ObjectVisitor* v) {
|
||||
@ -127,33 +127,33 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject* obj,
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void BodyDescriptorBase::IteratePointer(HeapObject* obj, int offset,
|
||||
void BodyDescriptorBase::IteratePointer(HeapObject obj, int offset,
|
||||
ObjectVisitor* v) {
|
||||
v->VisitPointer(obj, HeapObject::RawField(obj, offset));
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
DISABLE_CFI_PERF void BodyDescriptorBase::IterateMaybeWeakPointers(
|
||||
HeapObject* obj, int start_offset, int end_offset, ObjectVisitor* v) {
|
||||
HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
|
||||
v->VisitPointers(obj, HeapObject::RawMaybeWeakField(obj, start_offset),
|
||||
HeapObject::RawMaybeWeakField(obj, end_offset));
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject* obj, int offset,
|
||||
void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject obj, int offset,
|
||||
ObjectVisitor* v) {
|
||||
v->VisitPointer(obj, HeapObject::RawMaybeWeakField(obj, offset));
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
DISABLE_CFI_PERF void BodyDescriptorBase::IterateCustomWeakPointers(
|
||||
HeapObject* obj, int start_offset, int end_offset, ObjectVisitor* v) {
|
||||
HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
|
||||
v->VisitCustomWeakPointers(obj, HeapObject::RawField(obj, start_offset),
|
||||
HeapObject::RawField(obj, end_offset));
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void BodyDescriptorBase::IterateCustomWeakPointer(HeapObject* obj, int offset,
|
||||
void BodyDescriptorBase::IterateCustomWeakPointer(HeapObject obj, int offset,
|
||||
ObjectVisitor* v) {
|
||||
v->VisitCustomWeakPointer(obj, HeapObject::RawField(obj, offset));
|
||||
}
|
||||
@ -162,18 +162,18 @@ class JSObject::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static const int kStartOffset = JSReceiver::kPropertiesOrHashOffset;
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset < kStartOffset) return false;
|
||||
return IsValidJSObjectSlotImpl(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateJSObjectBodyImpl(map, obj, kStartOffset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
@ -182,24 +182,24 @@ class JSObject::FastBodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static const int kStartOffset = JSReceiver::kPropertiesOrHashOffset;
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= kStartOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, kStartOffset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class JSFunction::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset < kSizeWithoutPrototype) return true;
|
||||
if (offset < kSizeWithPrototype && map->has_prototype_slot()) {
|
||||
return true;
|
||||
@ -208,7 +208,7 @@ class JSFunction::BodyDescriptor final : public BodyDescriptorBase {
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
int header_size = JSFunction::GetHeaderSize(map->has_prototype_slot());
|
||||
DCHECK_EQ(header_size, JSObject::GetHeaderSize(map));
|
||||
@ -216,59 +216,59 @@ class JSFunction::BodyDescriptor final : public BodyDescriptorBase {
|
||||
IterateJSObjectBodyImpl(map, obj, header_size, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class JSWeakCell::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return JSObject::BodyDescriptor::IsValidSlot(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, JSReceiver::kPropertiesOrHashOffset, kTargetOffset, v);
|
||||
IterateCustomWeakPointer(obj, kTargetOffset, v);
|
||||
IteratePointers(obj, kTargetOffset + kPointerSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class JSWeakRef::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return JSObject::BodyDescriptor::IsValidSlot(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, JSReceiver::kPropertiesOrHashOffset, kTargetOffset, v);
|
||||
IterateCustomWeakPointer(obj, kTargetOffset, v);
|
||||
IteratePointers(obj, kTargetOffset + kPointerSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class SharedFunctionInfo::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return FixedBodyDescriptor<kStartOfPointerFieldsOffset,
|
||||
kEndOfTaggedFieldsOffset,
|
||||
kAlignedSize>::IsValidSlot(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateCustomWeakPointer(obj, kFunctionDataOffset, v);
|
||||
IteratePointers(obj,
|
||||
@ -276,7 +276,7 @@ class SharedFunctionInfo::BodyDescriptor final : public BodyDescriptorBase {
|
||||
SharedFunctionInfo::kEndOfTaggedFieldsOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
@ -290,7 +290,7 @@ class AllocationSite::BodyDescriptor final : public BodyDescriptorBase {
|
||||
STATIC_ASSERT(AllocationSite::kPretenureCreateCountOffset + kInt32Size ==
|
||||
AllocationSite::kWeakNextOffset);
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset >= AllocationSite::kStartOffset &&
|
||||
offset < AllocationSite::kCommonPointerFieldEndOffset) {
|
||||
return true;
|
||||
@ -304,7 +304,7 @@ class AllocationSite::BodyDescriptor final : public BodyDescriptorBase {
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
// Iterate over all the common pointer fields
|
||||
IteratePointers(obj, AllocationSite::kStartOffset,
|
||||
@ -317,49 +317,49 @@ class AllocationSite::BodyDescriptor final : public BodyDescriptorBase {
|
||||
}
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class JSArrayBuffer::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset < kEndOfTaggedFieldsOffset) return true;
|
||||
if (offset < kHeaderSize) return false;
|
||||
return IsValidJSObjectSlotImpl(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
// JSArrayBuffer instances contain raw data that the GC does not know about.
|
||||
IteratePointers(obj, kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset, v);
|
||||
IterateJSObjectBodyImpl(map, obj, kHeaderSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class JSArrayBufferView::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset < kEndOfTaggedFieldsOffset) return true;
|
||||
if (offset < kHeaderSize) return false;
|
||||
return IsValidJSObjectSlotImpl(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
// JSArrayBufferView contains raw data that the GC does not know about.
|
||||
IteratePointers(obj, kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset, v);
|
||||
IterateJSObjectBodyImpl(map, obj, kHeaderSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
@ -368,7 +368,7 @@ template <typename Derived>
|
||||
class SmallOrderedHashTable<Derived>::BodyDescriptor final
|
||||
: public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
Derived table = Derived::cast(obj);
|
||||
// Only data table part contains tagged values.
|
||||
return (offset >= DataTableStartOffset()) &&
|
||||
@ -376,7 +376,7 @@ class SmallOrderedHashTable<Derived>::BodyDescriptor final
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
Derived table = Derived::cast(obj);
|
||||
int start_offset = DataTableStartOffset();
|
||||
@ -384,7 +384,7 @@ class SmallOrderedHashTable<Derived>::BodyDescriptor final
|
||||
IteratePointers(obj, start_offset, end_offset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
Derived table = Derived::cast(obj);
|
||||
return table->SizeFor(table->Capacity());
|
||||
}
|
||||
@ -392,35 +392,33 @@ class SmallOrderedHashTable<Derived>::BodyDescriptor final
|
||||
|
||||
class ByteArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return ByteArray::SizeFor(ByteArray::cast(obj)->synchronized_length());
|
||||
}
|
||||
};
|
||||
|
||||
class BytecodeArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= kConstantPoolOffset &&
|
||||
offset <= kSourcePositionTableOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointer(obj, kConstantPoolOffset, v);
|
||||
IteratePointer(obj, kHandlerTableOffset, v);
|
||||
IteratePointer(obj, kSourcePositionTableOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return BytecodeArray::SizeFor(
|
||||
BytecodeArray::cast(obj)->synchronized_length());
|
||||
}
|
||||
@ -428,30 +426,26 @@ class BytecodeArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
|
||||
class BigInt::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return BigInt::SizeFor(BigInt::cast(obj)->synchronized_length());
|
||||
}
|
||||
};
|
||||
|
||||
class FixedDoubleArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return FixedDoubleArray::SizeFor(
|
||||
FixedDoubleArray::cast(obj)->synchronized_length());
|
||||
}
|
||||
@ -459,32 +453,30 @@ class FixedDoubleArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
|
||||
class FixedTypedArrayBase::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset == kBasePointerOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointer(obj, kBasePointerOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return FixedTypedArrayBase::cast(object)->size();
|
||||
}
|
||||
};
|
||||
|
||||
class FeedbackMetadata::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return FeedbackMetadata::SizeFor(
|
||||
FeedbackMetadata::cast(obj)->synchronized_slot_count());
|
||||
}
|
||||
@ -492,57 +484,57 @@ class FeedbackMetadata::BodyDescriptor final : public BodyDescriptorBase {
|
||||
|
||||
class FeedbackVector::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset == kSharedFunctionInfoOffset ||
|
||||
offset == kOptimizedCodeOffset || offset >= kFeedbackSlotsOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointer(obj, kSharedFunctionInfoOffset, v);
|
||||
IterateMaybeWeakPointer(obj, kOptimizedCodeOffset, v);
|
||||
IterateMaybeWeakPointers(obj, kFeedbackSlotsOffset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return FeedbackVector::SizeFor(FeedbackVector::cast(obj)->length());
|
||||
}
|
||||
};
|
||||
|
||||
class PreParsedScopeData::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset == kScopeDataOffset || offset >= kChildDataStartOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointer(obj, kScopeDataOffset, v);
|
||||
IteratePointers(obj, kChildDataStartOffset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return PreParsedScopeData::SizeFor(PreParsedScopeData::cast(obj)->length());
|
||||
}
|
||||
};
|
||||
|
||||
class PrototypeInfo::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= HeapObject::kHeaderSize;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, HeapObject::kHeaderSize, kObjectCreateMapOffset, v);
|
||||
IterateMaybeWeakPointer(obj, kObjectCreateMapOffset, v);
|
||||
IteratePointers(obj, kObjectCreateMapOffset + kPointerSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
return obj->SizeFromMap(map);
|
||||
}
|
||||
};
|
||||
@ -551,29 +543,27 @@ class JSWeakCollection::BodyDescriptorImpl final : public BodyDescriptorBase {
|
||||
public:
|
||||
STATIC_ASSERT(kTableOffset + kPointerSize == kSize);
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return IsValidJSObjectSlotImpl(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateJSObjectBodyImpl(map, obj, kPropertiesOrHashOffset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class Foreign::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
v->VisitExternalReference(
|
||||
Foreign::cast(obj),
|
||||
@ -581,33 +571,29 @@ class Foreign::BodyDescriptor final : public BodyDescriptorBase {
|
||||
HeapObject::RawField(obj, kForeignAddressOffset).address()));
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) { return kSize; }
|
||||
static inline int SizeOf(Map map, HeapObject object) { return kSize; }
|
||||
};
|
||||
|
||||
class ExternalOneByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) { return kSize; }
|
||||
static inline int SizeOf(Map map, HeapObject object) { return kSize; }
|
||||
};
|
||||
|
||||
class ExternalTwoByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) { return kSize; }
|
||||
static inline int SizeOf(Map map, HeapObject object) { return kSize; }
|
||||
};
|
||||
|
||||
class Code::BodyDescriptor final : public BodyDescriptorBase {
|
||||
@ -620,13 +606,13 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
|
||||
kCodeDataContainerOffset);
|
||||
STATIC_ASSERT(kCodeDataContainerOffset + kPointerSize == kDataStart);
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
// Slots in code can't be invalid because we never trim code objects.
|
||||
return true;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, ObjectVisitor* v) {
|
||||
static inline void IterateBody(Map map, HeapObject obj, ObjectVisitor* v) {
|
||||
int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
|
||||
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
|
||||
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
|
||||
@ -643,27 +629,25 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateBody(map, obj, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return Code::unchecked_cast(object)->CodeSize();
|
||||
}
|
||||
};
|
||||
|
||||
class SeqOneByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
SeqOneByteString string = SeqOneByteString::cast(obj);
|
||||
return string->SizeFor(string->synchronized_length());
|
||||
}
|
||||
@ -671,15 +655,13 @@ class SeqOneByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
|
||||
class SeqTwoByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
return false;
|
||||
}
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) { return false; }
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) {
|
||||
static inline int SizeOf(Map map, HeapObject obj) {
|
||||
SeqTwoByteString string = SeqTwoByteString::cast(obj);
|
||||
return string->SizeFor(string->synchronized_length());
|
||||
}
|
||||
@ -687,33 +669,33 @@ class SeqTwoByteString::BodyDescriptor final : public BodyDescriptorBase {
|
||||
|
||||
class WasmInstanceObject::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
if (offset < kMemoryStartOffset) return true;
|
||||
if (offset < kModuleObjectOffset) return false;
|
||||
return IsValidJSObjectSlotImpl(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset, v);
|
||||
IterateJSObjectBodyImpl(map, obj, kSize, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return map->instance_size();
|
||||
}
|
||||
};
|
||||
|
||||
class Map::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= Map::kPointerFieldsBeginOffset &&
|
||||
offset < Map::kPointerFieldsEndOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, Map::kPointerFieldsBeginOffset,
|
||||
Map::kTransitionsOrPrototypeInfoOffset, v);
|
||||
@ -722,17 +704,17 @@ class Map::BodyDescriptor final : public BodyDescriptorBase {
|
||||
Map::kPointerFieldsEndOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* obj) { return Map::kSize; }
|
||||
static inline int SizeOf(Map map, HeapObject obj) { return Map::kSize; }
|
||||
};
|
||||
|
||||
class DataHandler::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= HeapObject::kHeaderSize;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
static_assert(kSmiHandlerOffset < kData1Offset,
|
||||
"Field order must be in sync with this iteration code");
|
||||
@ -742,19 +724,19 @@ class DataHandler::BodyDescriptor final : public BodyDescriptorBase {
|
||||
IterateMaybeWeakPointers(obj, kData1Offset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return object->SizeFromMap(map);
|
||||
}
|
||||
};
|
||||
|
||||
class NativeContext::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset < NativeContext::kEndOfTaggedFieldsOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, NativeContext::kStartOfStrongFieldsOffset,
|
||||
NativeContext::kEndOfStrongFieldsOffset, v);
|
||||
@ -762,20 +744,20 @@ class NativeContext::BodyDescriptor final : public BodyDescriptorBase {
|
||||
NativeContext::kEndOfWeakFieldsOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return NativeContext::kSize;
|
||||
}
|
||||
};
|
||||
|
||||
class CodeDataContainer::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= CodeDataContainer::kHeaderSize &&
|
||||
offset < CodeDataContainer::kSize;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, CodeDataContainer::kHeaderSize,
|
||||
CodeDataContainer::kPointerFieldsStrongEndOffset, v);
|
||||
@ -784,14 +766,14 @@ class CodeDataContainer::BodyDescriptor final : public BodyDescriptorBase {
|
||||
CodeDataContainer::kPointerFieldsWeakEndOffset, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return CodeDataContainer::kSize;
|
||||
}
|
||||
};
|
||||
|
||||
class EmbedderDataArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kSystemPointerSize);
|
||||
STATIC_ASSERT(base::bits::IsPowerOfTwo(kEmbedderDataSlotSize));
|
||||
@ -808,7 +790,7 @@ class EmbedderDataArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kSystemPointerSize);
|
||||
@ -826,7 +808,7 @@ class EmbedderDataArray::BodyDescriptor final : public BodyDescriptorBase {
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
return object->SizeFromMap(map);
|
||||
}
|
||||
};
|
||||
@ -1064,7 +1046,7 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3, T4 p4) {
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void HeapObject::IterateFast(ObjectVisitor* v) {
|
||||
BodyDescriptorBase::IteratePointer(this, kMapOffset, v);
|
||||
BodyDescriptorBase::IteratePointer(*this, kMapOffset, v);
|
||||
IterateBodyFast(v);
|
||||
}
|
||||
|
||||
@ -1078,7 +1060,7 @@ void HeapObject::IterateBodyFast(ObjectVisitor* v) {
|
||||
|
||||
struct CallIterateBody {
|
||||
template <typename BodyDescriptor, typename ObjectVisitor>
|
||||
static void apply(Map map, HeapObject* obj, int object_size,
|
||||
static void apply(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
BodyDescriptor::IterateBody(map, obj, object_size, v);
|
||||
}
|
||||
@ -1086,7 +1068,7 @@ struct CallIterateBody {
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
void HeapObject::IterateBodyFast(Map map, int object_size, ObjectVisitor* v) {
|
||||
BodyDescriptorApply<CallIterateBody, void>(map->instance_type(), map, this,
|
||||
BodyDescriptorApply<CallIterateBody, void>(map->instance_type(), map, *this,
|
||||
object_size, v);
|
||||
}
|
||||
} // namespace internal
|
||||
|
@ -19,53 +19,53 @@ namespace internal {
// It is used for invalid slots filtering. If the offset points outside
// of the object or to the map word, the result is UNDEFINED (!!!).
//
// static bool IsValidSlot(Map map, HeapObject* obj, int offset);
// static bool IsValidSlot(Map map, HeapObject obj, int offset);
//
//
// 2) Iterate object's body using stateful object visitor.
//
// template <typename ObjectVisitor>
// static inline void IterateBody(Map map, HeapObject* obj, int object_size,
// static inline void IterateBody(Map map, HeapObject obj, int object_size,
// ObjectVisitor* v);
class BodyDescriptorBase {
public:
template <typename ObjectVisitor>
static inline void IteratePointers(HeapObject* obj, int start_offset,
static inline void IteratePointers(HeapObject obj, int start_offset,
int end_offset, ObjectVisitor* v);

template <typename ObjectVisitor>
static inline void IteratePointer(HeapObject* obj, int offset,
static inline void IteratePointer(HeapObject obj, int offset,
ObjectVisitor* v);

template <typename ObjectVisitor>
static inline void IterateCustomWeakPointers(HeapObject* obj,
int start_offset, int end_offset,
static inline void IterateCustomWeakPointers(HeapObject obj, int start_offset,
int end_offset,
ObjectVisitor* v);

template <typename ObjectVisitor>
static inline void IterateCustomWeakPointer(HeapObject* obj, int offset,
static inline void IterateCustomWeakPointer(HeapObject obj, int offset,
ObjectVisitor* v);

template <typename ObjectVisitor>
static inline void IterateMaybeWeakPointers(HeapObject* obj, int start_offset,
static inline void IterateMaybeWeakPointers(HeapObject obj, int start_offset,
int end_offset, ObjectVisitor* v);

template <typename ObjectVisitor>
static inline void IterateMaybeWeakPointer(HeapObject* obj, int offset,
static inline void IterateMaybeWeakPointer(HeapObject obj, int offset,
ObjectVisitor* v);

protected:
// Returns true for all header and embedder fields.
static inline bool IsValidJSObjectSlotImpl(Map map, HeapObject* obj,
static inline bool IsValidJSObjectSlotImpl(Map map, HeapObject obj,
int offset);

// Returns true for all header and embedder fields.
static inline bool IsValidEmbedderJSObjectSlotImpl(Map map, HeapObject* obj,
static inline bool IsValidEmbedderJSObjectSlotImpl(Map map, HeapObject obj,
int offset);

// Treats all header and embedder fields in the range as tagged.
template <typename ObjectVisitor>
static inline void IterateJSObjectBodyImpl(Map map, HeapObject* obj,
static inline void IterateJSObjectBodyImpl(Map map, HeapObject obj,
int start_offset, int end_offset,
ObjectVisitor* v);
};
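The header comment above spells out the static contract every body descriptor must satisfy, now expressed over HeapObject values: IsValidSlot() filters offsets, IterateBody() feeds tagged slots to a visitor, and SizeOf() reports the object's size. The sketch below mirrors that contract with stand-in types (FakeMap, FakeHeapObject, FakeVisitor, and the chosen offsets are illustrative assumptions, not the real V8 interfaces).

#include <cstdint>
#include <iostream>

struct FakeMap {
  int instance_size;
};

struct FakeHeapObject {
  uintptr_t address;
};

struct FakeVisitor {
  void VisitPointer(FakeHeapObject /*host*/, int offset) {
    std::cout << "visit slot at offset " << offset << "\n";
  }
};

class FakeBodyDescriptor {
 public:
  static constexpr int kStartOffset = 8;  // skip the map word (illustrative)
  static constexpr int kTaggedSize = 8;

  // 1) Invalid-slot filtering.
  static bool IsValidSlot(FakeMap map, FakeHeapObject obj, int offset) {
    return offset >= kStartOffset && offset < map.instance_size;
  }

  // 2) Body iteration with a stateful visitor.
  template <typename ObjectVisitor>
  static void IterateBody(FakeMap map, FakeHeapObject obj, int object_size,
                          ObjectVisitor* v) {
    for (int offset = kStartOffset; offset < object_size;
         offset += kTaggedSize) {
      v->VisitPointer(obj, offset);
    }
  }

  static int SizeOf(FakeMap map, FakeHeapObject obj) {
    return map.instance_size;
  }
};

int main() {
  FakeMap map{32};
  FakeHeapObject obj{0x7000};
  FakeVisitor visitor;
  FakeBodyDescriptor::IterateBody(map, obj,
                                  FakeBodyDescriptor::SizeOf(map, obj),
                                  &visitor);
}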
@ -81,22 +81,22 @@ class FixedBodyDescriptor final : public BodyDescriptorBase {
|
||||
static const int kEndOffset = end_offset;
|
||||
static const int kSize = size;
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return offset >= kStartOffset && offset < kEndOffset;
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, ObjectVisitor* v) {
|
||||
static inline void IterateBody(Map map, HeapObject obj, ObjectVisitor* v) {
|
||||
IteratePointers(obj, start_offset, end_offset, v);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateBody(map, obj, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) { return kSize; }
|
||||
static inline int SizeOf(Map map, HeapObject object) { return kSize; }
|
||||
};
|
||||
|
||||
|
||||
@ -108,17 +108,17 @@ class FlexibleBodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static const int kStartOffset = start_offset;
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return (offset >= kStartOffset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IteratePointers(obj, start_offset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object);
|
||||
static inline int SizeOf(Map map, HeapObject object);
|
||||
};
|
||||
|
||||
|
||||
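The FixedBodyDescriptor and FlexibleBodyDescriptor hunks above differ only in how the iterated range is determined: a fixed descriptor bakes start, end, and size into template parameters, while a flexible descriptor fixes only the start offset and takes the object size at runtime. A rough standalone sketch of that contrast, with illustrative stand-in types and offsets:

#include <iostream>

struct FakeHeapObject { int size_from_map; };

template <int start_offset, int end_offset, int size>
struct FakeFixedBodyDescriptor {
  static constexpr int kSize = size;
  template <typename Visitor>
  static void IterateBody(FakeHeapObject obj, Visitor* v) {
    for (int off = start_offset; off < end_offset; off += 8) v->Visit(off);
  }
  static int SizeOf(FakeHeapObject) { return kSize; }  // compile-time constant
};

template <int start_offset>
struct FakeFlexibleBodyDescriptor {
  template <typename Visitor>
  static void IterateBody(FakeHeapObject obj, int object_size, Visitor* v) {
    for (int off = start_offset; off < object_size; off += 8) v->Visit(off);
  }
  static int SizeOf(FakeHeapObject obj) { return obj.size_from_map; }
};

struct PrintVisitor {
  void Visit(int offset) { std::cout << "slot @ " << offset << "\n"; }
};

int main() {
  PrintVisitor v;
  FakeHeapObject obj{40};
  FakeFixedBodyDescriptor<8, 24, 24>::IterateBody(obj, &v);
  FakeFlexibleBodyDescriptor<8>::IterateBody(
      obj, FakeFlexibleBodyDescriptor<8>::SizeOf(obj), &v);
}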
@ -129,17 +129,17 @@ class FlexibleWeakBodyDescriptor final : public BodyDescriptorBase {
|
||||
public:
|
||||
static const int kStartOffset = start_offset;
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return (offset >= kStartOffset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
IterateMaybeWeakPointers(obj, start_offset, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object);
|
||||
static inline int SizeOf(Map map, HeapObject object);
|
||||
};
|
||||
|
||||
// This class describes a body of an object which has a parent class that also
|
||||
@ -155,25 +155,25 @@ class SubclassBodyDescriptor final : public BodyDescriptorBase {
|
||||
STATIC_ASSERT(ParentBodyDescriptor::kSize <=
|
||||
ChildBodyDescriptor::kStartOffset);
|
||||
|
||||
static bool IsValidSlot(Map map, HeapObject* obj, int offset) {
|
||||
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
|
||||
return ParentBodyDescriptor::IsValidSlot(map, obj, offset) ||
|
||||
ChildBodyDescriptor::IsValidSlot(map, obj, offset);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, ObjectVisitor* v) {
|
||||
static inline void IterateBody(Map map, HeapObject obj, ObjectVisitor* v) {
|
||||
ParentBodyDescriptor::IterateBody(map, obj, v);
|
||||
ChildBodyDescriptor::IterateBody(map, obj, v);
|
||||
}
|
||||
|
||||
template <typename ObjectVisitor>
|
||||
static inline void IterateBody(Map map, HeapObject* obj, int object_size,
|
||||
static inline void IterateBody(Map map, HeapObject obj, int object_size,
|
||||
ObjectVisitor* v) {
|
||||
ParentBodyDescriptor::IterateBody(map, obj, object_size, v);
|
||||
ChildBodyDescriptor::IterateBody(map, obj, object_size, v);
|
||||
}
|
||||
|
||||
static inline int SizeOf(Map map, HeapObject* object) {
|
||||
static inline int SizeOf(Map map, HeapObject object) {
|
||||
// The child should know its full size.
|
||||
return ChildBodyDescriptor::SizeOf(map, object);
|
||||
}
|
||||
|
@ -112,7 +112,7 @@ void ObjectPtr::VerifyPointer(Isolate* isolate, Object* p) {
|
||||
}
|
||||
|
||||
void MaybeObject::VerifyMaybeObjectPointer(Isolate* isolate, MaybeObject p) {
|
||||
HeapObject* heap_object;
|
||||
HeapObject heap_object;
|
||||
if (p->GetHeapObject(&heap_object)) {
|
||||
HeapObject::VerifyHeapPointer(isolate, heap_object);
|
||||
} else {
|
||||
@ -121,7 +121,7 @@ void MaybeObject::VerifyMaybeObjectPointer(Isolate* isolate, MaybeObject p) {
|
||||
}
|
||||
|
||||
namespace {
|
||||
void VerifyForeignPointer(Isolate* isolate, HeapObject* host, Object* foreign) {
|
||||
void VerifyForeignPointer(Isolate* isolate, HeapObject host, Object* foreign) {
|
||||
host->VerifyPointer(isolate, foreign);
|
||||
CHECK(foreign->IsUndefined(isolate) || Foreign::IsNormalized(foreign));
|
||||
}
|
||||
@ -141,13 +141,13 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
|
||||
#define STRING_TYPE_CASE(TYPE, size, name, CamelName) case TYPE:
|
||||
STRING_TYPE_LIST(STRING_TYPE_CASE)
|
||||
#undef STRING_TYPE_CASE
|
||||
String::cast(this)->StringVerify(isolate);
|
||||
String::cast(*this)->StringVerify(isolate);
|
||||
break;
|
||||
case SYMBOL_TYPE:
|
||||
Symbol::cast(this)->SymbolVerify(isolate);
|
||||
Symbol::cast(*this)->SymbolVerify(isolate);
|
||||
break;
|
||||
case MAP_TYPE:
|
||||
Map::cast(this)->MapVerify(isolate);
|
||||
Map::cast(*this)->MapVerify(isolate);
|
||||
break;
|
||||
case HEAP_NUMBER_TYPE:
|
||||
CHECK(IsHeapNumber());
|
||||
@ -156,17 +156,17 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
|
||||
CHECK(IsMutableHeapNumber());
|
||||
break;
|
||||
case BIGINT_TYPE:
|
||||
BigInt::cast(this)->BigIntVerify(isolate);
|
||||
BigInt::cast(*this)->BigIntVerify(isolate);
|
||||
break;
|
||||
case CALL_HANDLER_INFO_TYPE:
|
||||
CallHandlerInfo::cast(this)->CallHandlerInfoVerify(isolate);
|
||||
CallHandlerInfo::cast(*this)->CallHandlerInfoVerify(isolate);
|
||||
break;
|
||||
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
|
||||
ObjectBoilerplateDescription::cast(this)
|
||||
ObjectBoilerplateDescription::cast(*this)
|
||||
->ObjectBoilerplateDescriptionVerify(isolate);
|
||||
break;
|
||||
case EMBEDDER_DATA_ARRAY_TYPE:
|
||||
EmbedderDataArray::cast(this)->EmbedderDataArrayVerify(isolate);
|
||||
EmbedderDataArray::cast(*this)->EmbedderDataArrayVerify(isolate);
|
||||
break;
|
||||
// FixedArray types
|
||||
case HASH_TABLE_TYPE:
|
||||
@ -182,7 +182,7 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
|
||||
case FIXED_ARRAY_TYPE:
|
||||
case SCOPE_INFO_TYPE:
|
||||
case SCRIPT_CONTEXT_TABLE_TYPE:
|
||||
FixedArray::cast(this)->FixedArrayVerify(isolate);
|
||||
FixedArray::cast(*this)->FixedArrayVerify(isolate);
|
||||
break;
|
||||
case AWAIT_CONTEXT_TYPE:
|
||||
case BLOCK_CONTEXT_TYPE:
|
||||
@ -193,61 +193,61 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
|
||||
case MODULE_CONTEXT_TYPE:
|
||||
case SCRIPT_CONTEXT_TYPE:
|
||||
case WITH_CONTEXT_TYPE:
|
||||
Context::cast(this)->ContextVerify(isolate);
|
||||
Context::cast(*this)->ContextVerify(isolate);
|
||||
break;
|
||||
case NATIVE_CONTEXT_TYPE:
|
||||
NativeContext::cast(this)->NativeContextVerify(isolate);
|
||||
NativeContext::cast(*this)->NativeContextVerify(isolate);
|
||||
break;
|
||||
case WEAK_FIXED_ARRAY_TYPE:
|
||||
WeakFixedArray::cast(this)->WeakFixedArrayVerify(isolate);
|
||||
WeakFixedArray::cast(*this)->WeakFixedArrayVerify(isolate);
|
||||
break;
|
||||
case WEAK_ARRAY_LIST_TYPE:
|
||||
WeakArrayList::cast(this)->WeakArrayListVerify(isolate);
|
||||
WeakArrayList::cast(*this)->WeakArrayListVerify(isolate);
|
||||
break;
|
||||
case FIXED_DOUBLE_ARRAY_TYPE:
|
||||
FixedDoubleArray::cast(this)->FixedDoubleArrayVerify(isolate);
|
||||
FixedDoubleArray::cast(*this)->FixedDoubleArrayVerify(isolate);
|
||||
break;
|
||||
case FEEDBACK_METADATA_TYPE:
|
||||
FeedbackMetadata::cast(this)->FeedbackMetadataVerify(isolate);
|
||||
FeedbackMetadata::cast(*this)->FeedbackMetadataVerify(isolate);
|
||||
break;
|
||||
case BYTE_ARRAY_TYPE:
|
||||
ByteArray::cast(this)->ByteArrayVerify(isolate);
|
||||
ByteArray::cast(*this)->ByteArrayVerify(isolate);
|
||||
break;
|
||||
case BYTECODE_ARRAY_TYPE:
|
||||
BytecodeArray::cast(this)->BytecodeArrayVerify(isolate);
|
||||
BytecodeArray::cast(*this)->BytecodeArrayVerify(isolate);
|
||||
break;
|
||||
case DESCRIPTOR_ARRAY_TYPE:
|
||||
DescriptorArray::cast(this)->DescriptorArrayVerify(isolate);
|
||||
DescriptorArray::cast(*this)->DescriptorArrayVerify(isolate);
|
||||
break;
|
||||
case TRANSITION_ARRAY_TYPE:
|
||||
TransitionArray::cast(this)->TransitionArrayVerify(isolate);
|
||||
TransitionArray::cast(*this)->TransitionArrayVerify(isolate);
|
||||
break;
|
||||
case PROPERTY_ARRAY_TYPE:
|
||||
PropertyArray::cast(this)->PropertyArrayVerify(isolate);
|
||||
PropertyArray::cast(*this)->PropertyArrayVerify(isolate);
|
||||
break;
|
||||
case FREE_SPACE_TYPE:
|
||||
FreeSpace::cast(this)->FreeSpaceVerify(isolate);
|
||||
FreeSpace::cast(*this)->FreeSpaceVerify(isolate);
|
||||
break;
|
||||
case FEEDBACK_CELL_TYPE:
|
||||
FeedbackCell::cast(this)->FeedbackCellVerify(isolate);
|
||||
FeedbackCell::cast(*this)->FeedbackCellVerify(isolate);
|
||||
break;
|
||||
case FEEDBACK_VECTOR_TYPE:
|
||||
FeedbackVector::cast(this)->FeedbackVectorVerify(isolate);
|
||||
FeedbackVector::cast(*this)->FeedbackVectorVerify(isolate);
|
||||
break;
|
||||
|
||||
#define VERIFY_TYPED_ARRAY(Type, type, TYPE, ctype) \
|
||||
case FIXED_##TYPE##_ARRAY_TYPE: \
|
||||
Fixed##Type##Array::cast(this)->FixedTypedArrayVerify(isolate); \
|
||||
#define VERIFY_TYPED_ARRAY(Type, type, TYPE, ctype) \
|
||||
case FIXED_##TYPE##_ARRAY_TYPE: \
|
||||
Fixed##Type##Array::cast(*this)->FixedTypedArrayVerify(isolate); \
|
||||
break;
|
||||
|
||||
TYPED_ARRAYS(VERIFY_TYPED_ARRAY)
|
||||
#undef VERIFY_TYPED_ARRAY
|
||||
|
||||
case CODE_TYPE:
|
||||
Code::cast(this)->CodeVerify(isolate);
|
||||
Code::cast(*this)->CodeVerify(isolate);
|
||||
break;
|
||||
case ODDBALL_TYPE:
|
||||
Oddball::cast(this)->OddballVerify(isolate);
|
||||
Oddball::cast(*this)->OddballVerify(isolate);
|
||||
break;
|
||||
case JS_OBJECT_TYPE:
|
||||
case JS_ERROR_TYPE:
|
||||
@ -258,224 +258,218 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
|
||||
case WASM_GLOBAL_TYPE:
|
||||
case WASM_MEMORY_TYPE:
|
||||
case WASM_TABLE_TYPE:
|
||||
JSObject::cast(this)->JSObjectVerify(isolate);
|
||||
JSObject::cast(*this)->JSObjectVerify(isolate);
|
||||
break;
|
||||
case WASM_MODULE_TYPE:
|
||||
WasmModuleObject::cast(this)->WasmModuleObjectVerify(isolate);
|
||||
WasmModuleObject::cast(*this)->WasmModuleObjectVerify(isolate);
|
||||
break;
|
||||
case WASM_INSTANCE_TYPE:
|
||||
WasmInstanceObject::cast(this)->WasmInstanceObjectVerify(isolate);
|
||||
WasmInstanceObject::cast(*this)->WasmInstanceObjectVerify(isolate);
|
||||
break;
|
||||
case JS_ARGUMENTS_TYPE:
|
||||
JSArgumentsObject::cast(this)->JSArgumentsObjectVerify(isolate);
|
||||
JSArgumentsObject::cast(*this)->JSArgumentsObjectVerify(isolate);
|
||||
break;
|
||||
case JS_GENERATOR_OBJECT_TYPE:
|
||||
JSGeneratorObject::cast(this)->JSGeneratorObjectVerify(isolate);
|
||||
JSGeneratorObject::cast(*this)->JSGeneratorObjectVerify(isolate);
|
||||
break;
|
||||
case JS_ASYNC_FUNCTION_OBJECT_TYPE:
|
||||
JSAsyncFunctionObject::cast(this)->JSAsyncFunctionObjectVerify(isolate);
|
||||
JSAsyncFunctionObject::cast(*this)->JSAsyncFunctionObjectVerify(isolate);
|
||||
break;
|
||||
case JS_ASYNC_GENERATOR_OBJECT_TYPE:
|
||||
JSAsyncGeneratorObject::cast(this)->JSAsyncGeneratorObjectVerify(isolate);
|
||||
JSAsyncGeneratorObject::cast(*this)->JSAsyncGeneratorObjectVerify(
|
||||
isolate);
|
||||
break;
|
||||
case JS_VALUE_TYPE:
|
||||
JSValue::cast(this)->JSValueVerify(isolate);
|
||||
JSValue::cast(*this)->JSValueVerify(isolate);
|
||||
break;
|
||||
case JS_DATE_TYPE:
|
||||
JSDate::cast(this)->JSDateVerify(isolate);
|
||||
JSDate::cast(*this)->JSDateVerify(isolate);
|
||||
break;
|
||||
case JS_BOUND_FUNCTION_TYPE:
|
||||
JSBoundFunction::cast(this)->JSBoundFunctionVerify(isolate);
|
||||
JSBoundFunction::cast(*this)->JSBoundFunctionVerify(isolate);
|
||||
break;
|
||||
case JS_FUNCTION_TYPE:
|
||||
JSFunction::cast(this)->JSFunctionVerify(isolate);
|
||||
JSFunction::cast(*this)->JSFunctionVerify(isolate);
|
||||
break;
|
||||
case JS_GLOBAL_PROXY_TYPE:
|
||||
JSGlobalProxy::cast(this)->JSGlobalProxyVerify(isolate);
|
||||
JSGlobalProxy::cast(*this)->JSGlobalProxyVerify(isolate);
|
||||
break;
|
||||
case JS_GLOBAL_OBJECT_TYPE:
|
||||
JSGlobalObject::cast(this)->JSGlobalObjectVerify(isolate);
|
||||
JSGlobalObject::cast(*this)->JSGlobalObjectVerify(isolate);
|
||||
break;
|
||||
case CELL_TYPE:
|
||||
Cell::cast(this)->CellVerify(isolate);
|
||||
Cell::cast(*this)->CellVerify(isolate);
|
||||
break;
|
||||
case PROPERTY_CELL_TYPE:
|
||||
PropertyCell::cast(this)->PropertyCellVerify(isolate);
|
||||
PropertyCell::cast(*this)->PropertyCellVerify(isolate);
|
||||
break;
|
||||
case JS_ARRAY_TYPE:
|
||||
JSArray::cast(this)->JSArrayVerify(isolate);
|
||||
JSArray::cast(*this)->JSArrayVerify(isolate);
|
||||
break;
|
||||
case JS_MODULE_NAMESPACE_TYPE:
|
||||
JSModuleNamespace::cast(this)->JSModuleNamespaceVerify(isolate);
|
||||
JSModuleNamespace::cast(*this)->JSModuleNamespaceVerify(isolate);
|
||||
break;
|
||||
case JS_SET_TYPE:
|
||||
JSSet::cast(this)->JSSetVerify(isolate);
|
||||
JSSet::cast(*this)->JSSetVerify(isolate);
|
||||
break;
|
||||
case JS_MAP_TYPE:
|
||||
JSMap::cast(this)->JSMapVerify(isolate);
|
||||
JSMap::cast(*this)->JSMapVerify(isolate);
|
||||
break;
|
||||
case JS_SET_KEY_VALUE_ITERATOR_TYPE:
|
||||
case JS_SET_VALUE_ITERATOR_TYPE:
|
||||
JSSetIterator::cast(this)->JSSetIteratorVerify(isolate);
|
||||
JSSetIterator::cast(*this)->JSSetIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_MAP_KEY_ITERATOR_TYPE:
|
||||
case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
|
||||
case JS_MAP_VALUE_ITERATOR_TYPE:
|
||||
JSMapIterator::cast(this)->JSMapIteratorVerify(isolate);
|
||||
JSMapIterator::cast(*this)->JSMapIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_ARRAY_ITERATOR_TYPE:
|
||||
JSArrayIterator::cast(this)->JSArrayIteratorVerify(isolate);
|
||||
JSArrayIterator::cast(*this)->JSArrayIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_STRING_ITERATOR_TYPE:
|
||||
JSStringIterator::cast(this)->JSStringIteratorVerify(isolate);
|
||||
JSStringIterator::cast(*this)->JSStringIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
|
||||
JSAsyncFromSyncIterator::cast(this)->JSAsyncFromSyncIteratorVerify(
|
||||
JSAsyncFromSyncIterator::cast(*this)->JSAsyncFromSyncIteratorVerify(
|
||||
isolate);
|
||||
break;
|
||||
case JS_WEAK_CELL_TYPE:
|
||||
JSWeakCell::cast(this)->JSWeakCellVerify(isolate);
|
||||
JSWeakCell::cast(*this)->JSWeakCellVerify(isolate);
|
||||
break;
|
||||
case JS_WEAK_REF_TYPE:
|
||||
JSWeakRef::cast(this)->JSWeakRefVerify(isolate);
|
||||
JSWeakRef::cast(*this)->JSWeakRefVerify(isolate);
|
||||
break;
|
||||
case JS_WEAK_FACTORY_TYPE:
|
||||
JSWeakFactory::cast(this)->JSWeakFactoryVerify(isolate);
|
||||
JSWeakFactory::cast(*this)->JSWeakFactoryVerify(isolate);
|
||||
break;
|
||||
case JS_WEAK_FACTORY_CLEANUP_ITERATOR_TYPE:
|
||||
JSWeakFactoryCleanupIterator::cast(this)
|
||||
JSWeakFactoryCleanupIterator::cast(*this)
|
||||
->JSWeakFactoryCleanupIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_WEAK_MAP_TYPE:
|
||||
JSWeakMap::cast(this)->JSWeakMapVerify(isolate);
|
||||
JSWeakMap::cast(*this)->JSWeakMapVerify(isolate);
|
||||
break;
|
||||
case JS_WEAK_SET_TYPE:
|
||||
JSWeakSet::cast(this)->JSWeakSetVerify(isolate);
|
||||
JSWeakSet::cast(*this)->JSWeakSetVerify(isolate);
|
||||
break;
|
||||
case JS_PROMISE_TYPE:
|
||||
JSPromise::cast(this)->JSPromiseVerify(isolate);
|
||||
JSPromise::cast(*this)->JSPromiseVerify(isolate);
|
||||
break;
|
||||
case JS_REGEXP_TYPE:
|
||||
JSRegExp::cast(this)->JSRegExpVerify(isolate);
|
||||
JSRegExp::cast(*this)->JSRegExpVerify(isolate);
|
||||
break;
|
||||
case JS_REGEXP_STRING_ITERATOR_TYPE:
|
||||
JSRegExpStringIterator::cast(this)->JSRegExpStringIteratorVerify(isolate);
|
||||
JSRegExpStringIterator::cast(*this)->JSRegExpStringIteratorVerify(
|
||||
isolate);
|
||||
break;
|
||||
case FILLER_TYPE:
|
||||
break;
|
||||
case JS_PROXY_TYPE:
|
||||
JSProxy::cast(this)->JSProxyVerify(isolate);
|
||||
JSProxy::cast(*this)->JSProxyVerify(isolate);
|
||||
break;
|
||||
case FOREIGN_TYPE:
|
||||
Foreign::cast(this)->ForeignVerify(isolate);
|
||||
Foreign::cast(*this)->ForeignVerify(isolate);
|
||||
break;
|
||||
case PRE_PARSED_SCOPE_DATA_TYPE:
|
||||
PreParsedScopeData::cast(this)->PreParsedScopeDataVerify(isolate);
|
||||
PreParsedScopeData::cast(*this)->PreParsedScopeDataVerify(isolate);
|
||||
break;
|
||||
case UNCOMPILED_DATA_WITHOUT_PRE_PARSED_SCOPE_TYPE:
|
||||
UncompiledDataWithoutPreParsedScope::cast(this)
|
||||
UncompiledDataWithoutPreParsedScope::cast(*this)
|
||||
->UncompiledDataWithoutPreParsedScopeVerify(isolate);
|
||||
break;
|
||||
case UNCOMPILED_DATA_WITH_PRE_PARSED_SCOPE_TYPE:
|
||||
UncompiledDataWithPreParsedScope::cast(this)
|
||||
UncompiledDataWithPreParsedScope::cast(*this)
|
||||
->UncompiledDataWithPreParsedScopeVerify(isolate);
|
||||
break;
|
||||
case SHARED_FUNCTION_INFO_TYPE:
|
||||
SharedFunctionInfo::cast(this)->SharedFunctionInfoVerify(isolate);
|
||||
SharedFunctionInfo::cast(*this)->SharedFunctionInfoVerify(isolate);
|
||||
break;
|
||||
case JS_MESSAGE_OBJECT_TYPE:
|
||||
JSMessageObject::cast(this)->JSMessageObjectVerify(isolate);
|
||||
JSMessageObject::cast(*this)->JSMessageObjectVerify(isolate);
|
||||
break;
|
||||
case JS_ARRAY_BUFFER_TYPE:
|
||||
JSArrayBuffer::cast(this)->JSArrayBufferVerify(isolate);
|
||||
JSArrayBuffer::cast(*this)->JSArrayBufferVerify(isolate);
|
||||
break;
|
||||
case JS_TYPED_ARRAY_TYPE:
|
||||
JSTypedArray::cast(this)->JSTypedArrayVerify(isolate);
|
||||
JSTypedArray::cast(*this)->JSTypedArrayVerify(isolate);
|
||||
break;
|
||||
case JS_DATA_VIEW_TYPE:
|
||||
JSDataView::cast(this)->JSDataViewVerify(isolate);
|
||||
JSDataView::cast(*this)->JSDataViewVerify(isolate);
|
||||
break;
|
||||
case SMALL_ORDERED_HASH_SET_TYPE:
|
||||
SmallOrderedHashSet::cast(this)->SmallOrderedHashTableVerify(isolate);
|
||||
SmallOrderedHashSet::cast(*this)->SmallOrderedHashTableVerify(isolate);
|
||||
break;
|
||||
case SMALL_ORDERED_HASH_MAP_TYPE:
|
||||
SmallOrderedHashMap::cast(this)->SmallOrderedHashTableVerify(isolate);
|
||||
SmallOrderedHashMap::cast(*this)->SmallOrderedHashTableVerify(isolate);
|
||||
break;
|
||||
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
|
||||
SmallOrderedNameDictionary::cast(this)->SmallOrderedHashTableVerify(
|
||||
SmallOrderedNameDictionary::cast(*this)->SmallOrderedHashTableVerify(
|
||||
isolate);
|
||||
break;
|
||||
case CODE_DATA_CONTAINER_TYPE:
|
||||
CodeDataContainer::cast(this)->CodeDataContainerVerify(isolate);
|
||||
CodeDataContainer::cast(*this)->CodeDataContainerVerify(isolate);
|
||||
break;
|
||||
#ifdef V8_INTL_SUPPORT
|
||||
case JS_INTL_V8_BREAK_ITERATOR_TYPE:
|
||||
JSV8BreakIterator::cast(this)->JSV8BreakIteratorVerify(isolate);
|
||||
JSV8BreakIterator::cast(*this)->JSV8BreakIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_COLLATOR_TYPE:
|
||||
JSCollator::cast(this)->JSCollatorVerify(isolate);
|
||||
JSCollator::cast(*this)->JSCollatorVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_DATE_TIME_FORMAT_TYPE:
|
||||
JSDateTimeFormat::cast(this)->JSDateTimeFormatVerify(isolate);
|
||||
JSDateTimeFormat::cast(*this)->JSDateTimeFormatVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_LIST_FORMAT_TYPE:
|
||||
JSListFormat::cast(this)->JSListFormatVerify(isolate);
|
||||
JSListFormat::cast(*this)->JSListFormatVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_LOCALE_TYPE:
|
||||
JSLocale::cast(this)->JSLocaleVerify(isolate);
|
||||
JSLocale::cast(*this)->JSLocaleVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_NUMBER_FORMAT_TYPE:
|
||||
JSNumberFormat::cast(this)->JSNumberFormatVerify(isolate);
|
||||
JSNumberFormat::cast(*this)->JSNumberFormatVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_PLURAL_RULES_TYPE:
|
||||
JSPluralRules::cast(this)->JSPluralRulesVerify(isolate);
|
||||
JSPluralRules::cast(*this)->JSPluralRulesVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
|
||||
JSRelativeTimeFormat::cast(this)->JSRelativeTimeFormatVerify(isolate);
|
||||
JSRelativeTimeFormat::cast(*this)->JSRelativeTimeFormatVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_SEGMENT_ITERATOR_TYPE:
|
||||
JSSegmentIterator::cast(this)->JSSegmentIteratorVerify(isolate);
|
||||
JSSegmentIterator::cast(*this)->JSSegmentIteratorVerify(isolate);
|
||||
break;
|
||||
case JS_INTL_SEGMENTER_TYPE:
|
||||
JSSegmenter::cast(this)->JSSegmenterVerify(isolate);
|
||||
JSSegmenter::cast(*this)->JSSegmenterVerify(isolate);
|
||||
break;
|
||||
#endif // V8_INTL_SUPPORT
|
||||
|
||||
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
|
||||
case TYPE: \
|
||||
Name::cast(this)->Name##Verify(isolate); \
|
||||
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
|
||||
case TYPE: \
|
||||
Name::cast(*this)->Name##Verify(isolate); \
|
||||
break;
|
||||
STRUCT_LIST(MAKE_STRUCT_CASE)
|
||||
#undef MAKE_STRUCT_CASE
|
||||
|
||||
case ALLOCATION_SITE_TYPE:
|
||||
AllocationSite::cast(this)->AllocationSiteVerify(isolate);
|
||||
AllocationSite::cast(*this)->AllocationSiteVerify(isolate);
|
||||
break;
|
||||
|
||||
case LOAD_HANDLER_TYPE:
|
||||
LoadHandler::cast(this)->LoadHandlerVerify(isolate);
|
||||
LoadHandler::cast(*this)->LoadHandlerVerify(isolate);
|
||||
break;
|
||||
|
||||
case STORE_HANDLER_TYPE:
|
||||
StoreHandler::cast(this)->StoreHandlerVerify(isolate);
|
||||
StoreHandler::cast(*this)->StoreHandlerVerify(isolate);
|
||||
break;
|
||||
}
}

void HeapObjectPtr::HeapObjectVerify(Isolate* isolate) {
  reinterpret_cast<HeapObject*>(ptr())->HeapObjectVerify(isolate);
}

void HeapObject::VerifyHeapPointer(Isolate* isolate, Object* p) {
  CHECK(p->IsHeapObject());
  HeapObject* ho = HeapObject::cast(p);
  HeapObject ho = HeapObject::cast(p);
  CHECK(isolate->heap()->Contains(ho));
}

void HeapObjectPtr::VerifyHeapPointer(Isolate* isolate, Object* p) {
  HeapObject::VerifyHeapPointer(isolate, p);
}

void Symbol::SymbolVerify(Isolate* isolate) {
  CHECK(IsSymbol());
  CHECK(HasHashCode());
@ -814,7 +808,7 @@ void DescriptorArray::DescriptorArrayVerify(Isolate* isolate) {
      CHECK_NE(details.attributes() & DONT_ENUM, 0);
    }
    MaybeObject value = get(ToValueIndex(descriptor));
    HeapObject* heap_object;
    HeapObject heap_object;
    if (details.location() == kField) {
      CHECK(
          value == MaybeObject::FromObject(FieldType::None()) ||
@ -1745,7 +1739,7 @@ void PrototypeUsers::Verify(WeakArrayList array) {
  // slots.
  int weak_maps_count = 0;
  for (int i = kFirstIndex; i < array->length(); ++i) {
    HeapObject* heap_object;
    HeapObject heap_object;
    MaybeObject object = array->Get(i);
    if ((object->GetHeapObjectIfWeak(&heap_object) && heap_object->IsMap()) ||
        object->IsCleared()) {
@ -1981,7 +1975,7 @@ void Script::ScriptVerify(Isolate* isolate) {
  VerifyPointer(isolate, line_ends());
  for (int i = 0; i < shared_function_infos()->length(); ++i) {
    MaybeObject maybe_object = shared_function_infos()->Get(i);
    HeapObject* heap_object;
    HeapObject heap_object;
    CHECK(maybe_object->IsWeak() || maybe_object->IsCleared() ||
          (maybe_object->GetHeapObjectIfStrong(&heap_object) &&
           heap_object->IsUndefined(isolate)));
@ -1993,7 +1987,7 @@ void NormalizedMapCache::NormalizedMapCacheVerify(Isolate* isolate) {
  if (FLAG_enable_slow_asserts) {
    for (int i = 0; i < length(); i++) {
      MaybeObject e = WeakFixedArray::Get(i);
      HeapObject* heap_object;
      HeapObject heap_object;
      if (e->GetHeapObjectIfWeak(&heap_object)) {
        Map::cast(heap_object)->DictionaryMapVerify(isolate);
      } else {

@ -130,10 +130,10 @@ HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
    return IsHeapObject() && HeapObject::cast(this)->Is##Type(); \
  } \
  bool HeapObject::Is##Type(Isolate* isolate) const { \
    return Object::Is##Type(isolate); \
    return reinterpret_cast<Object*>(ptr())->Is##Type(isolate); \
  } \
  bool HeapObject::Is##Type(ReadOnlyRoots roots) const { \
    return Object::Is##Type(roots); \
    return reinterpret_cast<Object*>(ptr())->Is##Type(roots); \
  } \
  bool HeapObject::Is##Type() const { return Is##Type(GetReadOnlyRoots()); }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
@ -152,11 +152,11 @@ bool Object::IsNullOrUndefined() const {
}

bool HeapObject::IsNullOrUndefined(Isolate* isolate) const {
  return Object::IsNullOrUndefined(isolate);
  return reinterpret_cast<Object*>(ptr())->IsNullOrUndefined(isolate);
}

bool HeapObject::IsNullOrUndefined(ReadOnlyRoots roots) const {
  return Object::IsNullOrUndefined(roots);
  return reinterpret_cast<Object*>(ptr())->IsNullOrUndefined(roots);
}

bool HeapObject::IsNullOrUndefined() const {
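The IS_TYPE_FUNCTION_DEF hunk above is the core of the port: HeapObject no longer inherits from Object, so the predicates can no longer fall through to an inherited Object::Is##Type() call and instead rebuild an Object* view from the tagged word returned by ptr(). A minimal illustrative sketch of that shape, using invented names (TaggedHandle, LegacyObject, IsOddballLike) that are not part of V8:

// Illustrative sketch only; it mirrors the shape of the change, not V8 itself.
#include <cstdint>

class LegacyObject {  // stand-in for the old pointer-based Object hierarchy
 public:
  bool IsOddballLike() const {
    // Only inspects the pointer value itself, never dereferences it.
    return (reinterpret_cast<uintptr_t>(this) & 1) == 1;
  }
};

class TaggedHandle {  // stand-in for the new value-type HeapObject
 public:
  explicit TaggedHandle(uintptr_t tagged) : ptr_(tagged) {}
  uintptr_t ptr() const { return ptr_; }

  // Old design: forward to the inherited LegacyObject::IsOddballLike().
  // New design: reinterpret the stored word, as in
  //   reinterpret_cast<Object*>(ptr())->Is##Type(isolate)
  bool IsOddballLike() const {
    return reinterpret_cast<LegacyObject*>(ptr_)->IsOddballLike();
  }

 private:
  uintptr_t ptr_;  // the only state is the tagged address
};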
@ -186,51 +186,51 @@ bool HeapObject::IsTemplateInfo() const {
|
||||
|
||||
bool HeapObject::IsConsString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsCons();
|
||||
return StringShape(String::cast(*this)).IsCons();
|
||||
}
|
||||
|
||||
bool HeapObject::IsThinString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsThin();
|
||||
return StringShape(String::cast(*this)).IsThin();
|
||||
}
|
||||
|
||||
bool HeapObject::IsSlicedString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsSliced();
|
||||
return StringShape(String::cast(*this)).IsSliced();
|
||||
}
|
||||
|
||||
bool HeapObject::IsSeqString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsSequential();
|
||||
return StringShape(String::cast(*this)).IsSequential();
|
||||
}
|
||||
|
||||
bool HeapObject::IsSeqOneByteString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsSequential() &&
|
||||
String::cast(this)->IsOneByteRepresentation();
|
||||
return StringShape(String::cast(*this)).IsSequential() &&
|
||||
String::cast(*this)->IsOneByteRepresentation();
|
||||
}
|
||||
|
||||
bool HeapObject::IsSeqTwoByteString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsSequential() &&
|
||||
String::cast(this)->IsTwoByteRepresentation();
|
||||
return StringShape(String::cast(*this)).IsSequential() &&
|
||||
String::cast(*this)->IsTwoByteRepresentation();
|
||||
}
|
||||
|
||||
bool HeapObject::IsExternalString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsExternal();
|
||||
return StringShape(String::cast(*this)).IsExternal();
|
||||
}
|
||||
|
||||
bool HeapObject::IsExternalOneByteString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsExternal() &&
|
||||
String::cast(this)->IsOneByteRepresentation();
|
||||
return StringShape(String::cast(*this)).IsExternal() &&
|
||||
String::cast(*this)->IsOneByteRepresentation();
|
||||
}
|
||||
|
||||
bool HeapObject::IsExternalTwoByteString() const {
|
||||
if (!IsString()) return false;
|
||||
return StringShape(String::cast(this)).IsExternal() &&
|
||||
String::cast(this)->IsTwoByteRepresentation();
|
||||
return StringShape(String::cast(*this)).IsExternal() &&
|
||||
String::cast(*this)->IsTwoByteRepresentation();
|
||||
}
|
||||
|
||||
bool Object::IsNumber() const { return IsSmi() || IsHeapNumber(); }
|
||||
@ -258,7 +258,7 @@ bool HeapObject::IsFrameArray() const { return IsFixedArrayExact(); }

bool HeapObject::IsArrayList() const {
  return map() == GetReadOnlyRoots().array_list_map() ||
         this == GetReadOnlyRoots().empty_fixed_array();
         *this == GetReadOnlyRoots().empty_fixed_array();
}

bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArrayExact(); }
@ -273,7 +273,7 @@ bool HeapObject::IsDeoptimizationData() const {
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  int length = FixedArray::cast(*this)->length();
  if (length == 0) return true;

  length -= DeoptimizationData::kFirstDeoptEntryIndex;
@ -291,7 +291,7 @@ bool HeapObject::IsTemplateList() const {
  if (!IsFixedArrayExact()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a template list.
  if (FixedArray::cast(this)->length() < 1) return false;
  if (FixedArray::cast(*this)->length() < 1) return false;
  return true;
}

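The recurring cast(this) to cast(*this) rewrite in these predicates follows from the same value-type move: previously `this` itself was the tagged pointer into the heap, so casts could take a `const Object*`; now `this` is just the address of a stack-allocated handle, and the cast has to take the handle by value. A small self-contained sketch, with invented names (ValueHandle, FakeLength) rather than V8's real types:

// Sketch only, not V8 source; it shows why `*this` replaces `this` in casts.
#include <cassert>
#include <cstdint>

class ValueHandle {
 public:
  explicit ValueHandle(uintptr_t tagged) : ptr_(tagged) {}
  uintptr_t ptr() const { return ptr_; }

  // Old style: Something::cast(this) worked because `this` was the tagged
  // heap pointer. New style: the cast takes the handle by value.
  static ValueHandle cast(ValueHandle obj) { return obj; }

  int LengthOfSelf() const {
    // mirrors `FixedArray::cast(*this)->length()` from the diff
    return cast(*this).FakeLength();
  }

 private:
  int FakeLength() const { return static_cast<int>(ptr_ & 0xff); }  // placeholder
  uintptr_t ptr_;
};

int main() {
  ValueHandle h(0x41);
  assert(ValueHandle::cast(h).ptr() == 0x41);
  assert(h.LengthOfSelf() == 0x41);
  return 0;
}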
@ -307,32 +307,32 @@ bool HeapObject::IsAbstractCode() const {
|
||||
}
|
||||
|
||||
bool HeapObject::IsStringWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsString();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsString();
|
||||
}
|
||||
|
||||
bool HeapObject::IsBooleanWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsBoolean();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsBoolean();
|
||||
}
|
||||
|
||||
bool HeapObject::IsScriptWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsScript();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsScript();
|
||||
}
|
||||
|
||||
bool HeapObject::IsNumberWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsNumber();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsNumber();
|
||||
}
|
||||
|
||||
bool HeapObject::IsBigIntWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsBigInt();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsBigInt();
|
||||
}
|
||||
|
||||
bool HeapObject::IsSymbolWrapper() const {
|
||||
return IsJSValue() && JSValue::cast(this)->value()->IsSymbol();
|
||||
return IsJSValue() && JSValue::cast(*this)->value()->IsSymbol();
|
||||
}
|
||||
|
||||
bool HeapObject::IsBoolean() const {
|
||||
return IsOddball() &&
|
||||
((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
|
||||
((Oddball::cast(*this)->kind() & Oddball::kNotBooleanMask) == 0);
|
||||
}
|
||||
|
||||
bool HeapObject::IsJSArrayBufferView() const {
|
||||
@ -344,7 +344,7 @@ bool HeapObject::IsStringSet() const { return IsHashTable(); }
|
||||
bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }
|
||||
|
||||
bool HeapObject::IsNormalizedMapCache() const {
|
||||
return NormalizedMapCache::IsNormalizedMapCache(this);
|
||||
return NormalizedMapCache::IsNormalizedMapCache(*this);
|
||||
}
|
||||
|
||||
bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }
|
||||
@ -375,7 +375,7 @@ bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }
|
||||
|
||||
bool HeapObject::IsAccessCheckNeeded() const {
|
||||
if (IsJSGlobalProxy()) {
|
||||
const JSGlobalProxy proxy = JSGlobalProxy::cast(this);
|
||||
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
|
||||
JSGlobalObject global = proxy->GetIsolate()->context()->global_object();
|
||||
return proxy->IsDetachedFrom(global);
|
||||
}
|
||||
@ -429,6 +429,7 @@ bool Object::IsMinusZero() const {
|
||||
i::IsMinusZero(HeapNumber::cast(this)->value());
|
||||
}
|
||||
|
||||
OBJECT_CONSTRUCTORS_IMPL(HeapObject, ObjectPtr)
|
||||
OBJECT_CONSTRUCTORS_IMPL(HashTableBase, FixedArray)
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
@ -463,7 +464,7 @@ NormalizedMapCache::NormalizedMapCache(Address ptr) : WeakFixedArray(ptr) {
|
||||
// OBJECT_CONSTRUCTORS_IMPL macro?
|
||||
}
|
||||
|
||||
OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObjectPtr)
|
||||
OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObject)
|
||||
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
|
||||
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)
|
||||
|
||||
@ -475,7 +476,7 @@ OBJECT_CONSTRUCTORS_IMPL(TemplateObjectDescription, Tuple2)
|
||||
CAST_ACCESSOR2(BigInt)
|
||||
CAST_ACCESSOR2(ObjectBoilerplateDescription)
|
||||
CAST_ACCESSOR2(EphemeronHashTable)
|
||||
CAST_ACCESSOR(HeapObject)
|
||||
CAST_ACCESSOR2(HeapObject)
|
||||
CAST_ACCESSOR2(NormalizedMapCache)
|
||||
CAST_ACCESSOR(Object)
|
||||
CAST_ACCESSOR2(ObjectHashSet)
|
||||
@ -703,7 +704,7 @@ ObjectSlot HeapObject::RawField(int byte_offset) const {
  return ObjectSlot(FIELD_ADDR(this, byte_offset));
}

ObjectSlot HeapObject::RawField(const HeapObject* obj, int byte_offset) {
ObjectSlot HeapObject::RawField(const HeapObject obj, int byte_offset) {
  return ObjectSlot(FIELD_ADDR(obj, byte_offset));
}

@ -711,8 +712,7 @@ MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
}

MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject* obj,
                                              int byte_offset) {
MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject obj, int byte_offset) {
  return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
}

@ -722,17 +722,15 @@ Map MapWord::ToMap() const { return Map::unchecked_cast(ObjectPtr(value_)); }

bool MapWord::IsForwardingAddress() const { return HAS_SMI_TAG(value_); }

MapWord MapWord::FromForwardingAddress(HeapObject* object) {
MapWord MapWord::FromForwardingAddress(HeapObject object) {
  return MapWord(object->ptr() - kHeapObjectTag);
}


HeapObject* MapWord::ToForwardingAddress() {
HeapObject MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(value_);
}


#ifdef VERIFY_HEAP
void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
  VerifyPointer(isolate, READ_FIELD(this, offset));
@ -751,7 +749,7 @@ void HeapObject::VerifySmiField(int offset) {
ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
  // TODO(v8:7464): When RO_SPACE is embedded, this will access a global
  // variable instead.
  return ReadOnlyRoots(MemoryChunk::FromHeapObject(this)->heap());
  return ReadOnlyRoots(MemoryChunk::FromHeapObject(*this)->heap());
}

Heap* NeverReadOnlySpaceObject::GetHeap() const {
@ -773,7 +771,7 @@ Map HeapObject::map() const { return map_word().ToMap(); }
void HeapObject::set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
@ -791,7 +789,7 @@ Map HeapObject::synchronized_map() const {
void HeapObject::synchronized_set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  synchronized_set_map_word(MapWord::FromMap(value));
@ -807,7 +805,7 @@ void HeapObject::synchronized_set_map(Map value) {
void HeapObject::set_map_no_write_barrier(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(this, value);
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  set_map_word(MapWord::FromMap(value));
@ -935,7 +933,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = Heap::FromWritableHeapObject(this);
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
  if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

@ -954,13 +952,13 @@ AllocationAlignment HeapObject::RequiredAlignment(Map map) {
bool HeapObject::NeedsRehashing() const {
  switch (map()->instance_type()) {
    case DESCRIPTOR_ARRAY_TYPE:
      return DescriptorArray::cast(this)->number_of_descriptors() > 1;
      return DescriptorArray::cast(*this)->number_of_descriptors() > 1;
    case TRANSITION_ARRAY_TYPE:
      return TransitionArray::cast(this)->number_of_entries() > 1;
      return TransitionArray::cast(*this)->number_of_entries() > 1;
    case ORDERED_HASH_MAP_TYPE:
      return OrderedHashMap::cast(this)->NumberOfElements() > 0;
      return OrderedHashMap::cast(*this)->NumberOfElements() > 0;
    case ORDERED_HASH_SET_TYPE:
      return OrderedHashSet::cast(this)->NumberOfElements() > 0;
      return OrderedHashSet::cast(*this)->NumberOfElements() > 0;
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
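The MapWord hunk above keeps the forwarding-pointer encoding intact while switching the parameter and return types to the value-type HeapObject: FromForwardingAddress stores object->ptr() minus kHeapObjectTag so the word passes the HAS_SMI_TAG check, and ToForwardingAddress re-tags it. A worked sketch of that round trip, assuming the usual tagging scheme (kHeapObjectTag == 1, Smi values have a zero low bit); the constants and helper names below are local to the example:

// Sketch of the forwarding-address arithmetic, not V8 source.
#include <cassert>
#include <cstdint>

using Address = uintptr_t;
constexpr Address kTag = 1;  // stands in for kHeapObjectTag (assumed value)

inline bool HasSmiTag(Address value) { return (value & kTag) == 0; }

int main() {
  Address object_start = 0x5000;               // word-aligned object start
  Address tagged_ptr = object_start + kTag;    // what HeapObject::ptr() returns

  // FromForwardingAddress: strip the tag so the stored map word looks like a
  // Smi, which is exactly what IsForwardingAddress() tests for.
  Address map_word = tagged_ptr - kTag;
  assert(HasSmiTag(map_word));

  // ToForwardingAddress: re-tag the stored word, as HeapObject::FromAddress does.
  Address round_tripped = map_word + kTag;
  assert(round_tripped == tagged_ptr);
  return 0;
}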
@ -1002,97 +1000,97 @@ int HeapObject::SizeFromMap(Map map) const {
|
||||
InstanceType instance_type = map->instance_type();
|
||||
if (IsInRange(instance_type, FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE)) {
|
||||
return FixedArray::SizeFor(
|
||||
FixedArray::unchecked_cast(this)->synchronized_length());
|
||||
FixedArray::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (IsInRange(instance_type, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE)) {
|
||||
// Native context has fixed size.
|
||||
DCHECK_NE(instance_type, NATIVE_CONTEXT_TYPE);
|
||||
return Context::SizeFor(Context::unchecked_cast(this)->length());
|
||||
return Context::SizeFor(Context::unchecked_cast(*this)->length());
|
||||
}
|
||||
if (instance_type == ONE_BYTE_STRING_TYPE ||
|
||||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
|
||||
// Strings may get concurrently truncated, hence we have to access its
|
||||
// length synchronized.
|
||||
return SeqOneByteString::SizeFor(
|
||||
SeqOneByteString::unchecked_cast(this)->synchronized_length());
|
||||
SeqOneByteString::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == BYTE_ARRAY_TYPE) {
|
||||
return ByteArray::SizeFor(
|
||||
ByteArray::unchecked_cast(this)->synchronized_length());
|
||||
ByteArray::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == BYTECODE_ARRAY_TYPE) {
|
||||
return BytecodeArray::SizeFor(
|
||||
BytecodeArray::unchecked_cast(this)->synchronized_length());
|
||||
BytecodeArray::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == FREE_SPACE_TYPE) {
|
||||
return FreeSpace::unchecked_cast(this)->relaxed_read_size();
|
||||
return FreeSpace::unchecked_cast(*this)->relaxed_read_size();
|
||||
}
|
||||
if (instance_type == STRING_TYPE ||
|
||||
instance_type == INTERNALIZED_STRING_TYPE) {
|
||||
// Strings may get concurrently truncated, hence we have to access its
|
||||
// length synchronized.
|
||||
return SeqTwoByteString::SizeFor(
|
||||
SeqTwoByteString::unchecked_cast(this)->synchronized_length());
|
||||
SeqTwoByteString::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
|
||||
return FixedDoubleArray::SizeFor(
|
||||
FixedDoubleArray::unchecked_cast(this)->synchronized_length());
|
||||
FixedDoubleArray::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == FEEDBACK_METADATA_TYPE) {
|
||||
return FeedbackMetadata::SizeFor(
|
||||
FeedbackMetadata::unchecked_cast(this)->synchronized_slot_count());
|
||||
FeedbackMetadata::unchecked_cast(*this)->synchronized_slot_count());
|
||||
}
|
||||
if (instance_type == DESCRIPTOR_ARRAY_TYPE) {
|
||||
return DescriptorArray::SizeFor(
|
||||
DescriptorArray::unchecked_cast(this)->number_of_all_descriptors());
|
||||
DescriptorArray::unchecked_cast(*this)->number_of_all_descriptors());
|
||||
}
|
||||
if (IsInRange(instance_type, FIRST_WEAK_FIXED_ARRAY_TYPE,
|
||||
LAST_WEAK_FIXED_ARRAY_TYPE)) {
|
||||
return WeakFixedArray::SizeFor(
|
||||
WeakFixedArray::unchecked_cast(this)->synchronized_length());
|
||||
WeakFixedArray::unchecked_cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == WEAK_ARRAY_LIST_TYPE) {
|
||||
return WeakArrayList::SizeForCapacity(
|
||||
WeakArrayList::unchecked_cast(this)->synchronized_capacity());
|
||||
WeakArrayList::unchecked_cast(*this)->synchronized_capacity());
|
||||
}
|
||||
if (IsInRange(instance_type, FIRST_FIXED_TYPED_ARRAY_TYPE,
|
||||
LAST_FIXED_TYPED_ARRAY_TYPE)) {
|
||||
return FixedTypedArrayBase::unchecked_cast(this)->TypedArraySize(
|
||||
return FixedTypedArrayBase::unchecked_cast(*this)->TypedArraySize(
|
||||
instance_type);
|
||||
}
|
||||
if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
|
||||
return SmallOrderedHashSet::SizeFor(
|
||||
SmallOrderedHashSet::unchecked_cast(this)->Capacity());
|
||||
SmallOrderedHashSet::unchecked_cast(*this)->Capacity());
|
||||
}
|
||||
if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
|
||||
return SmallOrderedHashMap::SizeFor(
|
||||
SmallOrderedHashMap::unchecked_cast(this)->Capacity());
|
||||
SmallOrderedHashMap::unchecked_cast(*this)->Capacity());
|
||||
}
|
||||
if (instance_type == SMALL_ORDERED_NAME_DICTIONARY_TYPE) {
|
||||
return SmallOrderedNameDictionary::SizeFor(
|
||||
SmallOrderedNameDictionary::unchecked_cast(this)->Capacity());
|
||||
SmallOrderedNameDictionary::unchecked_cast(*this)->Capacity());
|
||||
}
|
||||
if (instance_type == PROPERTY_ARRAY_TYPE) {
|
||||
return PropertyArray::SizeFor(
|
||||
PropertyArray::cast(this)->synchronized_length());
|
||||
PropertyArray::cast(*this)->synchronized_length());
|
||||
}
|
||||
if (instance_type == FEEDBACK_VECTOR_TYPE) {
|
||||
return FeedbackVector::SizeFor(
|
||||
FeedbackVector::unchecked_cast(this)->length());
|
||||
FeedbackVector::unchecked_cast(*this)->length());
|
||||
}
|
||||
if (instance_type == BIGINT_TYPE) {
|
||||
return BigInt::SizeFor(BigInt::unchecked_cast(this)->length());
|
||||
return BigInt::SizeFor(BigInt::unchecked_cast(*this)->length());
|
||||
}
|
||||
if (instance_type == PRE_PARSED_SCOPE_DATA_TYPE) {
|
||||
return PreParsedScopeData::SizeFor(
|
||||
PreParsedScopeData::unchecked_cast(this)->length());
|
||||
PreParsedScopeData::unchecked_cast(*this)->length());
|
||||
}
|
||||
if (instance_type == CODE_TYPE) {
|
||||
return Code::unchecked_cast(this)->CodeSize();
|
||||
return Code::unchecked_cast(*this)->CodeSize();
|
||||
}
|
||||
DCHECK_EQ(instance_type, EMBEDDER_DATA_ARRAY_TYPE);
|
||||
return EmbedderDataArray::SizeFor(
|
||||
EmbedderDataArray::unchecked_cast(this)->length());
|
||||
EmbedderDataArray::unchecked_cast(*this)->length());
|
||||
}
|
||||
|
||||
ACCESSORS2(TemplateObjectDescription, raw_strings, FixedArray,
|
||||
@ -1262,7 +1260,7 @@ Relocatable::~Relocatable() {
  isolate_->set_relocatable_top(prev_);
}

// Predictably converts HeapObject* or Address to uint32 by calculating
// Predictably converts HeapObject or Address to uint32 by calculating
// offset of the address in respective MemoryChunk.
static inline uint32_t ObjectAddressForHashing(void* object) {
  uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));

@ -84,20 +84,6 @@ void Object::Print(std::ostream& os) { // NOLINT
}

void HeapObject::PrintHeader(std::ostream& os, const char* id) { // NOLINT
  os << reinterpret_cast<void*>(this) << ": [";
  if (id != nullptr) {
    os << id;
  } else {
    os << map()->instance_type();
  }
  os << "]";
  MemoryChunk* chunk = MemoryChunk::FromAddress(
      reinterpret_cast<Address>(const_cast<HeapObject*>(this)));
  if (chunk->owner()->identity() == OLD_SPACE) os << " in OldSpace";
  if (!IsMap()) os << "\n - map: " << Brief(map());
}

void HeapObjectPtr::PrintHeader(std::ostream& os, const char* id) { // NOLINT
  os << reinterpret_cast<void*>(ptr()) << ": [";
  if (id != nullptr) {
    os << id;
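In the printer hunk above, the old HeapObject::PrintHeader printed `this` (which used to be the tagged heap address) while the HeapObjectPtr variant printed ptr(); once HeapObject is itself a value type, only the ptr()-based body is needed. A minimal sketch of that single surviving shape, with invented names that are not V8's:

// Sketch only: the printed address comes from the stored tagged word,
// never from the C++ address of the stack-allocated handle.
#include <cstdint>
#include <iostream>

class Handle {
 public:
  explicit Handle(uintptr_t tagged) : ptr_(tagged) {}
  uintptr_t ptr() const { return ptr_; }

  void PrintHeader(std::ostream& os, const char* id) const {
    // mirrors `os << reinterpret_cast<void*>(ptr()) << ": ["` from the diff
    os << reinterpret_cast<void*>(ptr()) << ": [" << (id ? id : "<no id>")
       << "]";
  }

 private:
  uintptr_t ptr_;
};

int main() {
  Handle h(0x1235);
  h.PrintHeader(std::cout, "ExampleType");
  std::cout << "\n";
  return 0;
}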
@ -114,39 +100,39 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
|
||||
InstanceType instance_type = map()->instance_type();
|
||||
|
||||
if (instance_type < FIRST_NONSTRING_TYPE) {
|
||||
String::cast(this)->StringPrint(os);
|
||||
String::cast(*this)->StringPrint(os);
|
||||
os << "\n";
|
||||
return;
|
||||
}
|
||||
|
||||
switch (instance_type) {
|
||||
case SYMBOL_TYPE:
|
||||
Symbol::cast(this)->SymbolPrint(os);
|
||||
Symbol::cast(*this)->SymbolPrint(os);
|
||||
break;
|
||||
case MAP_TYPE:
|
||||
Map::cast(this)->MapPrint(os);
|
||||
Map::cast(*this)->MapPrint(os);
|
||||
break;
|
||||
case HEAP_NUMBER_TYPE:
|
||||
HeapNumber::cast(this)->HeapNumberPrint(os);
|
||||
HeapNumber::cast(*this)->HeapNumberPrint(os);
|
||||
os << "\n";
|
||||
break;
|
||||
case MUTABLE_HEAP_NUMBER_TYPE:
|
||||
os << "<mutable ";
|
||||
MutableHeapNumber::cast(this)->MutableHeapNumberPrint(os);
|
||||
MutableHeapNumber::cast(*this)->MutableHeapNumberPrint(os);
|
||||
os << ">\n";
|
||||
break;
|
||||
case BIGINT_TYPE:
|
||||
BigInt::cast(this)->BigIntPrint(os);
|
||||
BigInt::cast(*this)->BigIntPrint(os);
|
||||
os << "\n";
|
||||
break;
|
||||
case EMBEDDER_DATA_ARRAY_TYPE:
|
||||
EmbedderDataArray::cast(this)->EmbedderDataArrayPrint(os);
|
||||
EmbedderDataArray::cast(*this)->EmbedderDataArrayPrint(os);
|
||||
break;
|
||||
case FIXED_DOUBLE_ARRAY_TYPE:
|
||||
FixedDoubleArray::cast(this)->FixedDoubleArrayPrint(os);
|
||||
FixedDoubleArray::cast(*this)->FixedDoubleArrayPrint(os);
|
||||
break;
|
||||
case FIXED_ARRAY_TYPE:
|
||||
FixedArray::cast(this)->FixedArrayPrint(os);
|
||||
FixedArray::cast(*this)->FixedArrayPrint(os);
|
||||
break;
|
||||
case AWAIT_CONTEXT_TYPE:
|
||||
case BLOCK_CONTEXT_TYPE:
|
||||
@ -158,10 +144,10 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
|
||||
case SCRIPT_CONTEXT_TYPE:
|
||||
case WITH_CONTEXT_TYPE:
|
||||
case SCRIPT_CONTEXT_TABLE_TYPE:
|
||||
Context::cast(this)->ContextPrint(os);
|
||||
Context::cast(*this)->ContextPrint(os);
|
||||
break;
|
||||
case NATIVE_CONTEXT_TYPE:
|
||||
NativeContext::cast(this)->NativeContextPrint(os);
|
||||
NativeContext::cast(*this)->NativeContextPrint(os);
|
||||
break;
|
||||
case HASH_TABLE_TYPE:
|
||||
case ORDERED_HASH_MAP_TYPE:
|
||||
@ -171,46 +157,46 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
|
||||
case GLOBAL_DICTIONARY_TYPE:
|
||||
case SIMPLE_NUMBER_DICTIONARY_TYPE:
|
||||
case STRING_TABLE_TYPE:
|
||||
ObjectHashTable::cast(this)->ObjectHashTablePrint(os);
|
||||
ObjectHashTable::cast(*this)->ObjectHashTablePrint(os);
|
||||
break;
|
||||
case NUMBER_DICTIONARY_TYPE:
|
||||
NumberDictionary::cast(this)->NumberDictionaryPrint(os);
|
||||
NumberDictionary::cast(*this)->NumberDictionaryPrint(os);
|
||||
break;
|
||||
case EPHEMERON_HASH_TABLE_TYPE:
|
||||
EphemeronHashTable::cast(this)->EphemeronHashTablePrint(os);
|
||||
EphemeronHashTable::cast(*this)->EphemeronHashTablePrint(os);
|
||||
break;
|
||||
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
|
||||
ObjectBoilerplateDescription::cast(this)
|
||||
ObjectBoilerplateDescription::cast(*this)
|
||||
->ObjectBoilerplateDescriptionPrint(os);
|
||||
break;
|
||||
case PROPERTY_ARRAY_TYPE:
|
||||
PropertyArray::cast(this)->PropertyArrayPrint(os);
|
||||
PropertyArray::cast(*this)->PropertyArrayPrint(os);
|
||||
break;
|
||||
case BYTE_ARRAY_TYPE:
|
||||
ByteArray::cast(this)->ByteArrayPrint(os);
|
||||
ByteArray::cast(*this)->ByteArrayPrint(os);
|
||||
break;
|
||||
case BYTECODE_ARRAY_TYPE:
|
||||
BytecodeArray::cast(this)->BytecodeArrayPrint(os);
|
||||
BytecodeArray::cast(*this)->BytecodeArrayPrint(os);
|
||||
break;
|
||||
case DESCRIPTOR_ARRAY_TYPE:
|
||||
DescriptorArray::cast(this)->DescriptorArrayPrint(os);
|
||||
DescriptorArray::cast(*this)->DescriptorArrayPrint(os);
|
||||
break;
|
||||
case TRANSITION_ARRAY_TYPE:
|
||||
TransitionArray::cast(this)->TransitionArrayPrint(os);
|
||||
TransitionArray::cast(*this)->TransitionArrayPrint(os);
|
||||
break;
|
||||
case FEEDBACK_CELL_TYPE:
|
||||
FeedbackCell::cast(this)->FeedbackCellPrint(os);
|
||||
FeedbackCell::cast(*this)->FeedbackCellPrint(os);
|
||||
break;
|
||||
case FEEDBACK_VECTOR_TYPE:
|
||||
FeedbackVector::cast(this)->FeedbackVectorPrint(os);
|
||||
FeedbackVector::cast(*this)->FeedbackVectorPrint(os);
|
||||
break;
|
||||
case FREE_SPACE_TYPE:
|
||||
FreeSpace::cast(this)->FreeSpacePrint(os);
|
||||
FreeSpace::cast(*this)->FreeSpacePrint(os);
|
||||
break;
|
||||
|
||||
#define PRINT_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype) \
|
||||
case Fixed##Type##Array::kInstanceType: \
|
||||
Fixed##Type##Array::cast(this)->FixedTypedArrayPrint(os); \
|
||||
#define PRINT_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype) \
|
||||
case Fixed##Type##Array::kInstanceType: \
|
||||
Fixed##Type##Array::cast(*this)->FixedTypedArrayPrint(os); \
|
||||
break;
|
||||
|
||||
TYPED_ARRAYS(PRINT_FIXED_TYPED_ARRAY)
|
||||
@ -232,196 +218,196 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
|
||||
case WASM_GLOBAL_TYPE:
|
||||
case WASM_MEMORY_TYPE:
|
||||
case WASM_TABLE_TYPE:
|
||||
JSObject::cast(this)->JSObjectPrint(os);
|
||||
JSObject::cast(*this)->JSObjectPrint(os);
|
||||
break;
|
||||
case WASM_MODULE_TYPE:
|
||||
WasmModuleObject::cast(this)->WasmModuleObjectPrint(os);
|
||||
WasmModuleObject::cast(*this)->WasmModuleObjectPrint(os);
|
||||
break;
|
||||
case WASM_INSTANCE_TYPE:
|
||||
WasmInstanceObject::cast(this)->WasmInstanceObjectPrint(os);
|
||||
WasmInstanceObject::cast(*this)->WasmInstanceObjectPrint(os);
|
||||
break;
|
||||
case JS_GENERATOR_OBJECT_TYPE:
|
||||
JSGeneratorObject::cast(this)->JSGeneratorObjectPrint(os);
|
||||
JSGeneratorObject::cast(*this)->JSGeneratorObjectPrint(os);
|
||||
break;
|
||||
case JS_PROMISE_TYPE:
|
||||
JSPromise::cast(this)->JSPromisePrint(os);
|
||||
JSPromise::cast(*this)->JSPromisePrint(os);
|
||||
break;
|
||||
case JS_ARRAY_TYPE:
|
||||
JSArray::cast(this)->JSArrayPrint(os);
|
||||
JSArray::cast(*this)->JSArrayPrint(os);
|
||||
break;
|
||||
case JS_REGEXP_TYPE:
|
||||
JSRegExp::cast(this)->JSRegExpPrint(os);
|
||||
JSRegExp::cast(*this)->JSRegExpPrint(os);
|
||||
break;
|
||||
case JS_REGEXP_STRING_ITERATOR_TYPE:
|
||||
JSRegExpStringIterator::cast(this)->JSRegExpStringIteratorPrint(os);
|
||||
JSRegExpStringIterator::cast(*this)->JSRegExpStringIteratorPrint(os);
|
||||
break;
|
||||
case ODDBALL_TYPE:
|
||||
Oddball::cast(this)->to_string()->Print(os);
|
||||
Oddball::cast(*this)->to_string()->Print(os);
|
||||
break;
|
||||
case JS_BOUND_FUNCTION_TYPE:
|
||||
JSBoundFunction::cast(this)->JSBoundFunctionPrint(os);
|
||||
JSBoundFunction::cast(*this)->JSBoundFunctionPrint(os);
|
||||
break;
|
||||
case JS_FUNCTION_TYPE:
|
||||
JSFunction::cast(this)->JSFunctionPrint(os);
|
||||
JSFunction::cast(*this)->JSFunctionPrint(os);
|
||||
break;
|
||||
case JS_GLOBAL_PROXY_TYPE:
|
||||
JSGlobalProxy::cast(this)->JSGlobalProxyPrint(os);
|
||||
JSGlobalProxy::cast(*this)->JSGlobalProxyPrint(os);
|
||||
break;
|
||||
case JS_GLOBAL_OBJECT_TYPE:
|
||||
JSGlobalObject::cast(this)->JSGlobalObjectPrint(os);
|
||||
JSGlobalObject::cast(*this)->JSGlobalObjectPrint(os);
|
||||
break;
|
||||
case JS_VALUE_TYPE:
|
||||
JSValue::cast(this)->JSValuePrint(os);
|
||||
JSValue::cast(*this)->JSValuePrint(os);
|
||||
break;
|
||||
case JS_DATE_TYPE:
|
||||
JSDate::cast(this)->JSDatePrint(os);
|
||||
JSDate::cast(*this)->JSDatePrint(os);
|
||||
break;
|
||||
case CODE_TYPE:
|
||||
Code::cast(this)->CodePrint(os);
|
||||
Code::cast(*this)->CodePrint(os);
|
||||
break;
|
||||
case CODE_DATA_CONTAINER_TYPE:
|
||||
CodeDataContainer::cast(this)->CodeDataContainerPrint(os);
|
||||
CodeDataContainer::cast(*this)->CodeDataContainerPrint(os);
|
||||
break;
|
||||
case JS_PROXY_TYPE:
|
||||
JSProxy::cast(this)->JSProxyPrint(os);
|
||||
JSProxy::cast(*this)->JSProxyPrint(os);
|
||||
break;
|
||||
case JS_SET_TYPE:
|
||||
JSSet::cast(this)->JSSetPrint(os);
|
||||
JSSet::cast(*this)->JSSetPrint(os);
|
||||
break;
|
||||
case JS_MAP_TYPE:
|
||||
JSMap::cast(this)->JSMapPrint(os);
|
||||
JSMap::cast(*this)->JSMapPrint(os);
|
||||
break;
|
||||
case JS_SET_KEY_VALUE_ITERATOR_TYPE:
|
||||
case JS_SET_VALUE_ITERATOR_TYPE:
|
||||
JSSetIterator::cast(this)->JSSetIteratorPrint(os);
|
||||
JSSetIterator::cast(*this)->JSSetIteratorPrint(os);
|
||||
break;
|
||||
case JS_MAP_KEY_ITERATOR_TYPE:
|
||||
case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
|
||||
case JS_MAP_VALUE_ITERATOR_TYPE:
|
||||
JSMapIterator::cast(this)->JSMapIteratorPrint(os);
|
||||
JSMapIterator::cast(*this)->JSMapIteratorPrint(os);
|
||||
break;
|
||||
case JS_WEAK_CELL_TYPE:
|
||||
JSWeakCell::cast(this)->JSWeakCellPrint(os);
|
||||
JSWeakCell::cast(*this)->JSWeakCellPrint(os);
|
||||
break;
|
||||
case JS_WEAK_REF_TYPE:
|
||||
JSWeakRef::cast(this)->JSWeakRefPrint(os);
|
||||
JSWeakRef::cast(*this)->JSWeakRefPrint(os);
|
||||
break;
|
||||
case JS_WEAK_FACTORY_TYPE:
|
||||
JSWeakFactory::cast(this)->JSWeakFactoryPrint(os);
|
||||
JSWeakFactory::cast(*this)->JSWeakFactoryPrint(os);
|
||||
break;
|
||||
case JS_WEAK_FACTORY_CLEANUP_ITERATOR_TYPE:
|
||||
JSWeakFactoryCleanupIterator::cast(this)
|
||||
JSWeakFactoryCleanupIterator::cast(*this)
|
||||
->JSWeakFactoryCleanupIteratorPrint(os);
|
||||
break;
|
||||
case JS_WEAK_MAP_TYPE:
|
||||
JSWeakMap::cast(this)->JSWeakMapPrint(os);
|
||||
JSWeakMap::cast(*this)->JSWeakMapPrint(os);
|
||||
break;
|
||||
case JS_WEAK_SET_TYPE:
|
||||
JSWeakSet::cast(this)->JSWeakSetPrint(os);
|
||||
JSWeakSet::cast(*this)->JSWeakSetPrint(os);
|
||||
break;
|
||||
case JS_MODULE_NAMESPACE_TYPE:
|
||||
JSModuleNamespace::cast(this)->JSModuleNamespacePrint(os);
|
||||
JSModuleNamespace::cast(*this)->JSModuleNamespacePrint(os);
|
||||
break;
|
||||
case FOREIGN_TYPE:
|
||||
Foreign::cast(this)->ForeignPrint(os);
|
||||
Foreign::cast(*this)->ForeignPrint(os);
|
||||
break;
|
||||
case CALL_HANDLER_INFO_TYPE:
|
||||
CallHandlerInfo::cast(this)->CallHandlerInfoPrint(os);
|
||||
CallHandlerInfo::cast(*this)->CallHandlerInfoPrint(os);
|
||||
break;
|
||||
case PRE_PARSED_SCOPE_DATA_TYPE:
|
||||
PreParsedScopeData::cast(this)->PreParsedScopeDataPrint(os);
|
||||
PreParsedScopeData::cast(*this)->PreParsedScopeDataPrint(os);
|
||||
break;
|
||||
case UNCOMPILED_DATA_WITHOUT_PRE_PARSED_SCOPE_TYPE:
|
||||
UncompiledDataWithoutPreParsedScope::cast(this)
|
||||
UncompiledDataWithoutPreParsedScope::cast(*this)
|
||||
->UncompiledDataWithoutPreParsedScopePrint(os);
|
||||
break;
|
||||
case UNCOMPILED_DATA_WITH_PRE_PARSED_SCOPE_TYPE:
|
||||
UncompiledDataWithPreParsedScope::cast(this)
|
||||
UncompiledDataWithPreParsedScope::cast(*this)
|
||||
->UncompiledDataWithPreParsedScopePrint(os);
|
||||
break;
|
||||
case SHARED_FUNCTION_INFO_TYPE:
|
||||
SharedFunctionInfo::cast(this)->SharedFunctionInfoPrint(os);
|
||||
SharedFunctionInfo::cast(*this)->SharedFunctionInfoPrint(os);
|
||||
break;
|
||||
case JS_MESSAGE_OBJECT_TYPE:
|
||||
JSMessageObject::cast(this)->JSMessageObjectPrint(os);
|
||||
JSMessageObject::cast(*this)->JSMessageObjectPrint(os);
|
||||
break;
|
||||
case CELL_TYPE:
|
||||
Cell::cast(this)->CellPrint(os);
|
||||
Cell::cast(*this)->CellPrint(os);
|
||||
break;
|
||||
case PROPERTY_CELL_TYPE:
|
||||
PropertyCell::cast(this)->PropertyCellPrint(os);
|
||||
PropertyCell::cast(*this)->PropertyCellPrint(os);
|
||||
break;
|
||||
case JS_ARRAY_BUFFER_TYPE:
|
||||
JSArrayBuffer::cast(this)->JSArrayBufferPrint(os);
|
||||
JSArrayBuffer::cast(*this)->JSArrayBufferPrint(os);
|
||||
break;
|
||||
case JS_ARRAY_ITERATOR_TYPE:
|
||||
JSArrayIterator::cast(this)->JSArrayIteratorPrint(os);
|
||||
JSArrayIterator::cast(*this)->JSArrayIteratorPrint(os);
|
||||
break;
|
||||
case JS_TYPED_ARRAY_TYPE:
|
||||
JSTypedArray::cast(this)->JSTypedArrayPrint(os);
|
||||
JSTypedArray::cast(*this)->JSTypedArrayPrint(os);
|
||||
break;
|
||||
case JS_DATA_VIEW_TYPE:
|
||||
JSDataView::cast(this)->JSDataViewPrint(os);
|
||||
JSDataView::cast(*this)->JSDataViewPrint(os);
|
||||
break;
|
||||
#ifdef V8_INTL_SUPPORT
|
||||
case JS_INTL_V8_BREAK_ITERATOR_TYPE:
|
||||
JSV8BreakIterator::cast(this)->JSV8BreakIteratorPrint(os);
|
||||
JSV8BreakIterator::cast(*this)->JSV8BreakIteratorPrint(os);
|
||||
break;
|
||||
case JS_INTL_COLLATOR_TYPE:
|
||||
JSCollator::cast(this)->JSCollatorPrint(os);
|
||||
JSCollator::cast(*this)->JSCollatorPrint(os);
|
||||
break;
|
||||
case JS_INTL_DATE_TIME_FORMAT_TYPE:
|
||||
JSDateTimeFormat::cast(this)->JSDateTimeFormatPrint(os);
|
||||
JSDateTimeFormat::cast(*this)->JSDateTimeFormatPrint(os);
|
||||
break;
|
||||
case JS_INTL_LIST_FORMAT_TYPE:
|
||||
JSListFormat::cast(this)->JSListFormatPrint(os);
|
||||
JSListFormat::cast(*this)->JSListFormatPrint(os);
|
||||
break;
|
||||
case JS_INTL_LOCALE_TYPE:
|
||||
JSLocale::cast(this)->JSLocalePrint(os);
|
||||
JSLocale::cast(*this)->JSLocalePrint(os);
|
||||
break;
|
||||
case JS_INTL_NUMBER_FORMAT_TYPE:
|
||||
JSNumberFormat::cast(this)->JSNumberFormatPrint(os);
|
||||
JSNumberFormat::cast(*this)->JSNumberFormatPrint(os);
|
||||
break;
|
||||
case JS_INTL_PLURAL_RULES_TYPE:
|
||||
JSPluralRules::cast(this)->JSPluralRulesPrint(os);
|
||||
JSPluralRules::cast(*this)->JSPluralRulesPrint(os);
|
||||
break;
|
||||
case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
|
||||
JSRelativeTimeFormat::cast(this)->JSRelativeTimeFormatPrint(os);
|
||||
JSRelativeTimeFormat::cast(*this)->JSRelativeTimeFormatPrint(os);
|
||||
break;
|
||||
case JS_INTL_SEGMENT_ITERATOR_TYPE:
|
||||
JSSegmentIterator::cast(this)->JSSegmentIteratorPrint(os);
|
||||
JSSegmentIterator::cast(*this)->JSSegmentIteratorPrint(os);
|
||||
break;
|
||||
case JS_INTL_SEGMENTER_TYPE:
|
||||
JSSegmenter::cast(this)->JSSegmenterPrint(os);
|
||||
JSSegmenter::cast(*this)->JSSegmenterPrint(os);
|
||||
break;
|
||||
#endif // V8_INTL_SUPPORT
|
||||
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
|
||||
case TYPE: \
|
||||
Name::cast(this)->Name##Print(os); \
|
||||
Name::cast(*this)->Name##Print(os); \
|
||||
break;
|
||||
STRUCT_LIST(MAKE_STRUCT_CASE)
|
||||
#undef MAKE_STRUCT_CASE
|
||||
|
||||
case ALLOCATION_SITE_TYPE:
|
||||
AllocationSite::cast(this)->AllocationSitePrint(os);
|
||||
AllocationSite::cast(*this)->AllocationSitePrint(os);
|
||||
break;
|
||||
case LOAD_HANDLER_TYPE:
|
||||
LoadHandler::cast(this)->LoadHandlerPrint(os);
|
||||
LoadHandler::cast(*this)->LoadHandlerPrint(os);
|
||||
break;
|
||||
case STORE_HANDLER_TYPE:
|
||||
StoreHandler::cast(this)->StoreHandlerPrint(os);
|
||||
StoreHandler::cast(*this)->StoreHandlerPrint(os);
|
||||
break;
|
||||
case SCOPE_INFO_TYPE:
|
||||
ScopeInfo::cast(this)->ScopeInfoPrint(os);
|
||||
ScopeInfo::cast(*this)->ScopeInfoPrint(os);
|
||||
break;
|
||||
case FEEDBACK_METADATA_TYPE:
|
||||
FeedbackMetadata::cast(this)->FeedbackMetadataPrint(os);
|
||||
FeedbackMetadata::cast(*this)->FeedbackMetadataPrint(os);
|
||||
break;
|
||||
case WEAK_FIXED_ARRAY_TYPE:
|
||||
WeakFixedArray::cast(this)->WeakFixedArrayPrint(os);
|
||||
WeakFixedArray::cast(*this)->WeakFixedArrayPrint(os);
|
||||
break;
|
||||
case WEAK_ARRAY_LIST_TYPE:
|
||||
WeakArrayList::cast(this)->WeakArrayListPrint(os);
|
||||
WeakArrayList::cast(*this)->WeakArrayListPrint(os);
|
||||
break;
|
||||
case INTERNALIZED_STRING_TYPE:
|
||||
case EXTERNAL_INTERNALIZED_STRING_TYPE:
|
||||
@ -2299,7 +2285,7 @@ void MaybeObject::Print() {

void MaybeObject::Print(std::ostream& os) {
  Smi smi;
  HeapObject* heap_object;
  HeapObject heap_object;
  if (ToSmi(&smi)) {
    smi->SmiPrint(os);
  } else if (IsCleared()) {
@ -2417,14 +2403,13 @@ void Map::MapPrint(std::ostream& os) { // NOLINT
  Isolate* isolate;
  // Read-only maps can't have transitions, which is fortunate because we need
  // the isolate to iterate over the transitions.
  if (Isolate::FromWritableHeapObject(reinterpret_cast<HeapObject*>(ptr()),
                                      &isolate)) {
  if (Isolate::FromWritableHeapObject(*this, &isolate)) {
    DisallowHeapAllocation no_gc;
    TransitionsAccessor transitions(isolate, *this, &no_gc);
    int nof_transitions = transitions.NumberOfTransitions();
    if (nof_transitions > 0) {
      os << "\n - transitions #" << nof_transitions << ": ";
      HeapObject* heap_object;
      HeapObject heap_object;
      Smi smi;
      if (raw_transitions()->ToSmi(&smi)) {
        os << Brief(smi);

208  src/objects.cc
@ -2367,7 +2367,7 @@ Map Object::GetPrototypeChainRootMap(Isolate* isolate) const {
    return native_context->number_function()->initial_map();
  }

  const HeapObject* heap_object = HeapObject::cast(this);
  const HeapObject heap_object = HeapObject::cast(this);
  return heap_object->map()->GetPrototypeChainRootMap(isolate);
}

@ -2581,6 +2581,10 @@ void Object::ShortPrint(StringStream* accumulator) {
  accumulator->Add(os.str().c_str());
}

void ObjectPtr::ShortPrint(StringStream* accumulator) {
  reinterpret_cast<Object*>(ptr())->ShortPrint(accumulator);
}

void Object::ShortPrint(std::ostream& os) { os << Brief(this); }

void ObjectPtr::ShortPrint(std::ostream& os) const { os << Brief(*this); }
@ -2609,7 +2613,7 @@ Brief::Brief(const MaybeObject v) : value(v.ptr()) {}
std::ostream& operator<<(std::ostream& os, const Brief& v) {
  MaybeObject maybe_object(v.value);
  Smi smi;
  HeapObject* heap_object;
  HeapObject heap_object;
  if (maybe_object->ToSmi(&smi)) {
    smi->SmiPrint(os);
  } else if (maybe_object->IsCleared()) {
@ -3446,7 +3450,7 @@ void JSObject::PrintInstanceMigration(FILE* file, Map original_map,
bool JSObject::IsUnmodifiedApiObject(FullObjectSlot o) {
  Object* object = *o;
  if (object->IsSmi()) return false;
  HeapObject* heap_object = HeapObject::cast(object);
  HeapObject heap_object = HeapObject::cast(object);
  if (!object->IsJSObject()) return false;
  JSObject js_object = JSObject::cast(object);
  if (!js_object->IsDroppableApiWrapper()) return false;
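A pattern worth noting in these hunks is that the local `HeapObject heap_object;` declared before calls such as ToSmi or GetHeapObjectIfWeak is now a value handle filled in through an out-parameter, not a raw pointer that could dangle. A compact sketch of that decode-into-a-value-handle pattern, with invented names and an assumed weak-marker bit that are not V8's real constants:

// Sketch only: a MaybeObject-style word decoded into a value-type handle.
#include <cassert>
#include <cstdint>

using Address = uintptr_t;

struct Handle {          // stand-in for the value-type HeapObject
  Address ptr = 0;
};

constexpr Address kWeakBit = 2;  // assumed marker bit for weak references

inline bool GetIfWeak(Address maybe, Handle* out) {
  if ((maybe & kWeakBit) == 0) return false;  // strong or cleared: not weak
  out->ptr = maybe & ~kWeakBit;               // strip the weak marker
  return true;
}

int main() {
  Handle heap_object;  // lives on the stack, no allocation or ownership issues
  Address weak_ref = 0x7001 | kWeakBit;
  assert(GetIfWeak(weak_ref, &heap_object));
  assert(heap_object.ptr == 0x7001);
  return 0;
}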
@ -3464,21 +3468,21 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
|
||||
if (IsString()) {
|
||||
HeapStringAllocator allocator;
|
||||
StringStream accumulator(&allocator);
|
||||
String::cast(this)->StringShortPrint(&accumulator);
|
||||
String::cast(*this)->StringShortPrint(&accumulator);
|
||||
os << accumulator.ToCString().get();
|
||||
return;
|
||||
}
|
||||
if (IsJSObject()) {
|
||||
HeapStringAllocator allocator;
|
||||
StringStream accumulator(&allocator);
|
||||
JSObject::cast(this)->JSObjectShortPrint(&accumulator);
|
||||
JSObject::cast(*this)->JSObjectShortPrint(&accumulator);
|
||||
os << accumulator.ToCString().get();
|
||||
return;
|
||||
}
|
||||
switch (map()->instance_type()) {
|
||||
case MAP_TYPE: {
|
||||
os << "<Map";
|
||||
Map mapInstance = Map::cast(this);
|
||||
Map mapInstance = Map::cast(*this);
|
||||
if (mapInstance->IsJSObjectMap()) {
|
||||
os << "(" << ElementsKindToString(mapInstance->elements_kind()) << ")";
|
||||
} else if (mapInstance->instance_size() != kVariableSizeSentinel) {
|
||||
@ -3490,97 +3494,97 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
|
||||
os << "<AwaitContext generator= ";
|
||||
HeapStringAllocator allocator;
|
||||
StringStream accumulator(&allocator);
Context::cast(this)->extension()->ShortPrint(&accumulator);
Context::cast(*this)->extension()->ShortPrint(&accumulator);
os << accumulator.ToCString().get();
os << '>';
break;
}
case BLOCK_CONTEXT_TYPE:
os << "<BlockContext[" << Context::cast(this)->length() << "]>";
os << "<BlockContext[" << Context::cast(*this)->length() << "]>";
break;
case CATCH_CONTEXT_TYPE:
os << "<CatchContext[" << Context::cast(this)->length() << "]>";
os << "<CatchContext[" << Context::cast(*this)->length() << "]>";
break;
case DEBUG_EVALUATE_CONTEXT_TYPE:
os << "<DebugEvaluateContext[" << Context::cast(this)->length() << "]>";
os << "<DebugEvaluateContext[" << Context::cast(*this)->length() << "]>";
break;
case EVAL_CONTEXT_TYPE:
os << "<EvalContext[" << Context::cast(this)->length() << "]>";
os << "<EvalContext[" << Context::cast(*this)->length() << "]>";
break;
case FUNCTION_CONTEXT_TYPE:
os << "<FunctionContext[" << Context::cast(this)->length() << "]>";
os << "<FunctionContext[" << Context::cast(*this)->length() << "]>";
break;
case MODULE_CONTEXT_TYPE:
os << "<ModuleContext[" << Context::cast(this)->length() << "]>";
os << "<ModuleContext[" << Context::cast(*this)->length() << "]>";
break;
case NATIVE_CONTEXT_TYPE:
os << "<NativeContext[" << Context::cast(this)->length() << "]>";
os << "<NativeContext[" << Context::cast(*this)->length() << "]>";
break;
case SCRIPT_CONTEXT_TYPE:
os << "<ScriptContext[" << Context::cast(this)->length() << "]>";
os << "<ScriptContext[" << Context::cast(*this)->length() << "]>";
break;
case WITH_CONTEXT_TYPE:
os << "<WithContext[" << Context::cast(this)->length() << "]>";
os << "<WithContext[" << Context::cast(*this)->length() << "]>";
break;
case SCRIPT_CONTEXT_TABLE_TYPE:
os << "<ScriptContextTable[" << FixedArray::cast(this)->length() << "]>";
os << "<ScriptContextTable[" << FixedArray::cast(*this)->length() << "]>";
break;
case HASH_TABLE_TYPE:
os << "<HashTable[" << FixedArray::cast(this)->length() << "]>";
os << "<HashTable[" << FixedArray::cast(*this)->length() << "]>";
break;
case ORDERED_HASH_MAP_TYPE:
os << "<OrderedHashMap[" << FixedArray::cast(this)->length() << "]>";
os << "<OrderedHashMap[" << FixedArray::cast(*this)->length() << "]>";
break;
case ORDERED_HASH_SET_TYPE:
os << "<OrderedHashSet[" << FixedArray::cast(this)->length() << "]>";
os << "<OrderedHashSet[" << FixedArray::cast(*this)->length() << "]>";
break;
case ORDERED_NAME_DICTIONARY_TYPE:
os << "<OrderedNameDictionary[" << FixedArray::cast(this)->length()
os << "<OrderedNameDictionary[" << FixedArray::cast(*this)->length()
<< "]>";
break;
case NAME_DICTIONARY_TYPE:
os << "<NameDictionary[" << FixedArray::cast(this)->length() << "]>";
os << "<NameDictionary[" << FixedArray::cast(*this)->length() << "]>";
break;
case GLOBAL_DICTIONARY_TYPE:
os << "<GlobalDictionary[" << FixedArray::cast(this)->length() << "]>";
os << "<GlobalDictionary[" << FixedArray::cast(*this)->length() << "]>";
break;
case NUMBER_DICTIONARY_TYPE:
os << "<NumberDictionary[" << FixedArray::cast(this)->length() << "]>";
os << "<NumberDictionary[" << FixedArray::cast(*this)->length() << "]>";
break;
case SIMPLE_NUMBER_DICTIONARY_TYPE:
os << "<SimpleNumberDictionary[" << FixedArray::cast(this)->length()
os << "<SimpleNumberDictionary[" << FixedArray::cast(*this)->length()
<< "]>";
break;
case STRING_TABLE_TYPE:
os << "<StringTable[" << FixedArray::cast(this)->length() << "]>";
os << "<StringTable[" << FixedArray::cast(*this)->length() << "]>";
break;
case FIXED_ARRAY_TYPE:
os << "<FixedArray[" << FixedArray::cast(this)->length() << "]>";
os << "<FixedArray[" << FixedArray::cast(*this)->length() << "]>";
break;
case OBJECT_BOILERPLATE_DESCRIPTION_TYPE:
os << "<ObjectBoilerplateDescription[" << FixedArray::cast(this)->length()
<< "]>";
os << "<ObjectBoilerplateDescription["
<< FixedArray::cast(*this)->length() << "]>";
break;
case FIXED_DOUBLE_ARRAY_TYPE:
os << "<FixedDoubleArray[" << FixedDoubleArray::cast(this)->length()
os << "<FixedDoubleArray[" << FixedDoubleArray::cast(*this)->length()
<< "]>";
break;
case BYTE_ARRAY_TYPE:
os << "<ByteArray[" << ByteArray::cast(this)->length() << "]>";
os << "<ByteArray[" << ByteArray::cast(*this)->length() << "]>";
break;
case BYTECODE_ARRAY_TYPE:
os << "<BytecodeArray[" << BytecodeArray::cast(this)->length() << "]>";
os << "<BytecodeArray[" << BytecodeArray::cast(*this)->length() << "]>";
break;
case DESCRIPTOR_ARRAY_TYPE:
os << "<DescriptorArray["
<< DescriptorArray::cast(this)->number_of_descriptors() << "]>";
<< DescriptorArray::cast(*this)->number_of_descriptors() << "]>";
break;
case TRANSITION_ARRAY_TYPE:
os << "<TransitionArray[" << TransitionArray::cast(this)->length()
os << "<TransitionArray[" << TransitionArray::cast(*this)->length()
<< "]>";
break;
case PROPERTY_ARRAY_TYPE:
os << "<PropertyArray[" << PropertyArray::cast(this)->length() << "]>";
os << "<PropertyArray[" << PropertyArray::cast(*this)->length() << "]>";
break;
case FEEDBACK_CELL_TYPE: {
{
@ -3602,29 +3606,29 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
break;
}
case FEEDBACK_VECTOR_TYPE:
os << "<FeedbackVector[" << FeedbackVector::cast(this)->length() << "]>";
os << "<FeedbackVector[" << FeedbackVector::cast(*this)->length() << "]>";
break;
case FREE_SPACE_TYPE:
os << "<FreeSpace[" << FreeSpace::cast(this)->size() << "]>";
os << "<FreeSpace[" << FreeSpace::cast(*this)->size() << "]>";
break;
#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype) \
case FIXED_##TYPE##_ARRAY_TYPE: \
os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(this)->length() \
<< "]>"; \
#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype) \
case FIXED_##TYPE##_ARRAY_TYPE: \
os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(*this)->length() \
<< "]>"; \
break;

TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
#undef TYPED_ARRAY_SHORT_PRINT

case PRE_PARSED_SCOPE_DATA_TYPE: {
PreParsedScopeData data = PreParsedScopeData::cast(this);
PreParsedScopeData data = PreParsedScopeData::cast(*this);
os << "<PreParsedScopeData[" << data->length() << "]>";
break;
}

case UNCOMPILED_DATA_WITHOUT_PRE_PARSED_SCOPE_TYPE: {
UncompiledDataWithoutPreParsedScope data =
UncompiledDataWithoutPreParsedScope::cast(this);
UncompiledDataWithoutPreParsedScope::cast(*this);
os << "<UncompiledDataWithoutPreParsedScope (" << data->start_position()
<< ", " << data->end_position() << ")]>";
break;
@ -3632,7 +3636,7 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT

case UNCOMPILED_DATA_WITH_PRE_PARSED_SCOPE_TYPE: {
UncompiledDataWithPreParsedScope data =
UncompiledDataWithPreParsedScope::cast(this);
UncompiledDataWithPreParsedScope::cast(*this);
os << "<UncompiledDataWithPreParsedScope (" << data->start_position()
<< ", " << data->end_position()
<< ") preparsed=" << Brief(data->pre_parsed_scope_data()) << ">";
@ -3640,7 +3644,7 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
}

case SHARED_FUNCTION_INFO_TYPE: {
SharedFunctionInfo shared = SharedFunctionInfo::cast(this);
SharedFunctionInfo shared = SharedFunctionInfo::cast(*this);
std::unique_ptr<char[]> debug_name = shared->DebugName()->ToCString();
if (debug_name[0] != 0) {
os << "<SharedFunctionInfo " << debug_name.get() << ">";
@ -3652,29 +3656,29 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
case JS_MESSAGE_OBJECT_TYPE:
os << "<JSMessageObject>";
break;
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
case TYPE: \
os << "<" #Name; \
Name::cast(this)->BriefPrintDetails(os); \
os << ">"; \
#define MAKE_STRUCT_CASE(TYPE, Name, name) \
case TYPE: \
os << "<" #Name; \
Name::cast(*this)->BriefPrintDetails(os); \
os << ">"; \
break;
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
case ALLOCATION_SITE_TYPE: {
os << "<AllocationSite";
AllocationSite::cast(this)->BriefPrintDetails(os);
AllocationSite::cast(*this)->BriefPrintDetails(os);
os << ">";
break;
}
case SCOPE_INFO_TYPE: {
ScopeInfo scope = ScopeInfo::cast(this);
ScopeInfo scope = ScopeInfo::cast(*this);
os << "<ScopeInfo";
if (scope->length()) os << " " << scope->scope_type() << " ";
os << "[" << scope->length() << "]>";
break;
}
case CODE_TYPE: {
Code code = Code::cast(this);
Code code = Code::cast(*this);
os << "<Code " << Code::Kind2String(code->kind());
if (code->is_builtin()) {
os << " " << Builtins::name(code->builtin_index());
@ -3695,31 +3699,31 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
os << "<false>";
} else {
os << "<Odd Oddball: ";
os << Oddball::cast(this)->to_string()->ToCString().get();
os << Oddball::cast(*this)->to_string()->ToCString().get();
os << ">";
}
break;
}
case SYMBOL_TYPE: {
Symbol symbol = Symbol::cast(this);
Symbol symbol = Symbol::cast(*this);
symbol->SymbolShortPrint(os);
break;
}
case HEAP_NUMBER_TYPE: {
os << "<HeapNumber ";
HeapNumber::cast(this)->HeapNumberPrint(os);
HeapNumber::cast(*this)->HeapNumberPrint(os);
os << ">";
break;
}
case MUTABLE_HEAP_NUMBER_TYPE: {
os << "<MutableHeapNumber ";
MutableHeapNumber::cast(this)->MutableHeapNumberPrint(os);
MutableHeapNumber::cast(*this)->MutableHeapNumberPrint(os);
os << '>';
break;
}
case BIGINT_TYPE: {
os << "<BigInt ";
BigInt::cast(this)->BigIntShortPrint(os);
BigInt::cast(*this)->BigIntShortPrint(os);
os << ">";
break;
}
@ -3733,13 +3737,13 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
os << "<Cell value= ";
HeapStringAllocator allocator;
StringStream accumulator(&allocator);
Cell::cast(this)->value()->ShortPrint(&accumulator);
Cell::cast(*this)->value()->ShortPrint(&accumulator);
os << accumulator.ToCString().get();
os << '>';
break;
}
case PROPERTY_CELL_TYPE: {
PropertyCell cell = PropertyCell::cast(this);
PropertyCell cell = PropertyCell::cast(*this);
os << "<PropertyCell name=";
cell->name()->ShortPrint(os);
os << " value=";
@ -3751,7 +3755,7 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
break;
}
case CALL_HANDLER_INFO_TYPE: {
CallHandlerInfo info = CallHandlerInfo::cast(this);
CallHandlerInfo info = CallHandlerInfo::cast(*this);
os << "<CallHandlerInfo ";
os << "callback= " << Brief(info->callback());
os << ", js_callback= " << Brief(info->js_callback());
@ -3803,7 +3807,7 @@ void HeapObject::IterateBody(Map map, int object_size, ObjectVisitor* v) {

struct CallIsValidSlot {
template <typename BodyDescriptor>
static bool apply(Map map, HeapObject* obj, int offset, int) {
static bool apply(Map map, HeapObject obj, int offset, int) {
return BodyDescriptor::IsValidSlot(map, obj, offset);
}
};
@ -3811,7 +3815,7 @@ struct CallIsValidSlot {
bool HeapObject::IsValidSlot(Map map, int offset) {
DCHECK_NE(0, offset);
return BodyDescriptorApply<CallIsValidSlot, bool>(map->instance_type(), map,
this, offset, 0);
*this, offset, 0);
}
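Note (illustrative, not part of this commit): throughout these hunks the receiver changes from `this` (a `HeapObject*`) to `*this` (a `HeapObject` value), because `HeapObject` is now a small value type wrapping the object's address. The sketch below is a minimal standalone model of that shape; `Address`, `ptr()`, `is_null()` and `ShortPrint` mirror the V8 names, but the implementation is invented for illustration.

```cpp
#include <cstdint>
#include <iostream>

using Address = std::uintptr_t;

class HeapObject {
 public:
  HeapObject() : ptr_(kNullAddress) {}            // a "null" value, not nullptr
  explicit HeapObject(Address ptr) : ptr_(ptr) {}

  Address ptr() const { return ptr_; }
  bool is_null() const { return ptr_ == kNullAddress; }

  // Member functions now operate on a value; where the old code passed
  // `this` (a HeapObject*), the new code passes `*this` (a HeapObject).
  void ShortPrint(std::ostream& os) const {
    os << "<HeapObject " << std::hex << ptr_ << ">";
  }

 private:
  static constexpr Address kNullAddress = 0;
  Address ptr_;  // the only field: the address of the object
};

// A helper that used to take HeapObject* now takes HeapObject by value.
void Describe(std::ostream& os, HeapObject obj) {
  if (obj.is_null()) {
    os << "<null>";
  } else {
    obj.ShortPrint(os);
  }
}

int main() {
  HeapObject o(0x1234);
  Describe(std::cout, o);  // prints <HeapObject 1234>
  std::cout << "\n";
}
```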

String JSReceiver::class_name() {
@ -3895,11 +3899,11 @@ bool HeapObject::CanBeRehashed() const {
case TRANSITION_ARRAY_TYPE:
return true;
case SMALL_ORDERED_HASH_MAP_TYPE:
return SmallOrderedHashMap::cast(this)->NumberOfElements() == 0;
return SmallOrderedHashMap::cast(*this)->NumberOfElements() == 0;
case SMALL_ORDERED_HASH_SET_TYPE:
return SmallOrderedHashMap::cast(this)->NumberOfElements() == 0;
return SmallOrderedHashMap::cast(*this)->NumberOfElements() == 0;
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
return SmallOrderedNameDictionary::cast(this)->NumberOfElements() == 0;
return SmallOrderedNameDictionary::cast(*this)->NumberOfElements() == 0;
default:
return false;
}
@ -3912,35 +3916,35 @@ void HeapObject::RehashBasedOnMap(Isolate* isolate) {
UNREACHABLE();
break;
case NAME_DICTIONARY_TYPE:
NameDictionary::cast(this)->Rehash(isolate);
NameDictionary::cast(*this)->Rehash(isolate);
break;
case GLOBAL_DICTIONARY_TYPE:
GlobalDictionary::cast(this)->Rehash(isolate);
GlobalDictionary::cast(*this)->Rehash(isolate);
break;
case NUMBER_DICTIONARY_TYPE:
NumberDictionary::cast(this)->Rehash(isolate);
NumberDictionary::cast(*this)->Rehash(isolate);
break;
case SIMPLE_NUMBER_DICTIONARY_TYPE:
SimpleNumberDictionary::cast(this)->Rehash(isolate);
SimpleNumberDictionary::cast(*this)->Rehash(isolate);
break;
case STRING_TABLE_TYPE:
StringTable::cast(this)->Rehash(isolate);
StringTable::cast(*this)->Rehash(isolate);
break;
case DESCRIPTOR_ARRAY_TYPE:
DCHECK_LE(1, DescriptorArray::cast(this)->number_of_descriptors());
DescriptorArray::cast(this)->Sort();
DCHECK_LE(1, DescriptorArray::cast(*this)->number_of_descriptors());
DescriptorArray::cast(*this)->Sort();
break;
case TRANSITION_ARRAY_TYPE:
TransitionArray::cast(this)->Sort();
TransitionArray::cast(*this)->Sort();
break;
case SMALL_ORDERED_HASH_MAP_TYPE:
DCHECK_EQ(0, SmallOrderedHashMap::cast(this)->NumberOfElements());
DCHECK_EQ(0, SmallOrderedHashMap::cast(*this)->NumberOfElements());
break;
case SMALL_ORDERED_HASH_SET_TYPE:
DCHECK_EQ(0, SmallOrderedHashSet::cast(this)->NumberOfElements());
DCHECK_EQ(0, SmallOrderedHashSet::cast(*this)->NumberOfElements());
break;
case SMALL_ORDERED_NAME_DICTIONARY_TYPE:
DCHECK_EQ(0, SmallOrderedNameDictionary::cast(this)->NumberOfElements());
DCHECK_EQ(0, SmallOrderedNameDictionary::cast(*this)->NumberOfElements());
break;
default:
break;
@ -4057,7 +4061,7 @@ FieldType Map::UnwrapFieldType(MaybeObject wrapped_type) {
if (wrapped_type->IsCleared()) {
return FieldType::None();
}
HeapObject* heap_object;
HeapObject heap_object;
if (wrapped_type->GetHeapObjectIfWeak(&heap_object)) {
return FieldType::cast(heap_object);
}
@ -6622,7 +6626,7 @@ MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
PropertyNormalizationMode mode) {
DisallowHeapAllocation no_gc;
MaybeObject value = WeakFixedArray::Get(GetIndex(fast_map));
HeapObject* heap_object;
HeapObject heap_object;
if (!value->GetHeapObjectIfWeak(&heap_object)) {
return MaybeHandle<Map>();
}
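Note (illustrative, not part of this commit): the recurring `HeapObject* heap_object;` to `HeapObject heap_object;` change keeps the `GetHeapObjectIfWeak(&heap_object)` call sites intact, because the out-parameter is now a pointer to a value wrapper rather than a pointer to a pointer. A minimal standalone sketch, where `MaybeObject` is a simplified stand-in and not V8's actual class:

```cpp
#include <cstdint>
#include <iostream>

using Address = std::uintptr_t;

class HeapObject {
 public:
  HeapObject() = default;
  explicit HeapObject(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
  bool is_null() const { return ptr_ == 0; }
 private:
  Address ptr_ = 0;
};

class MaybeObject {
 public:
  MaybeObject(Address value, bool weak) : value_(value), weak_(weak) {}
  // On success, writes the referenced object into *result and returns true.
  bool GetHeapObjectIfWeak(HeapObject* result) const {
    if (!weak_) return false;
    *result = HeapObject(value_);
    return true;
  }
 private:
  Address value_;
  bool weak_;
};

int main() {
  MaybeObject slot(0x1234, /*weak=*/true);
  HeapObject heap_object;  // a value, default-constructed to the null state
  if (slot.GetHeapObjectIfWeak(&heap_object)) {
    std::cout << std::hex << heap_object.ptr() << "\n";  // prints 1234
  }
}
```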
@ -6896,7 +6900,7 @@ Handle<NumberDictionary> JSObject::NormalizeElements(Handle<JSObject> object) {

namespace {

Object* SetHashAndUpdateProperties(HeapObject* properties, int hash) {
Object* SetHashAndUpdateProperties(HeapObject properties, int hash) {
DCHECK_NE(PropertyArray::kNoHashSentinel, hash);
DCHECK(PropertyArray::HashField::is_valid(hash));

@ -6957,13 +6961,13 @@ void JSReceiver::SetIdentityHash(int hash) {
DCHECK_NE(PropertyArray::kNoHashSentinel, hash);
DCHECK(PropertyArray::HashField::is_valid(hash));

HeapObject* existing_properties = HeapObject::cast(raw_properties_or_hash());
HeapObject existing_properties = HeapObject::cast(raw_properties_or_hash());
Object* new_properties =
SetHashAndUpdateProperties(existing_properties, hash);
set_raw_properties_or_hash(new_properties);
}

void JSReceiver::SetProperties(HeapObject* properties) {
void JSReceiver::SetProperties(HeapObject properties) {
DCHECK_IMPLIES(properties->IsPropertyArray() &&
PropertyArray::cast(properties)->length() == 0,
properties == GetReadOnlyRoots().empty_property_array());
@ -10309,7 +10313,7 @@ Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
if (!key->IsPrivate()) {
int mask = DONT_DELETE | DONT_ENUM;
// READ_ONLY is an invalid attribute for JS setters/getters.
HeapObject* heap_object;
HeapObject heap_object;
if (details.kind() != kAccessor ||
!(value_or_field_type->GetHeapObjectIfStrong(&heap_object) &&
heap_object->IsAccessorPair())) {
@ -10478,7 +10482,7 @@ void FixedArray::CopyTo(int pos, FixedArray dest, int dest_pos, int len) const {
}
}

void JSObject::PrototypeRegistryCompactionCallback(HeapObject* value,
void JSObject::PrototypeRegistryCompactionCallback(HeapObject value,
int old_index,
int new_index) {
DCHECK(value->IsMap() && Map::cast(value)->is_prototype_map());
@ -10693,7 +10697,7 @@ WeakArrayList PrototypeUsers::Compact(Handle<WeakArrayList> array, Heap* heap,
int copy_to = kFirstIndex;
for (int i = kFirstIndex; i < array->length(); i++) {
MaybeObject element = array->Get(i);
HeapObject* value;
HeapObject value;
if (element->GetHeapObjectIfWeak(&value)) {
callback(value, i, copy_to);
new_array->Set(copy_to++, element);
@ -10776,7 +10780,7 @@ Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
}

void DescriptorArray::Initialize(EnumCache enum_cache,
HeapObject* undefined_value,
HeapObject undefined_value,
int nof_descriptors, int slack) {
DCHECK_GE(nof_descriptors, 0);
DCHECK_GE(slack, 0);
@ -13012,7 +13016,7 @@ void InvalidatePrototypeChainsInternal(Map map) {
// For now, only maps register themselves as users.
for (int i = PrototypeUsers::kFirstIndex; i < prototype_users->length();
++i) {
HeapObject* heap_object;
HeapObject heap_object;
if (prototype_users->Get(i)->GetHeapObjectIfWeak(&heap_object) &&
heap_object->IsMap()) {
// Walk the prototype chain (backwards, towards leaf objects) if
@ -13922,7 +13926,7 @@ MaybeHandle<SharedFunctionInfo> Script::FindSharedFunctionInfo(
// triggers the mismatch.
CHECK_LT(fun->function_literal_id(), shared_function_infos()->length());
MaybeObject shared = shared_function_infos()->Get(fun->function_literal_id());
HeapObject* heap_object;
HeapObject heap_object;
if (!shared->GetHeapObject(&heap_object) ||
heap_object->IsUndefined(isolate)) {
return MaybeHandle<SharedFunctionInfo>();
@ -14002,7 +14006,7 @@ SharedFunctionInfo::ScriptIterator::ScriptIterator(
SharedFunctionInfo SharedFunctionInfo::ScriptIterator::Next() {
while (index_ < shared_function_infos_->length()) {
MaybeObject raw = shared_function_infos_->Get(index_++);
HeapObject* heap_object;
HeapObject heap_object;
if (!raw->GetHeapObject(&heap_object) ||
heap_object->IsUndefined(isolate_)) {
continue;
@ -14023,11 +14027,11 @@ SharedFunctionInfo::GlobalIterator::GlobalIterator(Isolate* isolate)
sfi_iterator_(isolate, script_iterator_.Next()) {}

SharedFunctionInfo SharedFunctionInfo::GlobalIterator::Next() {
HeapObject* next = noscript_sfi_iterator_.Next();
if (next != nullptr) return SharedFunctionInfo::cast(next);
HeapObject next = noscript_sfi_iterator_.Next();
if (!next.is_null()) return SharedFunctionInfo::cast(next);
for (;;) {
next = sfi_iterator_.Next();
if (next != nullptr) return SharedFunctionInfo::cast(next);
if (!next.is_null()) return SharedFunctionInfo::cast(next);
Script next_script = script_iterator_.Next();
if (next_script.is_null()) return SharedFunctionInfo();
sfi_iterator_.Reset(next_script);
@ -14058,7 +14062,7 @@ void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
#ifdef DEBUG
DCHECK_LT(function_literal_id, list->length());
MaybeObject maybe_object = list->Get(function_literal_id);
HeapObject* heap_object;
HeapObject heap_object;
if (maybe_object->GetHeapObjectIfWeak(&heap_object)) {
DCHECK_EQ(heap_object, *shared);
}
@ -14077,8 +14081,8 @@ void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
WeakArrayList::Iterator iterator(*list);
HeapObject* next;
while ((next = iterator.Next()) != nullptr) {
for (HeapObject next = iterator.Next(); !next.is_null();
next = iterator.Next()) {
DCHECK_NE(next, *shared);
}
}
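Note (illustrative, not part of this commit): with a value-type `HeapObject`, iterators return a null value instead of `nullptr` when exhausted, so `while ((next = it.Next()) != nullptr)` becomes a loop that checks `is_null()`. A minimal standalone sketch of that idiom; `WeakListIterator` below is an invented stand-in, not V8's `WeakArrayList::Iterator`:

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

using Address = std::uintptr_t;

class HeapObject {
 public:
  HeapObject() = default;
  explicit HeapObject(Address ptr) : ptr_(ptr) {}
  bool is_null() const { return ptr_ == 0; }
  Address ptr() const { return ptr_; }
 private:
  Address ptr_ = 0;
};

class WeakListIterator {
 public:
  explicit WeakListIterator(const std::vector<Address>& slots) : slots_(slots) {}
  // Returns a null HeapObject (instead of nullptr) when the list is exhausted.
  HeapObject Next() {
    if (index_ >= slots_.size()) return HeapObject();
    return HeapObject(slots_[index_++]);
  }
 private:
  const std::vector<Address>& slots_;
  size_t index_ = 0;
};

int main() {
  std::vector<Address> slots = {0x10, 0x20, 0x30};
  WeakListIterator it(slots);
  // Old style: while ((next = it.Next()) != nullptr) { ... }
  // New style: the loop condition checks is_null() on the returned value.
  for (HeapObject next = it.Next(); !next.is_null(); next = it.Next()) {
    std::cout << std::hex << next.ptr() << "\n";
  }
}
```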
@ -14098,7 +14102,7 @@ void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
if (function_literal_id < infos->length()) {
MaybeObject raw =
old_script->shared_function_infos()->Get(function_literal_id);
HeapObject* heap_object;
HeapObject heap_object;
if (raw->GetHeapObjectIfWeak(&heap_object) && heap_object == *shared) {
old_script->shared_function_infos()->Set(
function_literal_id, HeapObjectReference::Strong(
@ -14157,12 +14161,12 @@ bool SharedFunctionInfo::HasSourceCode() const {
}

void SharedFunctionInfo::DiscardCompiledMetadata(
Isolate* isolate, std::function<void(HeapObjectPtr object, ObjectSlot slot,
HeapObjectPtr target)>
gc_notify_updated_slot) {
Isolate* isolate,
std::function<void(HeapObject object, ObjectSlot slot, HeapObject target)>
gc_notify_updated_slot) {
DisallowHeapAllocation no_gc;
if (is_compiled()) {
HeapObjectPtr outer_scope_info;
HeapObject outer_scope_info;
if (scope_info()->HasOuterScopeInfo()) {
outer_scope_info = scope_info()->OuterScopeInfo();
} else {
@ -14610,7 +14614,7 @@ void ObjectVisitor::VisitRelocInfo(RelocIterator* it) {
}

void Code::ClearEmbeddedObjects(Heap* heap) {
HeapObject* undefined = ReadOnlyRoots(heap).undefined_value();
HeapObject undefined = ReadOnlyRoots(heap).undefined_value();
int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
@ -16638,7 +16642,7 @@ Handle<Object> JSPromise::TriggerPromiseReactions(Isolate* isolate,
PromiseFulfillReactionJobTask::kPromiseOrCapabilityOffset));
} else {
DisallowHeapAllocation no_gc;
HeapObject* handler = reaction->reject_handler();
HeapObject handler = reaction->reject_handler();
task->synchronized_set_map(
ReadOnlyRoots(isolate).promise_reject_reaction_job_task_map());
Handle<PromiseRejectReactionJobTask>::cast(task)->set_argument(*argument);

@ -537,7 +537,7 @@ typedef ByteArray ByteArrayArgType;
typedef FixedArray FixedArrayArgType;
typedef FixedDoubleArray FixedDoubleArrayArgType;
typedef Foreign ForeignArgType;
typedef HeapObject* HeapObjectArgType;
typedef HeapObject HeapObjectArgType;
typedef JSArray JSArrayArgType;
typedef JSAsyncGeneratorObject JSAsyncGeneratorObjectArgType;
typedef JSFunction JSFunctionArgType;
@ -1013,10 +1013,10 @@ class MapWord {
inline bool IsForwardingAddress() const;

// Create a map word from a forwarding address.
static inline MapWord FromForwardingAddress(HeapObject* object);
static inline MapWord FromForwardingAddress(HeapObject object);

// View this map word as a forwarding address.
inline HeapObject* ToForwardingAddress();
inline HeapObject ToForwardingAddress();

static inline MapWord FromRawValue(uintptr_t value) {
return MapWord(value);
@ -1029,7 +1029,6 @@ class MapWord {
private:
// HeapObject calls the private constructor and directly reads the value.
friend class HeapObject;
friend class HeapObjectPtr;

explicit MapWord(Address value) : value_(value) {}
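Note (illustrative, not part of this commit): `MapWord::FromForwardingAddress` and `ToForwardingAddress` now convert between a map word and a `HeapObject` value rather than a pointer. A minimal standalone sketch of that round trip, with the tagging and field layout deliberately simplified:

```cpp
#include <cassert>
#include <cstdint>

using Address = std::uintptr_t;

class HeapObject;

class MapWord {
 public:
  // Create a map word from a forwarding address (now taken by value).
  static MapWord FromForwardingAddress(HeapObject object);
  // View this map word as a forwarding address (now returned by value).
  HeapObject ToForwardingAddress() const;

 private:
  explicit MapWord(Address value) : value_(value) {}
  Address value_;
};

class HeapObject {
 public:
  explicit HeapObject(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
  static HeapObject FromAddress(Address address) { return HeapObject(address); }
 private:
  Address ptr_;
};

MapWord MapWord::FromForwardingAddress(HeapObject object) {
  return MapWord(object.ptr());
}

HeapObject MapWord::ToForwardingAddress() const {
  return HeapObject::FromAddress(value_);
}

int main() {
  HeapObject obj(0x4000);
  MapWord word = MapWord::FromForwardingAddress(obj);
  assert(word.ToForwardingAddress().ptr() == obj.ptr());
}
```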

@ -22,7 +22,7 @@ class ValueSerializer;

// BigIntBase is just the raw data object underlying a BigInt. Use with care!
// Most code should be using BigInts instead.
class BigIntBase : public HeapObjectPtr {
class BigIntBase : public HeapObject {
public:
inline int length() const {
int32_t bitfield = RELAXED_READ_INT32_FIELD(this, kBitfieldOffset);
@ -96,7 +96,7 @@ class BigIntBase : public HeapObjectPtr {
// Only serves to make macros happy; other code should use IsBigInt.
bool IsBigIntBase() const { return true; }

OBJECT_CONSTRUCTORS(BigIntBase, HeapObjectPtr);
OBJECT_CONSTRUCTORS(BigIntBase, HeapObject);
};

class FreshlyAllocatedBigInt : public BigIntBase {

@ -15,7 +15,7 @@
namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(Cell, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(Cell, HeapObject)

CAST_ACCESSOR2(Cell)

@ -13,7 +13,7 @@
namespace v8 {
namespace internal {

class Cell : public HeapObjectPtr {
class Cell : public HeapObject {
public:
// [value]: value of the cell.
DECL_ACCESSORS(value, Object)
@ -35,7 +35,7 @@ class Cell : public HeapObjectPtr {
typedef FixedBodyDescriptor<kValueOffset, kValueOffset + kPointerSize, kSize>
BodyDescriptor;

OBJECT_CONSTRUCTORS(Cell, HeapObjectPtr);
OBJECT_CONSTRUCTORS(Cell, HeapObject);
};

} // namespace internal
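Note (illustrative, not part of this commit): the base-class switch from `HeapObjectPtr` back to `HeapObject` goes hand in hand with the `OBJECT_CONSTRUCTORS` macros, which give each value-type class constructors that forward the wrapped address to its base. The expansion below is an assumption made for illustration only; it is not copied from V8's actual macro definitions.

```cpp
#include <cstdint>

using Address = std::uintptr_t;

class HeapObject {
 public:
  HeapObject() = default;
  explicit HeapObject(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
 private:
  Address ptr_ = 0;
};

// Hypothetical expansion: a default constructor plus an explicit Address
// constructor that forwards to the immediate base class.
#define OBJECT_CONSTRUCTORS(Type, Base) \
  Type() = default;                     \
  explicit Type(Address ptr) : Base(ptr) {}

class Cell : public HeapObject {
 public:
  OBJECT_CONSTRUCTORS(Cell, HeapObject);
};

int main() {
  Cell cell(0x42);
  return cell.ptr() == 0x42 ? 0 : 1;
}
```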

@ -24,9 +24,9 @@ namespace internal {

OBJECT_CONSTRUCTORS_IMPL(DeoptimizationData, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(BytecodeArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(DependentCode, WeakFixedArray)
OBJECT_CONSTRUCTORS_IMPL(CodeDataContainer, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(CodeDataContainer, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(SourcePositionTableWithFrameCache, Tuple2)

NEVER_READ_ONLY_SPACE_IMPL(AbstractCode)
@ -190,7 +190,7 @@ void DependentCode::copy(int from, int to) {
Set(kCodesStartIndex + to, Get(kCodesStartIndex + from));
}

OBJECT_CONSTRUCTORS_IMPL(Code, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(Code, HeapObject)
NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
@ -594,7 +594,7 @@ Code Code::GetCodeFromTargetAddress(Address address) {
CHECK(address < start || address >= end);
}

HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
HeapObject code = HeapObject::FromAddress(address - Code::kHeaderSize);
// Unchecked cast because we can't rely on the map currently
// not being a forwarding pointer.
return Code::unchecked_cast(code);
@ -602,7 +602,7 @@ Code Code::GetCodeFromTargetAddress(Address address) {

Code Code::GetObjectFromEntryAddress(Address location_of_address) {
Address code_entry = Memory<Address>(location_of_address);
HeapObject* code = HeapObject::FromAddress(code_entry - Code::kHeaderSize);
HeapObject code = HeapObject::FromAddress(code_entry - Code::kHeaderSize);
// Unchecked cast because we can't rely on the map currently
// not being a forwarding pointer.
return Code::unchecked_cast(code);
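Note (illustrative, not part of this commit): `HeapObject::FromAddress` now yields a value, and the typed wrapper is produced by an unchecked cast that simply re-wraps the same address. A minimal standalone sketch of that pattern; the `Code` class and its header size here are invented stand-ins:

```cpp
#include <cstdint>
#include <iostream>

using Address = std::uintptr_t;

class HeapObject {
 public:
  explicit HeapObject(Address ptr) : ptr_(ptr) {}
  Address ptr() const { return ptr_; }
  static HeapObject FromAddress(Address address) { return HeapObject(address); }
 private:
  Address ptr_;
};

class Code : public HeapObject {
 public:
  static constexpr Address kHeaderSize = 32;  // hypothetical value
  // "Unchecked" because no type check is performed on the wrapped address.
  static Code unchecked_cast(HeapObject object) { return Code(object.ptr()); }
 private:
  explicit Code(Address ptr) : HeapObject(ptr) {}
};

Code GetCodeFromTargetAddress(Address address) {
  HeapObject code = HeapObject::FromAddress(address - Code::kHeaderSize);
  return Code::unchecked_cast(code);
}

int main() {
  Code code = GetCodeFromTargetAddress(0x1000 + Code::kHeaderSize);
  std::cout << std::hex << code.ptr() << "\n";  // prints 1000
}
```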
@ -612,11 +612,11 @@ bool Code::CanContainWeakObjects() {
return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(HeapObject* object) {
bool Code::IsWeakObject(HeapObject object) {
return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(HeapObject* object) {
bool Code::IsWeakObjectInOptimizedCode(HeapObject object) {
Map map = object->synchronized_map();
InstanceType instance_type = map->instance_type();
if (InstanceTypeChecker::IsMap(instance_type)) {

@ -28,7 +28,7 @@ class Register;
}

// Code describes objects with on-the-fly generated machine code.
class Code : public HeapObjectPtr {
class Code : public HeapObject {
public:
NEVER_READ_ONLY_SPACE
// Opaque data type for encapsulating code flags like kind, inline
@ -349,9 +349,9 @@ class Code : public HeapObjectPtr {

inline bool CanContainWeakObjects();

inline bool IsWeakObject(HeapObject* object);
inline bool IsWeakObject(HeapObject object);

static inline bool IsWeakObjectInOptimizedCode(HeapObject* object);
static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

// Return true if the function is inlined in the code.
bool Inlines(SharedFunctionInfo sfi);
@ -456,7 +456,7 @@ class Code : public HeapObjectPtr {
bool is_promise_rejection() const;
bool is_exception_caught() const;

OBJECT_CONSTRUCTORS(Code, HeapObjectPtr);
OBJECT_CONSTRUCTORS(Code, HeapObject);
};

class Code::OptimizedCodeIterator {
@ -478,7 +478,7 @@ class Code::OptimizedCodeIterator {
// pages within the heap, its header fields need to be immutable. There always
// is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
// field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObjectPtr {
class CodeDataContainer : public HeapObject {
public:
NEVER_READ_ONLY_SPACE
DECL_ACCESSORS(next_code_link, Object)
@ -511,10 +511,10 @@ class CodeDataContainer : public HeapObjectPtr {

class BodyDescriptor;

OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObjectPtr);
OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};

class AbstractCode : public HeapObjectPtr {
class AbstractCode : public HeapObject {
public:
NEVER_READ_ONLY_SPACE
// All code kinds and INTERPRETED_FUNCTION.
@ -586,7 +586,7 @@ class AbstractCode : public HeapObjectPtr {
// nesting that is deeper than 5 levels into account.
static const int kMaxLoopNestingMarker = 6;

OBJECT_CONSTRUCTORS(AbstractCode, HeapObjectPtr)
OBJECT_CONSTRUCTORS(AbstractCode, HeapObject)
};

// Dependent code is a singly linked list of weak fixed arrays. Each array

@ -23,7 +23,7 @@
namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(DescriptorArray, HeapObjectPtr)
OBJECT_CONSTRUCTORS_IMPL(DescriptorArray, HeapObject)
OBJECT_CONSTRUCTORS_IMPL(EnumCache, Tuple2)

CAST_ACCESSOR2(DescriptorArray)

@ -53,7 +53,7 @@ class EnumCache : public Tuple2 {
// The "value" fields store either values or field types. A field type is either
// FieldType::None(), FieldType::Any() or a weak reference to a Map. All other
// references are strong.
class DescriptorArray : public HeapObjectPtr {
class DescriptorArray : public HeapObject {
public:
DECL_INT16_ACCESSORS(number_of_all_descriptors)
DECL_INT16_ACCESSORS(number_of_descriptors)
@ -127,7 +127,7 @@ class DescriptorArray : public HeapObjectPtr {
Isolate* isolate, int nof_descriptors, int slack,
PretenureFlag pretenure = NOT_TENURED);

void Initialize(EnumCache enum_cache, HeapObject* undefined_value,
void Initialize(EnumCache enum_cache, HeapObject undefined_value,
int nof_descriptors, int slack);

DECL_CAST2(DescriptorArray)
@ -226,7 +226,7 @@ class DescriptorArray : public HeapObjectPtr {
// Swap first and second descriptor.
inline void SwapSortedKeys(int first, int second);

OBJECT_CONSTRUCTORS(DescriptorArray, HeapObjectPtr);
OBJECT_CONSTRUCTORS(DescriptorArray, HeapObject);
};

class NumberOfMarkedDescriptors {