[cleanup] Remove unnecessary static functions from heap-object.h
Change-Id: I626340e61e9c1a46e0d1c882cc4fa86454d93e8a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1526013
Commit-Queue: Mythri Alle <mythria@chromium.org>
Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60293}
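This change replaces the static helpers HeapObject::RawField and HeapObject::RawMaybeWeakField with the equivalent instance methods that remain on HeapObject. As an illustrative sketch of the call-site migration (not part of the commit; the names are taken from the hunks below):

    // Before this commit: static helpers, now removed.
    //   ObjectSlot slot = HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
    //   MaybeObjectSlot weak = HeapObject::RawMaybeWeakField(obj, offset);
    // After: the instance methods kept on HeapObject.
    ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
    MaybeObjectSlot weak = obj.RawMaybeWeakField(offset);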
parent a4ea15575f
commit 34102d9b7f
@@ -228,8 +228,7 @@ class ConcurrentMarkingVisitor final
       if (marking_state_.IsBlackOrGrey(target)) {
         // Record the slot inside the JSWeakRef, since the
         // VisitJSObjectSubclass above didn't visit it.
-        ObjectSlot slot =
-            HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+        ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
         MarkCompactCollector::RecordSlot(weak_ref, slot, target);
       } else {
         // JSWeakRef points to a potentially dead object. We have to process
@@ -251,8 +250,7 @@ class ConcurrentMarkingVisitor final
       if (marking_state_.IsBlackOrGrey(target)) {
         // Record the slot inside the WeakCell, since the IterateBody above
         // didn't visit it.
-        ObjectSlot slot =
-            HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+        ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
         MarkCompactCollector::RecordSlot(weak_cell, slot, target);
       } else {
         // WeakCell points to a potentially dead object. We have to process
@@ -331,8 +329,7 @@ class ConcurrentMarkingVisitor final
         Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
     int end = Min(size, start + kProgressBarScanningChunk);
     if (start < end) {
-      VisitPointers(object, HeapObject::RawField(object, start),
-                    HeapObject::RawField(object, end));
+      VisitPointers(object, object.RawField(start), object.RawField(end));
       chunk->set_progress_bar(end);
       if (end < size) {
         // The object can be pushed back onto the marking worklist only after
@@ -2721,9 +2721,9 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
 
   // Remove recorded slots for the new map and length offset.
-  ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
-  ClearRecordedSlot(new_object, HeapObject::RawField(
-                                    new_object, FixedArrayBase::kLengthOffset));
+  ClearRecordedSlot(new_object, new_object.RawField(0));
+  ClearRecordedSlot(new_object,
+                    new_object.RawField(FixedArrayBase::kLengthOffset));
 
   // Handle invalidated old-to-old slots.
   if (incremental_marking()->IsCompacting() &&
@@ -2737,7 +2737,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
     // we need pointer granularity writes to avoid race with the concurrent
     // marking.
     if (filler->Size() > FreeSpace::kSize) {
-      MemsetTagged(HeapObject::RawField(filler, FreeSpace::kSize),
+      MemsetTagged(filler.RawField(FreeSpace::kSize),
                    ReadOnlyRoots(this).undefined_value(),
                    (filler->Size() - FreeSpace::kSize) / kTaggedSize);
     }
@@ -90,8 +90,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
     collector_->AddBytecodeFlushingCandidate(shared_info);
   } else {
     VisitPointer(shared_info,
-                 HeapObject::RawField(shared_info,
-                                      SharedFunctionInfo::kFunctionDataOffset));
+                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
   }
   return size;
 }
@@ -249,8 +248,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
     if (marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the JSWeakRef, since the IterateBody below
       // won't visit it.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
      collector_->RecordSlot(weak_ref, slot, target);
    } else {
      // JSWeakRef points to a potentially dead object. We have to process
@@ -272,8 +270,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
     if (marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the WeakCell, since the IterateBody below
       // won't visit it.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
      collector_->RecordSlot(weak_cell, slot, target);
    } else {
      // WeakCell points to a potentially dead object. We have to process
@@ -404,8 +401,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
       Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
   int end = Min(size, start + kProgressBarScanningChunk);
   if (start < end) {
-    VisitPointers(object, HeapObject::RawField(object, start),
-                  HeapObject::RawField(object, end));
+    VisitPointers(object, object.RawField(start), object.RawField(end));
     chunk->set_progress_bar(end);
     if (end < size) {
       DCHECK(marking_state()->IsBlack(object));
@@ -2075,8 +2075,8 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
 
     // Now record the slot, which has either been updated to an uncompiled data,
     // or is the BytecodeArray which is still alive.
-    ObjectSlot slot = HeapObject::RawField(
-        flushing_candidate, SharedFunctionInfo::kFunctionDataOffset);
+    ObjectSlot slot =
+        flushing_candidate.RawField(SharedFunctionInfo::kFunctionDataOffset);
     RecordSlot(flushing_candidate, slot, HeapObject::cast(*slot));
   }
 }
@@ -2292,8 +2292,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
       weak_ref->set_target(ReadOnlyRoots(isolate()).undefined_value());
     } else {
       // The value of the JSWeakRef is alive.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
       RecordSlot(weak_ref, slot, target);
     }
   }
@@ -2327,8 +2326,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
       DCHECK(finalization_group->scheduled_for_cleanup());
     } else {
       // The value of the WeakCell is alive.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
       RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
     }
   }
@@ -51,7 +51,7 @@ Object VisitWeakList(Heap* heap, Object list, WeakObjectRetainer* retainer) {
       if (record_slots) {
         HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
         int slot_offset = WeakListVisitor<T>::WeakNextOffset();
-        ObjectSlot slot = HeapObject::RawField(slot_holder, slot_offset);
+        ObjectSlot slot = slot_holder.RawField(slot_offset);
         MarkCompactCollector::RecordSlot(slot_holder, slot,
                                          HeapObject::cast(retained));
       }
@@ -119,34 +119,33 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject obj,
                                                           int start_offset,
                                                           int end_offset,
                                                           ObjectVisitor* v) {
-  v->VisitPointers(obj, HeapObject::RawField(obj, start_offset),
-                   HeapObject::RawField(obj, end_offset));
+  v->VisitPointers(obj, obj.RawField(start_offset), obj.RawField(end_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IteratePointer(HeapObject obj, int offset,
                                         ObjectVisitor* v) {
-  v->VisitPointer(obj, HeapObject::RawField(obj, offset));
+  v->VisitPointer(obj, obj.RawField(offset));
 }
 
 template <typename ObjectVisitor>
 DISABLE_CFI_PERF void BodyDescriptorBase::IterateMaybeWeakPointers(
     HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
-  v->VisitPointers(obj, HeapObject::RawMaybeWeakField(obj, start_offset),
-                   HeapObject::RawMaybeWeakField(obj, end_offset));
+  v->VisitPointers(obj, obj.RawMaybeWeakField(start_offset),
+                   obj.RawMaybeWeakField(end_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject obj, int offset,
                                                  ObjectVisitor* v) {
-  v->VisitPointer(obj, HeapObject::RawMaybeWeakField(obj, offset));
+  v->VisitPointer(obj, obj.RawMaybeWeakField(offset));
 }
 
 template <typename ObjectVisitor>
 DISABLE_CFI_PERF void BodyDescriptorBase::IterateCustomWeakPointers(
     HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
-  v->VisitCustomWeakPointers(obj, HeapObject::RawField(obj, start_offset),
-                             HeapObject::RawField(obj, end_offset));
+  v->VisitCustomWeakPointers(obj, obj.RawField(start_offset),
+                             obj.RawField(end_offset));
 }
 
 template <typename ObjectVisitor>
@@ -155,14 +154,14 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IterateEphemeron(HeapObject obj,
                                                            int key_offset,
                                                            int value_offset,
                                                            ObjectVisitor* v) {
-  v->VisitEphemeron(obj, index, HeapObject::RawField(obj, key_offset),
-                    HeapObject::RawField(obj, value_offset));
+  v->VisitEphemeron(obj, index, obj.RawField(key_offset),
+                    obj.RawField(value_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IterateCustomWeakPointer(HeapObject obj, int offset,
                                                   ObjectVisitor* v) {
-  v->VisitCustomWeakPointer(obj, HeapObject::RawField(obj, offset));
+  v->VisitCustomWeakPointer(obj, obj.RawField(offset));
 }
 
 class JSObject::BodyDescriptor final : public BodyDescriptorBase {
@@ -555,9 +554,8 @@ class Foreign::BodyDescriptor final : public BodyDescriptorBase {
   static inline void IterateBody(Map map, HeapObject obj, int object_size,
                                  ObjectVisitor* v) {
     v->VisitExternalReference(
-        Foreign::cast(obj),
-        reinterpret_cast<Address*>(
-            HeapObject::RawField(obj, kForeignAddressOffset).address()));
+        Foreign::cast(obj), reinterpret_cast<Address*>(
+                                obj.RawField(kForeignAddressOffset).address()));
   }
 
   static inline int SizeOf(Map map, HeapObject object) { return kSize; }
@@ -463,6 +463,7 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
   }
 }
 
+// static
 void HeapObject::VerifyHeapPointer(Isolate* isolate, Object p) {
   CHECK(p->IsHeapObject());
   HeapObject ho = HeapObject::cast(p);
@@ -584,18 +584,10 @@ ObjectSlot HeapObject::RawField(int byte_offset) const {
   return ObjectSlot(FIELD_ADDR(*this, byte_offset));
 }
 
-ObjectSlot HeapObject::RawField(const HeapObject obj, int byte_offset) {
-  return ObjectSlot(FIELD_ADDR(obj, byte_offset));
-}
-
 MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
   return MaybeObjectSlot(FIELD_ADDR(*this, byte_offset));
 }
 
-MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject obj, int byte_offset) {
-  return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
-}
-
 MapWord MapWord::FromMap(const Map map) { return MapWord(map.ptr()); }
 
 Map MapWord::ToMap() const { return Map::unchecked_cast(Object(value_)); }
@@ -807,6 +799,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
   return GetWriteBarrierModeForObject(*this, &promise);
 }
 
+// static
 AllocationAlignment HeapObject::RequiredAlignment(Map map) {
 #ifdef V8_COMPRESS_POINTERS
   // TODO(ishell, v8:8875): Consider using aligned allocations once the
@@ -27,15 +27,18 @@ HeapObject::HeapObject(Address ptr, AllowInlineSmiStorage allow_smi)
       IsHeapObject());
 }
 
+// static
 HeapObject HeapObject::FromAddress(Address address) {
   DCHECK_TAG_ALIGNED(address);
   return HeapObject(address + kHeapObjectTag);
 }
 
+// static
 Heap* NeverReadOnlySpaceObject::GetHeap(const HeapObject object) {
   return GetHeapFromWritableObject(object);
 }
 
+// static
 Isolate* NeverReadOnlySpaceObject::GetIsolate(const HeapObject object) {
   return Isolate::FromHeap(GetHeap(object));
 }
@@ -131,9 +131,7 @@ class HeapObject : public Object {
   // Does not invoke write barrier, so should only be assigned to
   // during marking GC.
   inline ObjectSlot RawField(int byte_offset) const;
-  static inline ObjectSlot RawField(const HeapObject obj, int offset);
   inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
-  static inline MaybeObjectSlot RawMaybeWeakField(HeapObject obj, int offset);
 
   DECL_CAST(HeapObject)
 
@@ -2811,12 +2811,10 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
           index, MutableHeapNumber::cast(value)->value_as_bits());
       if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
         // Transition from tagged to untagged slot.
-        heap->ClearRecordedSlot(*object,
-                                HeapObject::RawField(*object, index.offset()));
+        heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
       } else {
 #ifdef DEBUG
-        heap->VerifyClearedSlot(*object,
-                                HeapObject::RawField(*object, index.offset()));
+        heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
 #endif
       }
     } else {
@@ -1517,12 +1517,9 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                    MaybeObject::FromObject(Smi::kZero));
       STATIC_ASSERT(kDescriptorsOffset ==
                     kTransitionsOrPrototypeInfoOffset + kTaggedSize);
-      DCHECK_EQ(
-          0,
-          memcmp(
-              HeapObject::RawField(*fresh, kDescriptorsOffset).ToVoidPtr(),
-              HeapObject::RawField(*new_map, kDescriptorsOffset).ToVoidPtr(),
-              kDependentCodeOffset - kDescriptorsOffset));
+      DCHECK_EQ(0, memcmp(fresh->RawField(kDescriptorsOffset).ToVoidPtr(),
+                          new_map->RawField(kDescriptorsOffset).ToVoidPtr(),
+                          kDependentCodeOffset - kDescriptorsOffset));
     } else {
       DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
                           reinterpret_cast<void*>(new_map->address()),
@@ -657,9 +657,8 @@ class IndexedReferencesExtractor : public ObjectVisitor {
                              HeapEntry* parent)
       : generator_(generator),
         parent_obj_(parent_obj),
-        parent_start_(HeapObject::RawMaybeWeakField(parent_obj_, 0)),
-        parent_end_(
-            HeapObject::RawMaybeWeakField(parent_obj_, parent_obj_->Size())),
+        parent_start_(parent_obj_.RawMaybeWeakField(0)),
+        parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_->Size())),
         parent_(parent),
         next_index_(0) {}
   void VisitPointers(HeapObject host, ObjectSlot start,
@@ -128,8 +128,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
       // Slot clearing is the reason why this entire function cannot currently
      // be implemented in the DeleteProperty stub.
      if (index.is_inobject() && !map->IsUnboxedDoubleField(index)) {
-        isolate->heap()->ClearRecordedSlot(
-            *receiver, HeapObject::RawField(*receiver, index.offset()));
+        isolate->heap()->ClearRecordedSlot(*receiver,
+                                           receiver->RawField(index.offset()));
      }
    }
  }