[cleanup] Remove unnecessary static functions from heap-object.h

Change-Id: I626340e61e9c1a46e0d1c882cc4fa86454d93e8a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1526013
Commit-Queue: Mythri Alle <mythria@chromium.org>
Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60293}
This commit is contained in:
Maciej Goszczycki 2019-03-15 18:48:54 +00:00 committed by Commit Bot
parent a4ea15575f
commit 34102d9b7f
14 changed files with 42 additions and 64 deletions

View File

@@ -228,8 +228,7 @@ class ConcurrentMarkingVisitor final
if (marking_state_.IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the
// VisitJSObjectSubclass above didn't visit it.
ObjectSlot slot =
HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
MarkCompactCollector::RecordSlot(weak_ref, slot, target);
} else {
// JSWeakRef points to a potentially dead object. We have to process
@@ -251,8 +250,7 @@ class ConcurrentMarkingVisitor final
if (marking_state_.IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody above
// didn't visit it.
ObjectSlot slot =
HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
MarkCompactCollector::RecordSlot(weak_cell, slot, target);
} else {
// WeakCell points to a potentially dead object. We have to process
@@ -331,8 +329,7 @@ class ConcurrentMarkingVisitor final
Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
int end = Min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, HeapObject::RawField(object, start),
HeapObject::RawField(object, end));
VisitPointers(object, object.RawField(start), object.RawField(end));
chunk->set_progress_bar(end);
if (end < size) {
// The object can be pushed back onto the marking worklist only after

View File

@@ -2721,9 +2721,9 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
FixedArrayBase::cast(HeapObject::FromAddress(new_start));
// Remove recorded slots for the new map and length offset.
ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
ClearRecordedSlot(new_object, HeapObject::RawField(
new_object, FixedArrayBase::kLengthOffset));
ClearRecordedSlot(new_object, new_object.RawField(0));
ClearRecordedSlot(new_object,
new_object.RawField(FixedArrayBase::kLengthOffset));
// Handle invalidated old-to-old slots.
if (incremental_marking()->IsCompacting() &&
@@ -2737,7 +2737,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
// we need pointer granularity writes to avoid race with the concurrent
// marking.
if (filler->Size() > FreeSpace::kSize) {
MemsetTagged(HeapObject::RawField(filler, FreeSpace::kSize),
MemsetTagged(filler.RawField(FreeSpace::kSize),
ReadOnlyRoots(this).undefined_value(),
(filler->Size() - FreeSpace::kSize) / kTaggedSize);
}

View File

@@ -90,8 +90,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
collector_->AddBytecodeFlushingCandidate(shared_info);
} else {
VisitPointer(shared_info,
HeapObject::RawField(shared_info,
SharedFunctionInfo::kFunctionDataOffset));
shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
}
return size;
}
@@ -249,8 +248,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the JSWeakRef, since the IterateBody below
// won't visit it.
ObjectSlot slot =
HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
collector_->RecordSlot(weak_ref, slot, target);
} else {
// JSWeakRef points to a potentially dead object. We have to process
@@ -272,8 +270,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
if (marking_state()->IsBlackOrGrey(target)) {
// Record the slot inside the WeakCell, since the IterateBody below
// won't visit it.
ObjectSlot slot =
HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
collector_->RecordSlot(weak_cell, slot, target);
} else {
// WeakCell points to a potentially dead object. We have to process
@@ -404,8 +401,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
int end = Min(size, start + kProgressBarScanningChunk);
if (start < end) {
VisitPointers(object, HeapObject::RawField(object, start),
HeapObject::RawField(object, end));
VisitPointers(object, object.RawField(start), object.RawField(end));
chunk->set_progress_bar(end);
if (end < size) {
DCHECK(marking_state()->IsBlack(object));

View File

@@ -2075,8 +2075,8 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
// Now record the slot, which has either been updated to an uncompiled data,
// or is the BytecodeArray which is still alive.
ObjectSlot slot = HeapObject::RawField(
flushing_candidate, SharedFunctionInfo::kFunctionDataOffset);
ObjectSlot slot =
flushing_candidate.RawField(SharedFunctionInfo::kFunctionDataOffset);
RecordSlot(flushing_candidate, slot, HeapObject::cast(*slot));
}
}
@@ -2292,8 +2292,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
weak_ref->set_target(ReadOnlyRoots(isolate()).undefined_value());
} else {
// The value of the JSWeakRef is alive.
ObjectSlot slot =
HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
RecordSlot(weak_ref, slot, target);
}
}
@@ -2327,8 +2326,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
DCHECK(finalization_group->scheduled_for_cleanup());
} else {
// The value of the WeakCell is alive.
ObjectSlot slot =
HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
}
}

View File

@@ -51,7 +51,7 @@ Object VisitWeakList(Heap* heap, Object list, WeakObjectRetainer* retainer) {
if (record_slots) {
HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
int slot_offset = WeakListVisitor<T>::WeakNextOffset();
ObjectSlot slot = HeapObject::RawField(slot_holder, slot_offset);
ObjectSlot slot = slot_holder.RawField(slot_offset);
MarkCompactCollector::RecordSlot(slot_holder, slot,
HeapObject::cast(retained));
}

View File

@@ -119,34 +119,33 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject obj,
int start_offset,
int end_offset,
ObjectVisitor* v) {
v->VisitPointers(obj, HeapObject::RawField(obj, start_offset),
HeapObject::RawField(obj, end_offset));
v->VisitPointers(obj, obj.RawField(start_offset), obj.RawField(end_offset));
}
template <typename ObjectVisitor>
void BodyDescriptorBase::IteratePointer(HeapObject obj, int offset,
ObjectVisitor* v) {
v->VisitPointer(obj, HeapObject::RawField(obj, offset));
v->VisitPointer(obj, obj.RawField(offset));
}
template <typename ObjectVisitor>
DISABLE_CFI_PERF void BodyDescriptorBase::IterateMaybeWeakPointers(
HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
v->VisitPointers(obj, HeapObject::RawMaybeWeakField(obj, start_offset),
HeapObject::RawMaybeWeakField(obj, end_offset));
v->VisitPointers(obj, obj.RawMaybeWeakField(start_offset),
obj.RawMaybeWeakField(end_offset));
}
template <typename ObjectVisitor>
void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject obj, int offset,
ObjectVisitor* v) {
v->VisitPointer(obj, HeapObject::RawMaybeWeakField(obj, offset));
v->VisitPointer(obj, obj.RawMaybeWeakField(offset));
}
template <typename ObjectVisitor>
DISABLE_CFI_PERF void BodyDescriptorBase::IterateCustomWeakPointers(
HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
v->VisitCustomWeakPointers(obj, HeapObject::RawField(obj, start_offset),
HeapObject::RawField(obj, end_offset));
v->VisitCustomWeakPointers(obj, obj.RawField(start_offset),
obj.RawField(end_offset));
}
template <typename ObjectVisitor>
@@ -155,14 +154,14 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IterateEphemeron(HeapObject obj,
int key_offset,
int value_offset,
ObjectVisitor* v) {
v->VisitEphemeron(obj, index, HeapObject::RawField(obj, key_offset),
HeapObject::RawField(obj, value_offset));
v->VisitEphemeron(obj, index, obj.RawField(key_offset),
obj.RawField(value_offset));
}
template <typename ObjectVisitor>
void BodyDescriptorBase::IterateCustomWeakPointer(HeapObject obj, int offset,
ObjectVisitor* v) {
v->VisitCustomWeakPointer(obj, HeapObject::RawField(obj, offset));
v->VisitCustomWeakPointer(obj, obj.RawField(offset));
}
class JSObject::BodyDescriptor final : public BodyDescriptorBase {
@@ -555,9 +554,8 @@ class Foreign::BodyDescriptor final : public BodyDescriptorBase {
static inline void IterateBody(Map map, HeapObject obj, int object_size,
ObjectVisitor* v) {
v->VisitExternalReference(
Foreign::cast(obj),
reinterpret_cast<Address*>(
HeapObject::RawField(obj, kForeignAddressOffset).address()));
Foreign::cast(obj), reinterpret_cast<Address*>(
obj.RawField(kForeignAddressOffset).address()));
}
static inline int SizeOf(Map map, HeapObject object) { return kSize; }

View File

@@ -463,6 +463,7 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
}
}
// static
void HeapObject::VerifyHeapPointer(Isolate* isolate, Object p) {
CHECK(p->IsHeapObject());
HeapObject ho = HeapObject::cast(p);

View File

@@ -584,18 +584,10 @@ ObjectSlot HeapObject::RawField(int byte_offset) const {
return ObjectSlot(FIELD_ADDR(*this, byte_offset));
}
ObjectSlot HeapObject::RawField(const HeapObject obj, int byte_offset) {
return ObjectSlot(FIELD_ADDR(obj, byte_offset));
}
MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
return MaybeObjectSlot(FIELD_ADDR(*this, byte_offset));
}
MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject obj, int byte_offset) {
return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
}
MapWord MapWord::FromMap(const Map map) { return MapWord(map.ptr()); }
Map MapWord::ToMap() const { return Map::unchecked_cast(Object(value_)); }
@@ -807,6 +799,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
return GetWriteBarrierModeForObject(*this, &promise);
}
// static
AllocationAlignment HeapObject::RequiredAlignment(Map map) {
#ifdef V8_COMPRESS_POINTERS
// TODO(ishell, v8:8875): Consider using aligned allocations once the

View File

@@ -27,15 +27,18 @@ HeapObject::HeapObject(Address ptr, AllowInlineSmiStorage allow_smi)
IsHeapObject());
}
// static
HeapObject HeapObject::FromAddress(Address address) {
DCHECK_TAG_ALIGNED(address);
return HeapObject(address + kHeapObjectTag);
}
// static
Heap* NeverReadOnlySpaceObject::GetHeap(const HeapObject object) {
return GetHeapFromWritableObject(object);
}
// static
Isolate* NeverReadOnlySpaceObject::GetIsolate(const HeapObject object) {
return Isolate::FromHeap(GetHeap(object));
}

View File

@@ -131,9 +131,7 @@ class HeapObject : public Object {
// Does not invoke write barrier, so should only be assigned to
// during marking GC.
inline ObjectSlot RawField(int byte_offset) const;
static inline ObjectSlot RawField(const HeapObject obj, int offset);
inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
static inline MaybeObjectSlot RawMaybeWeakField(HeapObject obj, int offset);
DECL_CAST(HeapObject)

View File

@@ -2811,12 +2811,10 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
index, MutableHeapNumber::cast(value)->value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot.
heap->ClearRecordedSlot(*object,
HeapObject::RawField(*object, index.offset()));
heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
} else {
#ifdef DEBUG
heap->VerifyClearedSlot(*object,
HeapObject::RawField(*object, index.offset()));
heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
#endif
}
} else {

View File

@@ -1517,11 +1517,8 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
MaybeObject::FromObject(Smi::kZero));
STATIC_ASSERT(kDescriptorsOffset ==
kTransitionsOrPrototypeInfoOffset + kTaggedSize);
DCHECK_EQ(
0,
memcmp(
HeapObject::RawField(*fresh, kDescriptorsOffset).ToVoidPtr(),
HeapObject::RawField(*new_map, kDescriptorsOffset).ToVoidPtr(),
DCHECK_EQ(0, memcmp(fresh->RawField(kDescriptorsOffset).ToVoidPtr(),
new_map->RawField(kDescriptorsOffset).ToVoidPtr(),
kDependentCodeOffset - kDescriptorsOffset));
} else {
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),

View File

@@ -657,9 +657,8 @@ class IndexedReferencesExtractor : public ObjectVisitor {
HeapEntry* parent)
: generator_(generator),
parent_obj_(parent_obj),
parent_start_(HeapObject::RawMaybeWeakField(parent_obj_, 0)),
parent_end_(
HeapObject::RawMaybeWeakField(parent_obj_, parent_obj_->Size())),
parent_start_(parent_obj_.RawMaybeWeakField(0)),
parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_->Size())),
parent_(parent),
next_index_(0) {}
void VisitPointers(HeapObject host, ObjectSlot start,

View File

@@ -128,8 +128,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
// Slot clearing is the reason why this entire function cannot currently
// be implemented in the DeleteProperty stub.
if (index.is_inobject() && !map->IsUnboxedDoubleField(index)) {
isolate->heap()->ClearRecordedSlot(
*receiver, HeapObject::RawField(*receiver, index.offset()));
isolate->heap()->ClearRecordedSlot(*receiver,
receiver->RawField(index.offset()));
}
}
}