From 34102d9b7fa55ec3fc0f231899f09706f4675a1b Mon Sep 17 00:00:00 2001
From: Maciej Goszczycki
Date: Fri, 15 Mar 2019 18:48:54 +0000
Subject: [PATCH] [cleanup] Remove unnecessary static functions from
 heap-object.h

Change-Id: I626340e61e9c1a46e0d1c882cc4fa86454d93e8a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1526013
Commit-Queue: Mythri Alle
Commit-Queue: Maciej Goszczycki
Reviewed-by: Michael Lippautz
Cr-Commit-Position: refs/heads/master@{#60293}
---
 src/heap/concurrent-marking.cc          |  9 +++------
 src/heap/heap.cc                        |  8 ++++----
 src/heap/mark-compact-inl.h             | 12 ++++--------
 src/heap/mark-compact.cc                | 10 ++++------
 src/heap/objects-visiting.cc            |  2 +-
 src/objects-body-descriptors-inl.h      | 26 ++++++++++++--------------
 src/objects-debug.cc                    |  1 +
 src/objects-inl.h                       |  9 +--------
 src/objects/heap-object-inl.h           |  3 +++
 src/objects/heap-object.h               |  2 --
 src/objects/js-objects.cc               |  6 ++----
 src/objects/map.cc                      |  9 +++------
 src/profiler/heap-snapshot-generator.cc |  5 ++---
 src/runtime/runtime-object.cc           |  4 ++--
 14 files changed, 42 insertions(+), 64 deletions(-)

diff --git a/src/heap/concurrent-marking.cc b/src/heap/concurrent-marking.cc
index e61b8256a7..5262fe3503 100644
--- a/src/heap/concurrent-marking.cc
+++ b/src/heap/concurrent-marking.cc
@@ -228,8 +228,7 @@ class ConcurrentMarkingVisitor final
       if (marking_state_.IsBlackOrGrey(target)) {
         // Record the slot inside the JSWeakRef, since the
         // VisitJSObjectSubclass above didn't visit it.
-        ObjectSlot slot =
-            HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+        ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
         MarkCompactCollector::RecordSlot(weak_ref, slot, target);
       } else {
         // JSWeakRef points to a potentially dead object. We have to process
@@ -251,8 +250,7 @@ class ConcurrentMarkingVisitor final
       if (marking_state_.IsBlackOrGrey(target)) {
         // Record the slot inside the WeakCell, since the IterateBody above
         // didn't visit it.
-        ObjectSlot slot =
-            HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+        ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
         MarkCompactCollector::RecordSlot(weak_cell, slot, target);
       } else {
         // WeakCell points to a potentially dead object. We have to process
@@ -331,8 +329,7 @@ class ConcurrentMarkingVisitor final
         Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
     int end = Min(size, start + kProgressBarScanningChunk);
     if (start < end) {
-      VisitPointers(object, HeapObject::RawField(object, start),
-                    HeapObject::RawField(object, end));
+      VisitPointers(object, object.RawField(start), object.RawField(end));
       chunk->set_progress_bar(end);
       if (end < size) {
         // The object can be pushed back onto the marking worklist only after
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 9f3f9085c3..aa47300f01 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -2721,9 +2721,9 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
 
   // Remove recorded slots for the new map and length offset.
-  ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
-  ClearRecordedSlot(new_object, HeapObject::RawField(
-                                    new_object, FixedArrayBase::kLengthOffset));
+  ClearRecordedSlot(new_object, new_object.RawField(0));
+  ClearRecordedSlot(new_object,
+                    new_object.RawField(FixedArrayBase::kLengthOffset));
 
   // Handle invalidated old-to-old slots.
   if (incremental_marking()->IsCompacting() &&
@@ -2737,7 +2737,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
   // we need pointer granularity writes to avoid race with the concurrent
   // marking.
   if (filler->Size() > FreeSpace::kSize) {
-    MemsetTagged(HeapObject::RawField(filler, FreeSpace::kSize),
+    MemsetTagged(filler.RawField(FreeSpace::kSize),
                  ReadOnlyRoots(this).undefined_value(),
                  (filler->Size() - FreeSpace::kSize) / kTaggedSize);
   }
diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index a4fbf06c10..90a3b1db5e 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -90,8 +90,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
     collector_->AddBytecodeFlushingCandidate(shared_info);
   } else {
     VisitPointer(shared_info,
-                 HeapObject::RawField(shared_info,
-                                      SharedFunctionInfo::kFunctionDataOffset));
+                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
   }
   return size;
 }
@@ -249,8 +248,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
     if (marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the JSWeakRef, since the IterateBody below
       // won't visit it.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
       collector_->RecordSlot(weak_ref, slot, target);
     } else {
       // JSWeakRef points to a potentially dead object. We have to process
@@ -272,8 +270,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
     if (marking_state()->IsBlackOrGrey(target)) {
       // Record the slot inside the WeakCell, since the IterateBody below
       // won't visit it.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
       collector_->RecordSlot(weak_cell, slot, target);
     } else {
       // WeakCell points to a potentially dead object. We have to process
@@ -404,8 +401,7 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
         Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
     int end = Min(size, start + kProgressBarScanningChunk);
     if (start < end) {
-      VisitPointers(object, HeapObject::RawField(object, start),
-                    HeapObject::RawField(object, end));
+      VisitPointers(object, object.RawField(start), object.RawField(end));
       chunk->set_progress_bar(end);
       if (end < size) {
         DCHECK(marking_state()->IsBlack(object));
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index b117765c3c..bbda994ca1 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -2075,8 +2075,8 @@ void MarkCompactCollector::ClearOldBytecodeCandidates() {
 
     // Now record the slot, which has either been updated to an uncompiled data,
    // or is the BytecodeArray which is still alive.
-    ObjectSlot slot = HeapObject::RawField(
-        flushing_candidate, SharedFunctionInfo::kFunctionDataOffset);
+    ObjectSlot slot =
+        flushing_candidate.RawField(SharedFunctionInfo::kFunctionDataOffset);
     RecordSlot(flushing_candidate, slot, HeapObject::cast(*slot));
   }
 }
@@ -2292,8 +2292,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
       weak_ref->set_target(ReadOnlyRoots(isolate()).undefined_value());
     } else {
       // The value of the JSWeakRef is alive.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_ref, JSWeakRef::kTargetOffset);
+      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
       RecordSlot(weak_ref, slot, target);
     }
   }
@@ -2327,8 +2326,7 @@ void MarkCompactCollector::ClearJSWeakRefs() {
       DCHECK(finalization_group->scheduled_for_cleanup());
     } else {
       // The value of the WeakCell is alive.
-      ObjectSlot slot =
-          HeapObject::RawField(weak_cell, WeakCell::kTargetOffset);
+      ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
       RecordSlot(weak_cell, slot, HeapObject::cast(*slot));
     }
   }
diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc
index 063dae512f..d56dd91da2 100644
--- a/src/heap/objects-visiting.cc
+++ b/src/heap/objects-visiting.cc
@@ -51,7 +51,7 @@ Object VisitWeakList(Heap* heap, Object list, WeakObjectRetainer* retainer) {
       if (record_slots) {
         HeapObject slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
         int slot_offset = WeakListVisitor<T>::WeakNextOffset();
-        ObjectSlot slot = HeapObject::RawField(slot_holder, slot_offset);
+        ObjectSlot slot = slot_holder.RawField(slot_offset);
         MarkCompactCollector::RecordSlot(slot_holder, slot,
                                          HeapObject::cast(retained));
       }
diff --git a/src/objects-body-descriptors-inl.h b/src/objects-body-descriptors-inl.h
index 605ea87079..579d71f8d3 100644
--- a/src/objects-body-descriptors-inl.h
+++ b/src/objects-body-descriptors-inl.h
@@ -119,34 +119,33 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IteratePointers(HeapObject obj,
                                                           int start_offset,
                                                           int end_offset,
                                                           ObjectVisitor* v) {
-  v->VisitPointers(obj, HeapObject::RawField(obj, start_offset),
-                   HeapObject::RawField(obj, end_offset));
+  v->VisitPointers(obj, obj.RawField(start_offset), obj.RawField(end_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IteratePointer(HeapObject obj, int offset,
                                         ObjectVisitor* v) {
-  v->VisitPointer(obj, HeapObject::RawField(obj, offset));
+  v->VisitPointer(obj, obj.RawField(offset));
 }
 
 template <typename ObjectVisitor>
 DISABLE_CFI_PERF void BodyDescriptorBase::IterateMaybeWeakPointers(
     HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
-  v->VisitPointers(obj, HeapObject::RawMaybeWeakField(obj, start_offset),
-                   HeapObject::RawMaybeWeakField(obj, end_offset));
+  v->VisitPointers(obj, obj.RawMaybeWeakField(start_offset),
+                   obj.RawMaybeWeakField(end_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject obj, int offset,
                                                  ObjectVisitor* v) {
-  v->VisitPointer(obj, HeapObject::RawMaybeWeakField(obj, offset));
+  v->VisitPointer(obj, obj.RawMaybeWeakField(offset));
 }
 
 template <typename ObjectVisitor>
 DISABLE_CFI_PERF void BodyDescriptorBase::IterateCustomWeakPointers(
     HeapObject obj, int start_offset, int end_offset, ObjectVisitor* v) {
-  v->VisitCustomWeakPointers(obj, HeapObject::RawField(obj, start_offset),
-                             HeapObject::RawField(obj, end_offset));
+  v->VisitCustomWeakPointers(obj, obj.RawField(start_offset),
+                             obj.RawField(end_offset));
 }
 
 template <typename ObjectVisitor>
@@ -155,14 +154,14 @@ DISABLE_CFI_PERF void BodyDescriptorBase::IterateEphemeron(HeapObject obj,
                                                            int key_offset,
                                                            int value_offset,
                                                            ObjectVisitor* v) {
-  v->VisitEphemeron(obj, index, HeapObject::RawField(obj, key_offset),
-                    HeapObject::RawField(obj, value_offset));
+  v->VisitEphemeron(obj, index, obj.RawField(key_offset),
+                    obj.RawField(value_offset));
 }
 
 template <typename ObjectVisitor>
 void BodyDescriptorBase::IterateCustomWeakPointer(HeapObject obj, int offset,
                                                   ObjectVisitor* v) {
-  v->VisitCustomWeakPointer(obj, HeapObject::RawField(obj, offset));
+  v->VisitCustomWeakPointer(obj, obj.RawField(offset));
 }
 
 class JSObject::BodyDescriptor final : public BodyDescriptorBase {
@@ -555,9 +554,8 @@ class Foreign::BodyDescriptor final : public BodyDescriptorBase {
   static inline void IterateBody(Map map, HeapObject obj, int object_size,
                                  ObjectVisitor* v) {
     v->VisitExternalReference(
-        Foreign::cast(obj),
-        reinterpret_cast<Address*>(
-            HeapObject::RawField(obj, kForeignAddressOffset).address()));
+        Foreign::cast(obj), reinterpret_cast<Address*>(
+                                obj.RawField(kForeignAddressOffset).address()));
   }
 
   static inline int SizeOf(Map map, HeapObject object) { return kSize; }
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 2ee0195c1d..f11baa1163 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -463,6 +463,7 @@ void HeapObject::HeapObjectVerify(Isolate* isolate) {
   }
 }
 
+// static
 void HeapObject::VerifyHeapPointer(Isolate* isolate, Object p) {
   CHECK(p->IsHeapObject());
   HeapObject ho = HeapObject::cast(p);
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 6f146241b1..a6ba51deb1 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -584,18 +584,10 @@ ObjectSlot HeapObject::RawField(int byte_offset) const {
   return ObjectSlot(FIELD_ADDR(*this, byte_offset));
 }
 
-ObjectSlot HeapObject::RawField(const HeapObject obj, int byte_offset) {
-  return ObjectSlot(FIELD_ADDR(obj, byte_offset));
-}
-
 MaybeObjectSlot HeapObject::RawMaybeWeakField(int byte_offset) const {
   return MaybeObjectSlot(FIELD_ADDR(*this, byte_offset));
 }
 
-MaybeObjectSlot HeapObject::RawMaybeWeakField(HeapObject obj, int byte_offset) {
-  return MaybeObjectSlot(FIELD_ADDR(obj, byte_offset));
-}
-
 MapWord MapWord::FromMap(const Map map) { return MapWord(map.ptr()); }
 
 Map MapWord::ToMap() const { return Map::unchecked_cast(Object(value_)); }
@@ -807,6 +799,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
   return GetWriteBarrierModeForObject(*this, &promise);
 }
 
+// static
 AllocationAlignment HeapObject::RequiredAlignment(Map map) {
 #ifdef V8_COMPRESS_POINTERS
   // TODO(ishell, v8:8875): Consider using aligned allocations once the
diff --git a/src/objects/heap-object-inl.h b/src/objects/heap-object-inl.h
index fbdcb0f6ec..be97f8bb79 100644
--- a/src/objects/heap-object-inl.h
+++ b/src/objects/heap-object-inl.h
@@ -27,15 +27,18 @@ HeapObject::HeapObject(Address ptr, AllowInlineSmiStorage allow_smi)
          IsHeapObject());
 }
 
+// static
 HeapObject HeapObject::FromAddress(Address address) {
   DCHECK_TAG_ALIGNED(address);
   return HeapObject(address + kHeapObjectTag);
 }
 
+// static
 Heap* NeverReadOnlySpaceObject::GetHeap(const HeapObject object) {
   return GetHeapFromWritableObject(object);
 }
 
+// static
 Isolate* NeverReadOnlySpaceObject::GetIsolate(const HeapObject object) {
   return Isolate::FromHeap(GetHeap(object));
 }
diff --git a/src/objects/heap-object.h b/src/objects/heap-object.h
index a756ecd668..446a927354 100644
--- a/src/objects/heap-object.h
+++ b/src/objects/heap-object.h
@@ -131,9 +131,7 @@ class HeapObject : public Object {
   // Does not invoke write barrier, so should only be assigned to
   // during marking GC.
   inline ObjectSlot RawField(int byte_offset) const;
-  static inline ObjectSlot RawField(const HeapObject obj, int offset);
   inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
-  static inline MaybeObjectSlot RawMaybeWeakField(HeapObject obj, int offset);
 
   DECL_CAST(HeapObject)
 
diff --git a/src/objects/js-objects.cc b/src/objects/js-objects.cc
index a8dfd66c13..a46b69eedd 100644
--- a/src/objects/js-objects.cc
+++ b/src/objects/js-objects.cc
@@ -2811,12 +2811,10 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
           index, MutableHeapNumber::cast(value)->value_as_bits());
       if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
         // Transition from tagged to untagged slot.
-        heap->ClearRecordedSlot(*object,
-                                HeapObject::RawField(*object, index.offset()));
+        heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
       } else {
 #ifdef DEBUG
-        heap->VerifyClearedSlot(*object,
-                                HeapObject::RawField(*object, index.offset()));
+        heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
 #endif
       }
     } else {
diff --git a/src/objects/map.cc b/src/objects/map.cc
index 32be60b3b7..4f6452fc1f 100644
--- a/src/objects/map.cc
+++ b/src/objects/map.cc
@@ -1517,12 +1517,9 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
                                MaybeObject::FromObject(Smi::kZero));
       STATIC_ASSERT(kDescriptorsOffset ==
                     kTransitionsOrPrototypeInfoOffset + kTaggedSize);
-      DCHECK_EQ(
-          0,
-          memcmp(
-              HeapObject::RawField(*fresh, kDescriptorsOffset).ToVoidPtr(),
-              HeapObject::RawField(*new_map, kDescriptorsOffset).ToVoidPtr(),
-              kDependentCodeOffset - kDescriptorsOffset));
+      DCHECK_EQ(0, memcmp(fresh->RawField(kDescriptorsOffset).ToVoidPtr(),
+                          new_map->RawField(kDescriptorsOffset).ToVoidPtr(),
+                          kDependentCodeOffset - kDescriptorsOffset));
     } else {
       DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
                           reinterpret_cast<void*>(new_map->address()),
diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc
index 4d6aa029f7..8be7864b66 100644
--- a/src/profiler/heap-snapshot-generator.cc
+++ b/src/profiler/heap-snapshot-generator.cc
@@ -657,9 +657,8 @@ class IndexedReferencesExtractor : public ObjectVisitor {
                              HeapEntry* parent)
       : generator_(generator),
         parent_obj_(parent_obj),
-        parent_start_(HeapObject::RawMaybeWeakField(parent_obj_, 0)),
-        parent_end_(
-            HeapObject::RawMaybeWeakField(parent_obj_, parent_obj_->Size())),
+        parent_start_(parent_obj_.RawMaybeWeakField(0)),
+        parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_->Size())),
         parent_(parent),
         next_index_(0) {}
   void VisitPointers(HeapObject host, ObjectSlot start,
diff --git a/src/runtime/runtime-object.cc b/src/runtime/runtime-object.cc
index 5a4ca8b7b4..f0fec3e451 100644
--- a/src/runtime/runtime-object.cc
+++ b/src/runtime/runtime-object.cc
@@ -128,8 +128,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
     // Slot clearing is the reason why this entire function cannot currently
     // be implemented in the DeleteProperty stub.
     if (index.is_inobject() && !map->IsUnboxedDoubleField(index)) {
-      isolate->heap()->ClearRecordedSlot(
-          *receiver, HeapObject::RawField(*receiver, index.offset()));
+      isolate->heap()->ClearRecordedSlot(*receiver,
+                                         receiver->RawField(index.offset()));
     }
   }
 
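
Editor's note (not part of the patch): a minimal standalone sketch of the call-style change this cleanup makes. The removed static overload HeapObject::RawField(obj, offset) merely duplicated the instance method obj.RawField(offset), so every call site could drop the redundant object parameter. All types below (HeapObject, ObjectSlot, Address, kTargetOffset) are simplified stand-ins, not V8's real definitions.

// illustration.cc -- hypothetical mock of the API shape before/after the patch.
#include <cassert>
#include <cstdint>

using Address = uintptr_t;

// Stand-in for v8::internal::ObjectSlot: the address of a tagged field.
struct ObjectSlot {
  Address location;
};

// Stand-in for v8::internal::HeapObject.
class HeapObject {
 public:
  explicit HeapObject(Address ptr) : ptr_(ptr) {}

  // Instance method kept by the patch.
  ObjectSlot RawField(int byte_offset) const {
    return ObjectSlot{ptr_ + static_cast<Address>(byte_offset)};
  }

  // Redundant static overload removed by the patch; it only forwarded
  // to the instance method above.
  static ObjectSlot RawField(HeapObject obj, int byte_offset) {
    return obj.RawField(byte_offset);
  }

 private:
  Address ptr_;
};

int main() {
  const int kTargetOffset = 8;  // hypothetical field offset
  HeapObject weak_ref(0x1000);

  ObjectSlot before = HeapObject::RawField(weak_ref, kTargetOffset);  // old style
  ObjectSlot after = weak_ref.RawField(kTargetOffset);                // new style

  assert(before.location == after.location);  // both name the same slot
  return 0;
}

Since every call site already has the receiver object in hand, the instance form carries the same information with one fewer argument, which is exactly what shortens each call site in the hunks above.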