[heap] delete Heap::LeftTrimFixedArray
No more uses left for this code (thank goodness.)

R=hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1474693003

Cr-Commit-Position: refs/heads/master@{#32305}

commit dc3442b1ec (parent d5e5e1e161)

@@ -696,7 +696,6 @@ DEFINE_IMPLICATION(trace_detached_contexts, track_detached_contexts)
 #ifdef VERIFY_HEAP
 DEFINE_BOOL(verify_heap, false, "verify heap pointers before and after GC")
 #endif
-DEFINE_BOOL(move_object_start, false, "enable moving of object starts")
 DEFINE_BOOL(memory_reducer, true, "use memory reducer")
 DEFINE_BOOL(scavenge_reclaim_unmodified_objects, false,
             "remove unmodified and unreferenced objects")

@@ -3066,24 +3066,6 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
 }
 
 
-bool Heap::CanMoveObjectStart(HeapObject* object) {
-  if (!FLAG_move_object_start) return false;
-
-  Address address = object->address();
-
-  if (lo_space()->Contains(object)) return false;
-
-  Page* page = Page::FromAddress(address);
-  // We can move the object start if:
-  //  (1) the object is not in old space,
-  //  (2) the page of the object was already swept,
-  //  (3) the page was already concurrently swept. This case is an optimization
-  //      for concurrent sweeping. The WasSwept predicate for concurrently swept
-  //      pages is set after sweeping all pages.
-  return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
-}
-
-
 void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
   if (incremental_marking()->IsMarking() &&
       Marking::IsBlack(Marking::MarkBitFrom(object->address()))) {

@@ -3096,55 +3078,6 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
 }
 
 
-FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
-                                         int elements_to_trim) {
-  DCHECK(!object->IsFixedTypedArrayBase());
-  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
-  const int bytes_to_trim = elements_to_trim * element_size;
-  Map* map = object->map();
-
-  // For now this trick is only applied to objects in new and paged space.
-  // In large object space the object's start must coincide with chunk
-  // and thus the trick is just not applicable.
-  DCHECK(!lo_space()->Contains(object));
-  DCHECK(object->map() != fixed_cow_array_map());
-
-  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
-  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
-  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
-
-  const int len = object->length();
-  DCHECK(elements_to_trim <= len);
-
-  // Calculate location of new array start.
-  Address new_start = object->address() + bytes_to_trim;
-
-  // Technically in new space this write might be omitted (except for
-  // debug mode which iterates through the heap), but to play safer
-  // we still do it.
-  CreateFillerObjectAt(object->address(), bytes_to_trim);
-
-  // Initialize header of the trimmed array. Since left trimming is only
-  // performed on pages which are not concurrently swept creating a filler
-  // object does not require synchronization.
-  DCHECK(CanMoveObjectStart(object));
-  Object** former_start = HeapObject::RawField(object, 0);
-  int new_start_index = elements_to_trim * (element_size / kPointerSize);
-  former_start[new_start_index] = map;
-  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
-  FixedArrayBase* new_object =
-      FixedArrayBase::cast(HeapObject::FromAddress(new_start));
-
-  // Maintain consistency of live bytes during incremental marking
-  Marking::TransferMark(this, object->address(), new_start);
-  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
-
-  // Notify the heap profiler of change in object layout.
-  OnMoveEvent(new_object, object, new_object->Size());
-  return new_object;
-}
-
-
 // Force instantiation of templatized method.
 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
     FixedArrayBase*, int);

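For context on what the deleted function did: LeftTrimFixedArray avoided copying the surviving elements by moving the array's start forward and overwriting the vacated prefix with filler, so a heap walker still sees a contiguous run of valid objects. The standalone C++ sketch below illustrates that idea on a flat word-array model of a heap; the names (Word, kArrayMap, kFillerMap, left_trim) are invented for the illustration and are not V8 APIs.

// A minimal, self-contained sketch (not V8 code) of the left-trimming trick:
// instead of copying the surviving elements, the 2-word header is re-written
// `elements_to_trim` slots further in and the vacated prefix becomes filler.
#include <cassert>
#include <cstdint>
#include <iostream>
#include <vector>

using Word = intptr_t;

constexpr Word kArrayMap  = 0x1;  // stands in for the array's map pointer
constexpr Word kFillerMap = 0x2;  // stands in for a one-word filler map

// Layout: [map][length][e0][e1]... , one word per slot.
// Returns the index of the new object start inside `heap`.
size_t left_trim(std::vector<Word>& heap, size_t start, size_t elements_to_trim) {
  const Word length = heap[start + 1];
  assert(static_cast<Word>(elements_to_trim) <= length);

  // Overwrite the vacated prefix with filler so the heap stays iterable.
  for (size_t i = 0; i < elements_to_trim; ++i) heap[start + i] = kFillerMap;

  // Re-create the header further in; the surviving elements already sit at
  // the right offsets relative to the new header, so nothing is copied.
  const size_t new_start = start + elements_to_trim;
  heap[new_start]     = kArrayMap;
  heap[new_start + 1] = length - static_cast<Word>(elements_to_trim);
  return new_start;
}

int main() {
  // [map][len=4][10][20][30][40]
  std::vector<Word> heap = {kArrayMap, 4, 10, 20, 30, 40};
  size_t obj = left_trim(heap, 0, 2);
  std::cout << "new length: " << heap[obj + 1] << "\n";  // 2
  std::cout << "first elem: " << heap[obj + 2] << "\n";  // 30
}

The point of the trick is that the surviving elements never move; only the header words are rewritten at the new start, which is why the object start had to be movable (CanMoveObjectStart) and why the mark bit had to be transferred to the new address.
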
@@ -827,15 +827,9 @@ class Heap {
   // when introducing gaps within pages.
   void CreateFillerObjectAt(Address addr, int size);
 
-  bool CanMoveObjectStart(HeapObject* object);
-
   // Maintain consistency of live bytes during incremental marking.
   void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);
 
-  // Trim the given array from the left. Note that this relocates the object
-  // start and hence is only valid if there is only a single reference to it.
-  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
-
   // Trim the given array from the right.
   template<Heap::InvocationMode mode>
   void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

@@ -594,43 +594,6 @@ bool MarkCompactCollector::IsSweepingCompleted() {
 }
 
 
-void Marking::TransferMark(Heap* heap, Address old_start, Address new_start) {
-  // This is only used when resizing an object.
-  DCHECK(MemoryChunk::FromAddress(old_start) ==
-         MemoryChunk::FromAddress(new_start));
-
-  if (!heap->incremental_marking()->IsMarking()) return;
-
-  // If the mark doesn't move, we don't check the color of the object.
-  // It doesn't matter whether the object is black, since it hasn't changed
-  // size, so the adjustment to the live data count will be zero anyway.
-  if (old_start == new_start) return;
-
-  MarkBit new_mark_bit = MarkBitFrom(new_start);
-  MarkBit old_mark_bit = MarkBitFrom(old_start);
-
-#ifdef DEBUG
-  ObjectColor old_color = Color(old_mark_bit);
-#endif
-
-  if (Marking::IsBlack(old_mark_bit)) {
-    Marking::BlackToWhite(old_mark_bit);
-    Marking::MarkBlack(new_mark_bit);
-    return;
-  } else if (Marking::IsGrey(old_mark_bit)) {
-    Marking::GreyToWhite(old_mark_bit);
-    heap->incremental_marking()->WhiteToGreyAndPush(
-        HeapObject::FromAddress(new_start), new_mark_bit);
-    heap->incremental_marking()->RestartIfNotMarking();
-  }
-
-#ifdef DEBUG
-  ObjectColor new_color = Color(new_mark_bit);
-  DCHECK(new_color == old_color);
-#endif
-}
-
-
 const char* AllocationSpaceName(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:

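Similarly, the deleted Marking::TransferMark kept incremental marking consistent when an object's start address changed: a black object stays black at its new address, while a grey object is re-marked grey and re-pushed so the marker revisits it. A small toy model of that bookkeeping is sketched below; the Color enum, MarkState struct, and marking_worklist are invented for the sketch and do not model V8's actual mark bitmap.

// Toy illustration (not V8 code) of transferring a mark when an object's
// start address moves, e.g. after a left trim.
#include <cstdint>
#include <deque>
#include <iostream>
#include <unordered_map>

enum class Color { kWhite, kGrey, kBlack };

struct MarkState {
  std::unordered_map<uintptr_t, Color> color;  // stands in for the mark bitmap
  std::deque<uintptr_t> marking_worklist;      // grey objects still to be scanned

  void TransferMark(uintptr_t old_start, uintptr_t new_start) {
    if (old_start == new_start) return;        // nothing moved, nothing to do
    Color old_color = color[old_start];        // unknown addresses default to white
    color[old_start] = Color::kWhite;          // old location no longer marks an object
    if (old_color == Color::kBlack) {
      color[new_start] = Color::kBlack;        // already scanned: stays black
    } else if (old_color == Color::kGrey) {
      color[new_start] = Color::kGrey;         // still to be scanned: re-queue it
      marking_worklist.push_back(new_start);
    }
  }
};

int main() {
  MarkState marking;
  marking.color[0x1000] = Color::kGrey;
  marking.TransferMark(0x1000, 0x1010);        // e.g. object start moved by 16 bytes
  std::cout << "worklist size: " << marking.marking_worklist.size() << "\n";  // 1
}
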
@@ -119,8 +119,6 @@ class Marking : public AllStatic {
     markbit.Next().Set();
   }
 
-  static void TransferMark(Heap* heap, Address old_start, Address new_start);
-
 #ifdef DEBUG
   enum ObjectColor {
     BLACK_OBJECT,