diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h index d74bec0dfb..23e171232d 100644 --- a/src/heap/heap-inl.h +++ b/src/heap/heap-inl.h @@ -25,6 +25,16 @@ namespace v8 { namespace internal { +AllocationSpace AllocationResult::RetrySpace() { + DCHECK(IsRetry()); + return static_cast<AllocationSpace>(Smi::cast(object_)->value()); +} + +HeapObject* AllocationResult::ToObjectChecked() { + CHECK(!IsRetry()); + return HeapObject::cast(object_); +} + void PromotionQueue::insert(HeapObject* target, int32_t size, bool was_marked_black) { if (emergency_stack_ != NULL) { diff --git a/src/heap/heap.cc b/src/heap/heap.cc index f2e601480f..de419efb02 100644 --- a/src/heap/heap.cc +++ b/src/heap/heap.cc @@ -3937,7 +3937,14 @@ AllocationResult Heap::AllocateRawFixedArray(int length, int size = FixedArray::SizeFor(length); AllocationSpace space = SelectSpace(pretenure); - return AllocateRaw(size, space); + AllocationResult result = AllocateRaw(size, space); + if (!result.IsRetry() && size > kMaxRegularHeapObjectSize && + FLAG_use_marking_progress_bar) { + MemoryChunk* chunk = + MemoryChunk::FromAddress(result.ToObjectChecked()->address()); + chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR); + } + return result; } diff --git a/src/heap/heap.h b/src/heap/heap.h index e65d2b45b6..cce467ff4f 100644 --- a/src/heap/heap.h +++ b/src/heap/heap.h @@ -437,6 +437,10 @@ class PromotionQueue { class AllocationResult { public: + static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) { + return AllocationResult(space); + } + // Implicit constructor from Object*. 
AllocationResult(Object* object) // NOLINT : object_(object) { @@ -447,11 +451,9 @@ class AllocationResult { AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {} - static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) { - return AllocationResult(space); - } - inline bool IsRetry() { return object_->IsSmi(); } + inline HeapObject* ToObjectChecked(); + inline AllocationSpace RetrySpace(); template <typename T> bool To(T** obj) { @@ -460,13 +462,6 @@ class AllocationResult { return true; } - Object* ToObjectChecked() { - CHECK(!IsRetry()); - return object_; - } - - inline AllocationSpace RetrySpace(); - private: explicit AllocationResult(AllocationSpace space) : object_(Smi::FromInt(static_cast<int>(space))) {} diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc index d091efb3aa..c53d1dc466 100644 --- a/src/heap/incremental-marking.cc +++ b/src/heap/incremental-marking.cc @@ -186,13 +186,9 @@ class IncrementalMarkingMarkingVisitor static void VisitFixedArrayIncremental(Map* map, HeapObject* object) { MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); - // TODO(mstarzinger): Move setting of the flag to the allocation site of - // the array. The visitor should just check the flag. 
- if (FLAG_use_marking_progress_bar && - chunk->owner()->identity() == LO_SPACE) { - chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR); - } if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) { + DCHECK(!FLAG_use_marking_progress_bar || + chunk->owner()->identity() == LO_SPACE); Heap* heap = map->GetHeap(); // When using a progress bar for large fixed arrays, scan only a chunk of // the array and try to push it onto the marking deque again until it is diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h index 0fd69dacfe..314d22f9a6 100644 --- a/src/heap/spaces-inl.h +++ b/src/heap/spaces-inl.h @@ -165,14 +165,6 @@ bool NewSpace::FromSpaceContainsSlow(Address a) { bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); } bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); } -// -------------------------------------------------------------------------- -// AllocationResult - -AllocationSpace AllocationResult::RetrySpace() { - DCHECK(IsRetry()); - return static_cast<AllocationSpace>(Smi::cast(object_)->value()); -} - Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable, SemiSpace* owner) { DCHECK_EQ(executable, Executability::NOT_EXECUTABLE); diff --git a/src/heap/spaces.h b/src/heap/spaces.h index 690deeb93a..f1d05c73b3 100644 --- a/src/heap/spaces.h +++ b/src/heap/spaces.h @@ -498,7 +498,6 @@ class MemoryChunk { void ResetProgressBar() { if (IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) { set_progress_bar(0); - ClearFlag(MemoryChunk::HAS_PROGRESS_BAR); } } diff --git a/src/snapshot/deserializer.cc b/src/snapshot/deserializer.cc index 7a2df28f62..b90a2c5b10 100644 --- a/src/snapshot/deserializer.cc +++ b/src/snapshot/deserializer.cc @@ -414,7 +414,7 @@ Address Deserializer::Allocate(int space_index, int size) { LargeObjectSpace* lo_space = isolate_->heap()->lo_space(); Executability exec = static_cast<Executability>(source_.Get()); AllocationResult result = lo_space->AllocateRaw(size, exec); - HeapObject* obj = 
HeapObject::cast(result.ToObjectChecked()); + HeapObject* obj = result.ToObjectChecked(); deserialized_large_objects_.Add(obj); return obj->address(); } else if (space_index == MAP_SPACE) { diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc index 23321e3e0c..c69d391f90 100644 --- a/test/cctest/heap/test-heap.cc +++ b/test/cctest/heap/test-heap.cc @@ -2230,9 +2230,8 @@ TEST(TestAlignedOverAllocation) { AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kPointerSize); CHECK(!dummy.IsRetry()); - heap->CreateFillerObjectAt( - HeapObject::cast(dummy.ToObjectChecked())->address(), kPointerSize, - ClearRecordedSlots::kNo); + heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kPointerSize, + ClearRecordedSlots::kNo); // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones. const intptr_t double_misalignment = kDoubleSize - kPointerSize; diff --git a/test/cctest/heap/test-lab.cc b/test/cctest/heap/test-lab.cc index 3730e4a589..b625206f48 100644 --- a/test/cctest/heap/test-lab.cc +++ b/test/cctest/heap/test-lab.cc @@ -19,8 +19,7 @@ namespace internal { static Address AllocateLabBackingStore(Heap* heap, intptr_t size_in_bytes) { AllocationResult result = heap->old_space()->AllocateRaw( static_cast<int>(size_in_bytes), kDoubleAligned); - Object* obj = result.ToObjectChecked(); - Address adr = HeapObject::cast(obj)->address(); + Address adr = result.ToObjectChecked()->address(); return adr; } diff --git a/test/cctest/heap/test-spaces.cc b/test/cctest/heap/test-spaces.cc index cefdea76b3..fb05c98400 100644 --- a/test/cctest/heap/test-spaces.cc +++ b/test/cctest/heap/test-spaces.cc @@ -368,9 +368,9 @@ TEST(NewSpace) { CHECK(new_space.HasBeenSetUp()); while (new_space.Available() >= kMaxRegularHeapObjectSize) { - Object* obj = new_space.AllocateRawUnaligned(kMaxRegularHeapObjectSize) .ToObjectChecked(); - CHECK(new_space.Contains(HeapObject::cast(obj))); + CHECK(new_space.Contains( + 
new_space.AllocateRawUnaligned(kMaxRegularHeapObjectSize) + .ToObjectChecked())); } new_space.TearDown();